Setting process.stdout to a file per node.js core cluster worker

I'm trying to use node core's cluster feature.

I would like the stdout and stderr streams to output to a file, one for each worker id.

Something much like the following:

var fs          = require('fs'), 
    env         = process.env,
    workerId    = env.NODE_WORKER_ID || env.NODE_UNIQUE_ID;

process.stdout = fs.createWriteStream(__dirname + '/app#' + workerId + '.log', {
    encoding: 'utf8'
});

Unfortunately, this does not replace process.stdout; the assignment appears to be silently ignored.
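
From what I can tell (this may depend on the Node version), process.stdout is defined with a getter and no setter, so plain assignment is silently ignored in sloppy mode (and throws in strict mode). You can check the descriptor yourself:

console.log(Object.getOwnPropertyDescriptor(process, 'stdout'));
//on the versions I've looked at, this shows a getter and no setter,
//which is why `process.stdout = ...` has no effect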

Is there a way to achieve this, or should this be done differently? Currently, when I run my cluster, I get the output of all processes interleaved in one console, which is extremely messy.

I ended up doing the following:

    //setup: worker id and per-worker log file paths
    //(the file naming is assumed here; adjust to taste)
    var fs         = require('fs'),
        env        = process.env,
        workerId   = env.NODE_WORKER_ID || env.NODE_UNIQUE_ID,
        stdoutFile = __dirname + '/app#' + workerId + '.stdout.log',
        stderrFile = __dirname + '/app#' + workerId + '.stderr.log';

    //create a new stdout file stream
    var stdoutFS = fs.createWriteStream(stdoutFile, {
        encoding: 'utf8',
        flags   : 'a+'
    });

    //create a new stderr file stream
    var stderrFS = fs.createWriteStream(stderrFile, {
        encoding: 'utf8',
        flags   : 'a+'
    });

    //mirror stdout to the worker file
    var unhookStdout = hookWriteStream(process.stdout, function(string, encoding, fd) {
        stdoutFS.write(string, encoding || 'utf8');
    });
    console.log('\n\nPrepared new stdout hook to worker file.');

    //mirror stderr to the worker file
    var unhookStderr = hookWriteStream(process.stderr, function(string, encoding, fd) {
        stderrFS.write(string, encoding || 'utf8');
    });
    console.log('Prepared new stderr hook to worker file.');

    //unhook when things go wrong
    stdoutFS.once('close', function() {
        unhookStdout();
        console.log('Unhooked stdout.');
    });
    stdoutFS.once('error', function(err) {
        unhookStdout();
        console.error('Error: Unhooked stdout due to error %j.', err);
    });
    stderrFS.once('close', function() {
        unhookStderr();
        console.log('Unhooked stderr.');
    });
    stderrFS.once('error', function(err) {
        unhookStderr();
        console.error('Error: Unhooked stderr due to error %j.', err);
    });

function hookWriteStream(stream, callback) {
    var oldWrite = stream.write;

    //replace stream.write with a wrapper that still performs the
    //original write, then hands the same arguments to the callback
    stream.write = (function(write) {
        return function(string, encoding, fd) {
            write.apply(stream, arguments);
            callback(string, encoding, fd);
        };
    })(stream.write);

    //return a function that restores the original write
    return function() {
        stream.write = oldWrite;
    };
}

It may not be very elegant, but so far this is the best solution I've found.

Looks like my idea works, at least to a degree. As long as the output goes through process.stdout or process.stderr (console.log ultimately calls process.stdout.write), the hook will capture it; anything that writes to the underlying file descriptor directly will slip past it.
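
To illustrate that caveat, a write to file descriptor 1 never touches process.stdout.write, so the hook never sees it:

var fs = require('fs');

console.log('captured by the hook');     //goes through process.stdout.write
fs.writeSync(1, 'bypasses the hook\n');  //writes straight to file descriptor 1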

Like I said in the comments below, use a script like this:

fs     = require 'fs'
{exec} = require 'child_process'

execAndPipe = (execString) ->
    piper = exec execString

    piper.stdout.on 'data', (data) ->
        data = data.toString()
        if data[0...'PROCESS'.length] is 'PROCESS'
            # extract the worker ID and output
            # to a corresponding file

    piper.stderr.on 'data', (data) ->
        data = data.toString()
        if data[0...'PROCESS'.length] is 'PROCESS'
            # extract the worker ID and output
            # to a corresponding file

task 'run', 'Run the server', ->
    execAndPipe 'node blah.js'

to run your server. Then just redefine console.log like:

console.log = function (d) {
    //WORKERID stands in for this worker's id
    //(e.g. from process.env.NODE_WORKER_ID)
    process.stdout.write('PROCESS' + WORKERID + d + '\n');
};

I kinda doubt you'll be able to rebind stdout directly, so this might be amongst your best options.
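
To flesh out the part elided in the comments above, the master-side routing could look roughly like this in plain JavaScript. All names here are illustrative, the regex assumes numeric worker ids, and for simplicity it assumes each 'data' chunk contains whole lines:

var fs   = require('fs');
var exec = require('child_process').exec;

var streams = {};

//route one 'PROCESS<id><message>' line to that worker's log file
function routeLine(line) {
    var match = /^PROCESS(\d+)(.*)$/.exec(line);
    if (!match) return;
    var id = match[1];
    if (!streams[id]) {
        streams[id] = fs.createWriteStream(__dirname + '/app#' + id + '.log', {
            encoding: 'utf8',
            flags   : 'a+'
        });
    }
    streams[id].write(match[2] + '\n');
}

var child = exec('node blah.js');
child.stdout.on('data', function (data) {
    data.toString().split('\n').forEach(routeLine);
});
child.stderr.on('data', function (data) {
    data.toString().split('\n').forEach(routeLine);
});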

If you don't want anything to output to the console at ALL, you could rebind console.log like:

//open the per-worker log file once, outside the rebound function
var str = fs.createWriteStream(__dirname + '/app#' + workerId + '.log', {
    encoding: 'utf8',
    flags   : 'a+'
});

console.log = function (d) {
    str.write(d + '\n');
};

And forget about the external script.
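
Another route worth a look, assuming a Node version whose cluster module supports silent workers: fork the workers with their stdio piped back to the master instead of inherited, then pipe each worker's output into its own file. A minimal sketch (the file naming is illustrative):

var cluster = require('cluster');
var fs      = require('fs');

if (cluster.isMaster) {
    //pipe worker stdio back to the master instead of
    //sharing the parent's stdout/stderr
    cluster.setupMaster({ silent: true });

    for (var i = 0; i < 2; i++) {
        var worker = cluster.fork();
        var log = fs.createWriteStream(__dirname + '/app#' + worker.id + '.log', {
            encoding: 'utf8',
            flags   : 'a+'
        });
        worker.process.stdout.pipe(log);
        worker.process.stderr.pipe(log);
    }
} else {
    console.log('hello from worker ' + cluster.worker.id);
}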

Have you looked at the caterpillar logging library?

Also: ugh, CoffeeScript in answers. Ugh.