How to concurrently write a stream and read the data in node.js?

I have a node.js stream that I am temporarily writing to an array like this:

var tempCrossSection = [];

stream.on('data', function(data) {
    tempCrossSection.push(data);
});

Then I am periodically taking the data in that array (and clearing it) and doing some processing on it like this:

var crossSection = [];

setInterval(function() {
    crossSection = tempCrossSection;
    tempCrossSection = [];

    someOtherFunction(crossSection, function(data) {
        console.log(data);
    });
}, 30000);

The problem is that as the stream rate increases and/or the someOtherFunction callback takes too long, I get odd behavior: the stream data is written to the array out of order, and an unexpected number of setInterval callbacks fire.

How should I implement this so that the stream writes its data to the array in order and the processing runs exactly once per setInterval callback?

There are a few issues with your code. First of all, you are sharing too much state: crossSection should be defined solely inside the anonymous interval function, so there is no reason to keep it in the enclosing scope. If someOtherFunction runs for a long time while crossSection is shared like that, you might indeed run into race conditions. The version below keeps the shared state to a single array and swaps the reference before resetting it; since Node.js executes your JavaScript on a single thread, no 'data' events can sneak in between the two assignments.

var source = [];

stream.on('data', function(data) {
    source.push(data);
});

setInterval(function() {
    var target = source;
    source = [];

    someOtherFunction(target, function(data) {
        console.log(data);
    });
}, 30000);

If you have access to someOtherFunction, then I would rewrite the whole thing like this:

var source = [];

stream.on('data', function(data) {
    source.push(data);
});

setInterval(function() {
    var processing = source.length > 0;

    while (processing) {
        // Take elements off the queue one at a time, in arrival order.
        var elem = source.shift();
        someOtherFunction(elem, function(data) {
            console.log(data);
        });
        // Stop once the break condition is met (e.g. the queue is drained).
        processing = checkForBreakConditionAndReturnFalseIfBreak();
    }
}, 30000);
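checkForBreakConditionAndReturnFalseIfBreak is left as a placeholder above; a minimal sketch, assuming the only break condition is that the queue has been drained, might look like this:

// Hypothetical sketch: returns false (stop processing) once the queue
// is empty, true otherwise. A real implementation could also cap the
// time spent per tick so one run cannot block the event loop for long.
function checkForBreakConditionAndReturnFalseIfBreak() {
    return source.length > 0;
}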

Still, you might run into some issues if the number of elements grows too big and someOtherFunction takes too long, because a fixed interval cannot adapt to the processing time. So I'd probably do something like this:

var source = [];
var timerId = 0;

stream.on('data', function(data) {
    source.push(data);
});

function processSource() {
    // Cancel any pending run so processing never overlaps.
    clearTimeout(timerId);
    var processing = source.length > 0;

    while (processing) {
        var elem = source.shift();
        someOtherFunction(elem, function(data) {
            console.log(data);
        });
        processing = checkForBreakConditionAndReturnFalseIfBreak();
    }
    // Reschedule, adapting the delay to how the last run went.
    timerId = setTimeout(processSource, calcTimeoutForNextProcessingDependentOnPastData());
}

timerId = setTimeout(processSource, 30000); // initial timeout
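calcTimeoutForNextProcessingDependentOnPastData is also a placeholder; one hypothetical heuristic is to poll sooner while data keeps arriving and back off while the queue stays empty, for example:

// Hypothetical heuristic: shrink the delay toward 5s while the queue
// keeps filling up, stretch it toward 60s while it stays idle.
var lastDelay = 30000;

function calcTimeoutForNextProcessingDependentOnPastData() {
    lastDelay = source.length > 0
        ? Math.max(5000, lastDelay / 2)   // busy: poll sooner
        : Math.min(60000, lastDelay * 2); // idle: back off
    return lastDelay;
}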