After using node a lot, I had to get used to writing my code in a non-blocking way, however the main way I can do this is by using functions that are themselves asynchronous. For example: stat(f,callback)
or forEach(array, callback)
They automatically take whatever callback you gave them out of what I would think to be the main execution highway and return immediately after being called.
What I want to know is: how can I tell the JavaScript engine to execute a function asynchronously, no matter what it is?
My particular use case involves iterating a for-loop over a DOM childList to parse thousands of elements; my problem is that every other element is a text node, which I'd like to skip over. While I could use forEach(),
it isn't the best fit here; the only thing I see as able to rectify that is for(a,i=0;a=table[i];i=i+2){/*process 'a'*/},
at the cost of being blocking. What would be the best course of action?
Bonus Question: Does NodeJS's coding practices hold any ground in clientside applications in use cases where JS has to do heavy lifting?
Note: Array.prototype.forEach
is synchronous, not asynchronous. Anything defined in the JS standard (ECMAScript 5th edition) cannot be asynchronous, because the standard does not define async semantics (Node.js and the DOM do).
You can use setTimeout
(works in browsers and in Node.js) or process.nextTick
(Node.js-specific):
// Schedule each unit of work on the event loop instead of running it
// inline -- this loop itself returns immediately.
for (...) {
doWorkAsync(...);
}
// Wrapper: defers the synchronous worker to a later turn of the event
// loop via a zero-delay timer (runs after the current call stack unwinds).
function doWorkAsync(...) {
setTimeout(doWorkSync.bind(null, ...), 0);
}
// The actual (blocking) work, executed one chunk per timer callback.
function doWorkSync(...) {
...
}
Be careful when using free variables if you choose to exploit closures, as the variables may be mutated when your callback is finally called.
With an async framework, such as Q by kriskowal (portable across Node.js and modern browsers), you can do mapreduce-style programming:
var Q = require('q'); // npm package 'q'
// Builds an array of promises, one per unit of work.
// Q.fcall invokes the bound function on a later event-loop turn and
// captures its return value (or thrown error) in a promise.
function getWorkloads() {
var workloads = [ ];
for (...) {
workloads.push(Q.fcall(doWorkSync.bind(null, ...)));
}
return workloads;
}
// Resolves once every workload promise has fulfilled; rejects on the
// first failure.
Q.all(getWorkloads()).then(function (results) {
// results array corresponds to
// the array returned by getWorkloads.
});
I am in the same boat. I kind of liked Node's async functions, so I wrote these async For and ForEach functions. They use the "setTimeout(Func,0);" trick.
Here is the library:
var WilkesAsyncBurn = function()
{
    // Millisecond clock; avoids allocating a Date object per comparison.
    var now = function() { return Date.now(); };

    // Normalizes the per-tick time budget. In the original, a zero,
    // undefined, or NaN msBurnTime made the burn loop never execute, so
    // the timer rescheduled forever without making progress. Default to
    // 4 ms (the HTML5 nested-timeout clamp).
    var normalizeBurnTime = function(ms) { return (ms > 0) ? ms : 4; };

    // Asynchronous counted loop: runs eachCallback(i) for i in
    // [start, end), burning up to msBurnTime ms of iterations per
    // event-loop tick, then calls finalCallback() once when finished.
    var For = function(start, end, eachCallback, finalCallback, msBurnTime)
    {
        var budget = normalizeBurnTime(msBurnTime);
        var i = start;
        var tick = function()
        {
            var deadline = now() + budget;
            // do-while guarantees at least one iteration per tick, so the
            // loop always makes progress even with a tiny budget.
            do
            {
                if (i >= end) { finalCallback(); return; }
                eachCallback(i);
                i++;
            } while (now() < deadline);
            // Reschedule only while work remains (the original always
            // armed one extra no-op timer after completion).
            setTimeout(tick, 0);
        };
        tick();
    };

    // Asynchronous array iteration: eachCallback(index, value) for each
    // element, then finalCallback(array); same time-budget scheme as For.
    var ForEach = function(array, eachCallback, finalCallback, msBurnTime)
    {
        var budget = normalizeBurnTime(msBurnTime);
        var i = 0;
        var len = array.length;
        var tick = function()
        {
            var deadline = now() + budget;
            do
            {
                if (i >= len) { finalCallback(array); return; }
                eachCallback(i, array[i]);
                i++;
            } while (now() < deadline);
            setTimeout(tick, 0);
        };
        tick();
    };

    var pub = {};
    pub.For = For;         //eachCallback(index); finalCallback();
    pub.ForEach = ForEach; //eachCallback(index,value); finalCallback(array);
    // Self-replacing init: the first call swaps the function for the API
    // object, so subsequent use is WilkesAsyncBurn.For(...) etc.
    WilkesAsyncBurn = pub;
};
Example Usage:
// Init the library (the first call installs the For/ForEach API object).
WilkesAsyncBurn();
console.log("start");

// Progress reporter: logs one line per 10,000 iterations.
function reportProgress(index)
{
    if (index % 10000 === 0)
    {
        console.log("index=" + index);
    }
}

// Completion callback.
function reportDone()
{
    console.log("done");
}

// Count 0..1,999,999, burning up to 50 ms of work per timer tick.
WilkesAsyncBurn.For(0, 2000000, reportProgress, reportDone, 50);
This prints: index=10000, index=20000, index=30000, etc., and finally "done".
More research if interested:
setTimeout and setInterval have a minimum overhead time of about 2 to 10 milliseconds, therefore, firing thousands or millions of timers is going to be slow for no reason. So basically, if you need to perform thousands or more loops without locking the browser, you need to be more like a thread (gasp), and "burn" some code for a set amount of time, rather than set amount of iterations.