I'm using this asynchronous recursive function to iterate through a directory's files and folders, and when a .css file is found, I append data to a file called 'common.css'.
var fs = require('fs');

var walk = function(dir, done) {
    var results = [];
    fs.readdir(dir, function(err, list) {
        if (err) return done(err);
        var pending = list.length;
        if (!pending) return done(null);
        list.forEach(function(file) {
            file = dir + '/' + file;
            fs.stat(file, function(err, stat) {
                if (stat && stat.isDirectory()) {
                    // Recurse into the subdirectory.
                    walk(file, function(err, res) {
                        results = results.concat(res);
                        if (!--pending) done(null);
                    });
                } else if (file) {
                    // Append to common.css, then close the descriptor.
                    fs.open('common.css', 'a', 0o666, function(err, id) {
                        fs.write(id, 'hello', null, 'utf8', function() {
                            fs.close(id, function() {
                                console.log('file closed');
                            });
                        });
                    });
                    if (!--pending) done(null);
                }
            });
        });
    });
};
The problem is that, since everything is asynchronous, I suspect there are times when several instances of the function are writing to the file at the same time. Is there any way I can control this flow and queue the writing tasks?
The purpose of this code is to merge the .css files that are found in the directory. I cannot use external tools in this project.
EDIT ANSWER: Well, for this purpose, I can just gather the paths of all the .css files I want to merge, and once I have them all, call a synchronous function to write them. That's what the var results = [] declaration is there for. I just realized it.
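A minimal sketch of that idea, assuming the paths have already been collected into results (mergeCss is a hypothetical name):

var fs = require('fs');

// Once all paths are known, synchronous reads and a single write run
// strictly in order, so nothing can interleave.
function mergeCss(results) {
    var combined = results.map(function(file) {
        return fs.readFileSync(file, 'utf8');
    }).join('\n');
    fs.writeFileSync('common.css', combined);
}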
I think I would look into using something like the async.js library, which has capabilities for making sure things happen in the right order, including async.forEachSeries(arr, eachFn, doneFn).
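For example, a minimal sketch that appends each stylesheet one at a time (cssFiles is assumed to be an array of .css paths you have already collected):

var fs = require('fs');
var async = require('async');

async.forEachSeries(cssFiles, function(file, callback) {
    fs.readFile(file, 'utf8', function(err, data) {
        if (err) return callback(err);
        // Each append only starts after the previous one has finished.
        fs.appendFile('common.css', data, callback);
    });
}, function(err) {
    if (err) return console.error(err);
    console.log('all files merged');
});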
My suggestion would be: if you are doing this as a standalone application - not part of some much larger app - you could write all of your .css files to console.log and invoke it with "node combine.js > combined.css". From what I've seen using FileStreams and console.log, it doesn't mishmash writes.
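A minimal sketch of that approach; the file list here is hard-coded as an assumption, whereas in practice you would collect it by walking the directory first:

// combine.js - prints each stylesheet to stdout; run as:
//   node combine.js > combined.css
var fs = require('fs');

var cssFiles = ['a.css', 'b.css', 'c.css'];

cssFiles.forEach(function(file) {
    // readFileSync keeps the output in order without any callbacks.
    console.log(fs.readFileSync(file, 'utf8'));
});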
You could open common.css at the beginning and keep the file descriptor open, then call fs.write(id, ...) whenever you encounter a CSS file. This is instead of your current method of re-opening common.css each time.
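A minimal sketch of that idea, where walk refers to your function and appendCss is a hypothetical helper used inside it instead of re-opening the file for every stylesheet:

var fs = require('fs');

// Open common.css once, in append mode, before the walk starts.
fs.open('common.css', 'a', 0o666, function(err, id) {
    if (err) throw err;

    // Reuse the same descriptor for every write.
    function appendCss(data, callback) {
        fs.write(id, data, null, 'utf8', callback);
    }

    walk('.', function(err) {
        // Close the descriptor only once the whole walk is done.
        fs.close(id, function() {
            console.log('common.css closed');
        });
    });
});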
In your code, it looks like you are writing hello to the file. If that is literally true, or if what you're writing is relatively short (typically 512 or 4096 bytes depending on the platform), all the appends to common.css will be atomic, so different asynchronous functions writing at the same time won't interfere with each other.
Well, I ended up using a serial recursive function for the same purpose. It solved the problem. It is taken from the answer by user "chjj" to node.js fs.readdir recursive directory search:
var fs = require('fs');

var walk = function(dir, done) {
    var results = [];
    fs.readdir(dir, function(err, list) {
        if (err) return done(err);
        var i = 0;
        (function next() {
            var file = list[i++];
            // No entries left: report the collected paths.
            if (!file) return done(null, results);
            file = dir + '/' + file;
            fs.stat(file, function(err, stat) {
                if (stat && stat.isDirectory()) {
                    // Recurse, then move on to the next entry.
                    walk(file, function(err, res) {
                        results = results.concat(res);
                        next();
                    });
                } else {
                    // Plain file: record it and continue serially.
                    results.push(file);
                    next();
                }
            });
        })();
    });
};
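For completeness, a sketch of how the collected paths can then be merged synchronously, as described in the edit above (the filter pattern and output name are assumptions):

walk('.', function(err, results) {
    if (err) throw err;
    // Keep only the stylesheets, then write them out one by one;
    // synchronous appends cannot interleave.
    results.filter(function(file) {
        return /\.css$/.test(file);
    }).forEach(function(file) {
        fs.appendFileSync('common.css', fs.readFileSync(file, 'utf8'));
    });
});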
Thank you all for your suggestions :)