nodejs read file and make http request

There is a Node.js script that reads a group of files one by one. For each file, it reads the document line by line; after reading a line, it makes an HTTP POST request to send that line to a remote server, then reads the next line. The problem is that the script misses some lines.

Thank you.

It seems that lr.pause(); just hides the 'line' event, instead of pausing the file-reading process.

var fs = require('fs');
var http = require('http');
var JSON = require('JSON');
var S = require('string');
var uuid = require('node-uuid');
var readline = require('readline');
var httpsync = require('httpsync');
var LineByLineReader = require('line-by-line');
var sleep = require('sleep');

/**
 * POST `data` (JSON-serialized) to the remote server at `_path`,
 * then resume the paused line reader once the request finishes.
 *
 * @param {string} _path - request path on the remote server
 * @param {Object} data  - payload; serialized with JSON.stringify
 * @param {string} id    - caller's run identifier (unused here, kept for interface compatibility)
 * @param {LineByLineReader} lr - paused reader; resumed on response end OR on request error
 */
function postES(_path,data,id,lr){
  var post_data = JSON.stringify(data);
  var post_options = {
      host: _host,   // NOTE(review): _host/_port must be defined elsewhere in the file — confirm
      port: _port,
      path: _path,
      method: 'POST',
      headers: {
          'Content-Type': 'application/x-www-form-urlencoded',
          // Bug fix: Content-Length must be the BYTE length of the body, not the
          // character count. For multi-byte UTF-8 lines, post_data.length under-reports
          // the size and the server truncates the body — a likely cause of "missing" data.
          'Content-Length': Buffer.byteLength(post_data)
      }
  };
  var post_req = http.request(post_options, function(res) {
      res.setEncoding('utf8');
      res.on('data', function(data) {
        console.log(data);
      });
      res.on('end', function() {
        console.log("end");
        // Response fully received: let the reader emit the next line.
        lr.resume();
      });
  });
  post_req.on('error', function(data) {
    console.log("error,post."+data+post_data);
    // Request failed: resume anyway so the file is not stalled on one bad line.
    lr.resume();
  });
  post_req.write(post_data);
  post_req.end();
}


/**
 * Per-line handler: build the session payload for one input line and
 * forward it to the remote server via postES.
 *
 * @param {string} line - raw line read from the current file
 * @param {string} id   - run identifier, passed through to postES
 * @param {LineByLineReader} lr - reader to be resumed once the POST completes
 */
function readlineFunSession(line,id,lr) {
  var _data = {};
  // compose _data object
  var sessionPath = '/cs/session/' + _data["sessionid"];
  postES(sessionPath, _data, id, lr);
}

/**
 * Process files[start] line by line, pausing the reader around each line's
 * handler, then recurse into the next file when the current one ends.
 * Stops when start reaches `end` or runs past the array.
 *
 * @param {string[]} files - file paths to process in order
 * @param {number} start   - index of the current file (inclusive)
 * @param {number} end     - index to stop at (exclusive)
 * @param {string} id      - run identifier threaded through to readlineFun
 * @param {Function} readlineFun - (line, id, lr) handler; must eventually call lr.resume()
 */
function readfileFun(files,start,end,id,readlineFun) {
  if(start<end && start<files.length){
    var lr = new LineByLineReader(files[start],{encoding:'utf8',skipEmptyLines:true});
    lr.on('error', function (e) {
      console.log('error,LineByLineReader.'+e.toString());
    });
    lr.on('line', function (line) {
      // Pause so no further 'line' events fire until the handler resumes us.
      lr.pause();
      try{
        readlineFun(line,id,lr);
      }catch(e){
        console.log('error,line.'+e.toString());
        // Bug fix: if readlineFun throws synchronously, the reader was left
        // paused forever and the rest of the file was silently skipped.
        // Resume here so one bad line does not stall the whole run.
        lr.resume();
      }
    });
    lr.on('end', function () {
      // Current file finished: move on to the next one.
      readfileFun(files,++start,end,id,readlineFun);
    });
  }
}

// `files` is an array of file paths (defined elsewhere).
// This call goes through files[0]..files[9] (start=0, end=10; end is exclusive).
readfileFun(files,0,10,"ID-1",readlineFunSession);

Doing a series of actions where the next action runs only after the current one finishes is a bit difficult in Node.js due to its asynchronous paradigm. One way to do it is to use a flow-control npm package such as fibers or async.waterfall,

but another simple (and naive) way is to create a dummy worker manager: make your Node.js process run indefinitely and, on every time interval, check whether the current task is done; if it is, run the next action.

By the way, while you can't make the HTTP request synchronous, you can read files synchronously, so in your case I think you should first read all the lines from all the files into one big array of lines.

var jswget = require("jswget");

// All lines from all input files, read up-front (synchronously).
var arrayoflines = ["line1", "line2", "line3"];
// Placeholder POST body sent with every line; replace with your real form data.
var some_postdata = "";
var counter = 0;          // requests completed so far (success or failure)
var inProgress = false;   // true while a request is in flight

/**
 * Send the next queued line if no request is in flight.
 * Called repeatedly by the interval below; exits the process when the
 * queue is empty.
 */
var request = function(){
    if (arrayoflines.length == 0) {
        // No more lines: all work is done, exit.
        process.exit();
    }
    if (inProgress) {
       // Previous request has not completed yet; try again on the next tick.
       return;
    }
    // Take the next line off the front of the queue.
    var current_line = arrayoflines.shift();
    inProgress = true;
    jswget({
       url: "http://someurl:3000/somepath?q1=" + current_line,
       method: 'POST',
       formdata: some_postdata,
       headers: {
         'Content-Type': 'application/x-www-form-urlencoded',
         // Bug fix: the original referenced an undefined `post_data` variable
         // (ReferenceError). Measure the byte length of the body actually sent.
         'Content-Length': Buffer.byteLength(some_postdata)
       },
       onsuccess: function(responsetext, req, res){
          // success requesting, should do next line
       },
       onerror: function(err, req){
          // oops, error occurred, but we will do next line nevertheless
       },
       onend: function(){
          // Success or not, the request has ended; allow the next one to start.
          counter += 1;
          inProgress = false;
       }
    })
}
// Poll every 100 ms; each tick sends at most one request.
setInterval(function(){
   request();
}, 100)

This may help you...

With Node 0.12, it's possible to do this synchronously now:

  var fs = require('fs');
  var path = require('path');

  // Load the whole data file into memory once, up front.
  var BUFFER = bufferFile('../public/mydata.txt');

  /**
   * Synchronously read a file located relative to this script's directory.
   * @param {string} relPath - path relative to __dirname
   * @returns {Buffer} raw file contents
   */
  function bufferFile(relPath) {
    var absolute = path.join(__dirname, relPath);
    return fs.readFileSync(absolute); // blocks until fully read
  }

fs is the file system. readFileSync() returns a Buffer, or string if you ask.

fs resolves relative paths against the current working directory, which may not be the script's own directory. Joining with __dirname via path is the work-around.

To load as a string, specify the encoding:

return fs.readFileSync(path,{ encoding: 'utf8' });