Convert text from a text file to an array with fs [Node.js]

I have a txt file that contains:

{"date":"2013/06/26","statement":"insert","nombre":1} {"date":"2013/06/26","statement":"insert","nombre":1} {"date":"2013/06/26","statement":"select","nombre":4}

How can I convert the contents of the text file to an array such as:

statement = [
    {"date":"2013/06/26","statement":"insert","nombre":1},
    {"date":"2013/06/26","statement":"insert","nombre":1},
    {"date":"2013/06/26","statement":"select","nombre":4},
];

I am using the fs module of Node.js. Thanks.

Sorry, I will explain in more detail:

I have an array:

st = [
    {"date":"2013/06/26","statement":"insert","nombre":1},
    {"date":"2013/06/26","statement":"insert","nombre":5},
    {"date":"2013/06/26","statement":"select","nombre":4},
];

If I use this code:

var arr = new LINQ(st)
    .OrderBy(function(x) {return x.nombre;})
    .Select(function(x) {return x.statement;})
    .ToArray();

I get the result I want:

insert select insert

But the problem is that my data is in a text file. Any suggestions? Thanks again.

If it's a small file, you might get away with something like this:

var fs = require("fs");

// specifying the encoding means you don't have to call `.toString()`
var arrayOfThings = fs.readFileSync("./file", "utf8").trim().split(/[\r\n]+/g).map(function(line) {
  // this try/catch will make it so we just return null
  // for any lines that don't parse successfully, instead
  // of throwing an error.
  try {
    return JSON.parse(line);
  } catch (e) {
    return null;
  }
// this .filter() removes anything that didn't parse correctly
}).filter(function(object) {
  return !!object;
});

If it's larger, you might want to consider reading it line by line, using one of the many modules on npm for consuming lines from a stream.
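For instance, Node's built-in readline module (no npm dependency) can do this too. Here's a rough sketch along those lines, using the same placeholder "./file" path:

var fs = require("fs"),
    readline = require("readline");

var rl = readline.createInterface({
  input: fs.createReadStream("./file")
});

var arrayOfThings = [];

rl.on("line", function(line) {
  // as above, silently skip lines that don't parse as JSON
  try {
    arrayOfThings.push(JSON.parse(line));
  } catch (e) {}
});

rl.on("close", function() {
  console.log(arrayOfThings);
});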

Wanna see how to do it with streams? Let's see how we do it with streams. This isn't a practical example, but it's fun anyway!

var stream = require("stream"),
    fs = require("fs");

// Transform stream that buffers raw text and emits it one line at a time
var LineReader = function LineReader(options) {
  options = options || {};
  options.objectMode = true;

  stream.Transform.call(this, options);

  this._buffer = "";
};
LineReader.prototype = Object.create(stream.Transform.prototype, {constructor: {value: LineReader}});

LineReader.prototype._transform = function _transform(input, encoding, done) {
  if (Buffer.isBuffer(input)) {
    input = input.toString("utf8");
  }

  // accumulate input until it contains at least one line break
  this._buffer += input;

  var lines = this._buffer.split(/[\r\n]+/);

  // the last element may be a partial line; keep it for the next chunk
  this._buffer = lines.pop();

  for (var i = 0; i < lines.length; ++i) {
    this.push(lines[i]);
  }

  return done();
};

LineReader.prototype._flush = function _flush(done) {
  // emit whatever is left in the buffer once the input ends
  if (this._buffer.length) {
    this.push(this._buffer);
  }

  return done();
};

// Transform stream that parses each line as JSON and emits the object
var JSONParser = function JSONParser(options) {
  options = options || {};
  options.objectMode = true;

  stream.Transform.call(this, options);
};
JSONParser.prototype = Object.create(stream.Transform.prototype, {constructor: {value: JSONParser}});

JSONParser.prototype._transform = function _transform(input, encoding, done) {
  try {
    input = JSON.parse(input);
  } catch (e) {
    return done(e);
  }

  this.push(input);

  return done();
};

// Transform stream that collects every object into a single array
var Collector = function Collector(options) {
  options = options || {};
  options.objectMode = true;

  stream.Transform.call(this, options);

  this._entries = [];
};
Collector.prototype = Object.create(stream.Transform.prototype, {constructor: {value: Collector}});

Collector.prototype._transform = function _transform(input, encoding, done) {
  this._entries.push(input);

  return done();
};

Collector.prototype._flush = function _flush(done) {
  this.push(this._entries);

  return done();
};

fs.createReadStream("./file")
  .pipe(new LineReader())
  .pipe(new JSONParser())
  .pipe(new Collector())
  .on("readable", function() {
    var results = this.read();

    console.log(results);
  });
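With the sample file from the question, results is the complete array of parsed objects, since the Collector only pushes its array once _flush runs at the end of the input.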

There is no reason not to write your file parser yourself. This will work on a file of any size:

var fs = require('fs');

// specify the encoding so chunks arrive as strings, not Buffers
var fileStream = fs.createReadStream('file.txt', 'utf8');

var data = "";

fileStream.on('readable', function() {
  // this function reads chunks of data and emits a newLine event when \n is found
  // (read() returns null once there is nothing left to consume)
  var chunk;
  while ((chunk = fileStream.read()) !== null) {
    data += chunk;
  }
  while (data.indexOf('\n') >= 0) {
    fileStream.emit('newLine', data.substring(0, data.indexOf('\n')));
    data = data.substring(data.indexOf('\n') + 1);
  }
});

fileStream.on('end', function() {
  // this function sends the last chunk of data to the newLine handler
  // and signals that the file has ended
  fileStream.emit('newLine', data, true);
});

var statement = [];

fileStream.on('newLine', function(line_of_text, end_of_file) {
  // this is the code where you handle each line
  // line_of_text = string which contains one line
  // end_of_file = true if the end of file has been reached
  if (line_of_text) { // guard against an empty final line (trailing newline)
    statement.push(JSON.parse(line_of_text));
  }
  if (end_of_file) {
    console.dir(statement);
    // here you have your statement array ready
  }
});
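Note that data only ever holds the current, still-incomplete line, so memory use is bounded by the length of the longest line rather than by the size of the file.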

var fs = require('fs');

fs.readFileSync("myfile.txt").toString().split(/[\r\n]+/)

This gets you each line as a string.

You can then use UnderscoreJS or your own for loop to apply JSON.parse to each element of the array, as in the sketch below.
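For example, with a plain for loop (a minimal sketch; "myfile.txt" is assumed to contain one JSON object per line):

var fs = require('fs');

var lines = fs.readFileSync("myfile.txt").toString().split(/[\r\n]+/);

var statement = [];
for (var i = 0; i < lines.length; i++) {
  if (lines[i]) { // skip empty lines
    statement.push(JSON.parse(lines[i]));
  }
}

console.dir(statement);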