'Write after end' error with Express, csvtojson and node-walk

I'm having trouble sending JSON data built from CSV files with Express. The app takes a folder of CSV files, loops through them, converts them all to JSON and then sends the result, but after the first load I get Error: write after end along with (node) warning: possible EventEmitter memory leak detected. 11 listeners added. Use emitter.setMaxListeners() to increase limit. The error only occurs when I hit the /get path, not the main index page.

I know it has something to do with read streams being closed or not closed, but I don't have enough knowledge to work out exactly what to do. Here's my code; any advice would be lovely. Ta!

P.S. The technologies used are Express, node-walk and csvtojson.

init.js

var _ = require('underscore')._;
var express = require('express');

var server = express();

server.use(express.static(__dirname + '/frontend/public/'));
server.listen(1234);

var getData = require('./src/getData.js');

server.get('/get', function (req, res) {
  getData.get(function (data) {
    res.send(data);
  })
});

server.get('/', function (req, res) {
  res.sendFile('./index.html');
});

getData.js

var walk      = require('walk');
var fs        = require('fs');
var _         = require('underscore')._;
var csvtojson = require('csvtojson').core.Converter;

var files = [];

var csvConverter = new csvtojson({constructResult: true});

exports.get = function (callback) {

  var csvWalker = walk.walk('./place-csvs-here', {followLinks: false});

  csvWalker.on('file', function (root, stat, next) {
    if (stat.name.match(/[A-Z]+?\-\d+?\.csv/gi)) {
      var filename = root + '/' + stat.name;
      files.push(fs.createReadStream(filename));
    }
    next();
  });

  csvWalker.on('end', function () {
    files.forEach(function (file) {
      file.pipe(csvConverter);
    });
  });

  csvConverter.on('end_parsed', function (data) {
    if (callback) {
      // Ensure rows are unique.
      data = _.uniq(data, function (x) {
        return JSON.stringify(x);
      });
      // Required fields.
      data = _.filter(data, function (x) {
        return (
           x['Date']    !== undefined
        && x['Balance'] !== undefined
        && x['Date']    !== 'Date'
        && x['Balance'] !== 'Balance'
          );
      });
      callback(data);
    }
  });

}

You're re-using both the module-level files array and the same csvtojson Converter instance across requests, and you're piping many files into that single parser at the same time. Once the first source stream finishes, pipe() ends the converter's writable side, so every later pipe into it fails with write after end. On top of that, each call to exports.get attaches another 'end_parsed' listener to the shared converter, which is what triggers the EventEmitter leak warning.
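
As a minimal sketch of that first failure mode, using plain PassThrough streams as a stand-in for the converter (this is not csvtojson itself, just an illustration of the stream mechanics):

var PassThrough = require('stream').PassThrough;

// Stand-in for the single shared converter instance.
var parser = new PassThrough();
parser.resume();

var first  = new PassThrough();
var second = new PassThrough();

first.pipe(parser);           // pipe() ends the destination when the source ends
first.end('a,b\n1,2\n');

parser.on('finish', function () {
  // The parser's writable side has already ended, so this write fails
  // with "Error: write after end".
  second.pipe(parser);
  second.end('c,d\n3,4\n');
});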

Try this getData.js instead:

var walk      = require('walk');
var fs        = require('fs');
var _         = require('underscore')._;
var csvtojson = require('csvtojson').core.Converter;

exports.get = function (callback) {

  var csvWalker = walk.walk('./place-csvs-here', {followLinks: false});
  var files = [];
  var ret = [];

  csvWalker.on('file', function (root, stat, next) {
    if (stat.name.match(/[A-Z]+?\-\d+?\.csv/gi)) {
      var filename = root + '/' + stat.name;
      files.push(fs.createReadStream(filename));
    }
    next();
  });

  csvWalker.on('end', function () {
    var left = files.length;
    files.forEach(function (file) {
      var csvConverter = new csvtojson({constructResult: true});

      csvConverter.on('end_parsed', function (data) {
        if (callback) {
          // Collect this file's rows first so the last file isn't dropped.
          ret = ret.concat(data);
          if (--left === 0) {
            // Ensure rows are unique.
            ret = _.uniq(ret, function (x) {
              return JSON.stringify(x);
            });
            // Keep only rows with the required fields; skip repeated header rows.
            ret = _.filter(ret, function (x) {
              return (
                 x['Date']    !== undefined
              && x['Balance'] !== undefined
              && x['Date']    !== 'Date'
              && x['Balance'] !== 'Balance'
                );
            });
            callback(ret);
          }
        }
      });

      file.pipe(csvConverter);
    });
  });

}

This assumes that the data the converter passes to 'end_parsed' is always an array. Each file gets its own Converter instance, the per-file results are concatenated, and the uniqueness and required-field filters run once over the combined data before the callback is executed.
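
One edge case worth guarding against (assuming the folder can legitimately contain no matching files): if nothing matches the pattern, left starts at 0 and the callback never fires, so the /get request hangs. A small check at the top of the 'end' handler, sketched here, covers that:

  csvWalker.on('end', function () {
    // No matching CSVs: report an empty result instead of never calling back.
    if (files.length === 0) {
      if (callback) callback([]);
      return;
    }
    // ...per-file converters as above...
  });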