Node.js: Read large text file only partially

I have to read a very large CSV file (over 80 MB and growing).

I usually only need to parse the last 1% of the file, but getting to that part takes a few minutes. Is there a way to start reading at line N? Or alternatively, could I read the stream from end to start?

I'm currently using fast-csv to read the file:

    var fs = require('fs');
    var csv = require('fast-csv');
    var moment = require('moment');

    // convert csv into postgres copy file
    csv.fromPath(filepath, {
        headers: false
    }).transform(function(data) {
        // check if record meets condition
        var dt = parseInt(data[0], 10); // first column is a unix timestamp (seconds)
        var mom = moment(new Date(dt * 1000));
        if (mom.isAfter('2014-01-01 00:00')) {
            // transform data and return object
            return transform(data);
        }
        return null; // null drops the record from the output
    }).pipe(csv.createWriteStream({
        headers: true
    })).pipe(fs.createWriteStream(outpath, {
        encoding: "utf8"
    })).on('finish', function() {
        // do postgres import
    });

Using a combination of Node's fs.stat, fs.open, and fs.read, you can find the size of the file and read just the last 1% into a buffer:

    var fs = require('fs');
    var filename = 'csv.csv';

    fs.stat(filename, function(err, stat) {

        if (err) throw err;

        var bytesToRead = Math.ceil(0.01 * stat.size); // last 1%
        var startingPosition = stat.size - bytesToRead;
        var readBuffer = Buffer.alloc(bytesToRead); // new Buffer() is deprecated and unsafe

        fs.open(filename, 'r', function(err, fd) {

            if (err) throw err;

            fs.read(fd, readBuffer, 0, bytesToRead, startingPosition,
                function(err, bytesRead) {
                    if (err) throw err;
                    fs.close(fd, function() {});
                    // only stringify the bytes that were actually read
                    console.log(readBuffer.toString('utf8', 0, bytesRead));
                });
        });

    });
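
One caveat: startingPosition will almost always land in the middle of a record, so before handing the text to a CSV parser you would discard everything up to and including the first newline. A minimal sketch of that step, continuing inside the fs.read callback above:

    var text = readBuffer.toString('utf8', 0, bytesRead);
    var firstNewline = text.indexOf('\n');
    // drop the partial first record; keep only complete lines
    var completeLines = firstNewline === -1 ? '' : text.slice(firstNewline + 1);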

You can't start reading at line N directly, because you would have to read through the whole file anyway to find out where the newline characters are.
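
If you would rather keep the streaming fast-csv pipeline from the question, the two ideas combine: stat the file, open a read stream near the end using the start option of fs.createReadStream, skip the partial first line, and parse the rest. A rough sketch, assuming a fast-csv version that exposes fromStream alongside the fromPath used above, and Node ≥ 4 for Buffer#indexOf:

    var fs = require('fs');
    var stream = require('stream');
    var csv = require('fast-csv');

    var filename = 'csv.csv';

    fs.stat(filename, function(err, stat) {
        if (err) throw err;

        var startingPosition = stat.size - Math.ceil(0.01 * stat.size);
        var sawNewline = false;

        // drops bytes up to and including the first newline, since
        // startingPosition almost certainly lands mid-record
        var skipFirstPartialLine = new stream.Transform({
            transform: function(chunk, encoding, done) {
                if (!sawNewline) {
                    var i = chunk.indexOf(10); // byte value of '\n'
                    if (i === -1) return done(); // still inside the partial line
                    sawNewline = true;
                    chunk = chunk.slice(i + 1);
                }
                this.push(chunk);
                done();
            }
        });

        csv.fromStream(
            fs.createReadStream(filename, { start: startingPosition })
                .pipe(skipFirstPartialLine),
            { headers: false }
        ).on('data', function(data) {
            // each data here is a complete record from roughly the last 1% of the file
        });
    });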