Why is Node.js blocking an asynchronous file operation?

I was under the impression that although Node.js is single-threaded, its file I/O is asynchronous.

// Express demo server used to investigate apparent blocking of async file I/O.
var express = require('express');
var http = require('http');  // NOTE(review): required but never used anywhere in this file — candidate for removal
var fs = require('fs');      // used by the streaming routes below
var app = express();

// Bind to port 1337; the callback fires once the listening socket is ready.
var server = app.listen(1337, function() {
    console.log('Listening on port %d', server.address().port);
});

// GET /home — serve the first static test page.
app.get('/home', function (req, res) {
  console.log("/home");
  // sendFile handles headers, streaming, and ending the response for us.
  res.sendFile('C:\\Users\\Owner\\AppData\\Roaming\\npm\\test.html');
});

// GET /home2 — serve the second static test page.
app.get('/home2', function (req, res) {
  console.log("/home2");
  // sendFile handles headers, streaming, and ending the response for us.
  res.sendFile('C:\\Users\\Owner\\AppData\\Roaming\\npm\\test2.html');
});

// GET /music — stream the MP4 file to the client.
app.get('/music', function(req, res){
  console.log("/music")
  var readStream = fs.createReadStream('C:\\Users\\Owner\\AppData\\Roaming\\npm\\01 Give Life Back To Music.mp4');
  console.log("/afterRead");

  res.setHeader('Content-Type', 'video/mp4');

  // pipe() implements the pause-on-saturation / resume-on-drain backpressure
  // dance that the original did by hand, and calls res.end() when the file
  // is exhausted.
  readStream.pipe(res);

  readStream.on('end', function () {
    console.log('end');
  });

  // BUG FIX: without an 'error' listener, a failure such as a missing file
  // raises an unhandled 'error' event and crashes the whole process.
  readStream.on('error', function (err) {
    console.error(err);
    if (!res.headersSent) {
      res.writeHead(500);
    }
    res.end();
  });

  // BUG FIX: if the client disconnects mid-stream, destroy the read stream
  // so the file descriptor is released instead of leaking.
  res.on('close', function () {
    readStream.destroy();
  });
});

// GET /music2 — stream the same MP4 file under a second URL.
app.get('/music2', function(req, res){
  console.log("/music2")
  var readStream = fs.createReadStream('C:\\Users\\Owner\\AppData\\Roaming\\npm\\01 Give Life Back To Music.mp4');
  console.log("/afterRead");

  res.setHeader('Content-Type', 'video/mp4');

  // pipe() implements the pause-on-saturation / resume-on-drain backpressure
  // dance that the original did by hand, and calls res.end() when the file
  // is exhausted.
  readStream.pipe(res);

  readStream.on('end', function () {
    console.log('end');
  });

  // BUG FIX: without an 'error' listener, a failure such as a missing file
  // raises an unhandled 'error' event and crashes the whole process.
  readStream.on('error', function (err) {
    console.error(err);
    if (!res.headersSent) {
      res.writeHead(500);
    }
    res.end();
  });

  // BUG FIX: if the client disconnects mid-stream, destroy the read stream
  // so the file descriptor is released instead of leaking.
  res.on('close', function () {
    readStream.destroy();
  });
});

Above is the server code, here is my HTML:

<html>
    <script>

    </script>
    <body>
        <video width="320" height="240" controls>
          <!-- BUG FIX: the server route is /music, not /music1 -->
          <source src="http://localhost:1337/music" type="video/mp4">
          Your browser does not support the video tag.
        </video>
    </body>
</html>

<html>
<script>
</script>
<body>
    <!-- Second test page: plays the same MP4 via the /music2 route. -->
    <video width="320" height="240" controls>
      <source src="http://localhost:1337/music2" type="video/mp4">
      Your browser does not support the video tag.
    </video>
</body>
</html>

I would expect to be able to request localhost:1337/music more than once concurrently, but when I open localhost:1337/home in two tabs in Google Chrome (or Firefox, or Internet Explorer 11), the second page doesn't load the video until all of the bytes have been transferred for the first request to localhost:1337/music. The code is asynchronous, and I know the server itself isn't fully blocked, because while the first stream is in progress a request to .../home2 still serves up its file — which it couldn't do if the server were blocking.

Does someone know why asynchronous code is blocking? I verified via the developer tools in Chrome that /music and /music2 are being called (from the browser perspective). I also ran into the same problem not using Express, so I am sure it isn't the framework.

  • Also, I have console logging. When the second page loads, I see a second /home in the console along with several "streaming data" lines, because the server is still streaming data for the first page. Chrome shows that the /music request went out, but the console prints nothing for it, implying the handler was never even executed — so something else must be blocking it.

Chrome will only download a single copy of a given file at a time (i.e. if you download a really big file, subsequent requests for that same file get queued, even across tabs). The developer tools show the second request as pending, but the "pending" state is really a limitation imposed by Chrome, not by the Node.js server. The queueing is keyed on the URL, which is why I was able to download the same file through two different URLs, and also why I could make other requests to the server while the file was downloading.

You might be running into your browser's connection limit. Try upping the limit (if you can, I think you can do this in Firefox at least) and try again.

Also, you can just pipe the file to the response instead of manually listening for 'data' and 'end'. For example:

// GET /music2 — stream the MP4 via pipe(), which handles backpressure itself.
app.get('/music2', function(req, res){
  console.log("/music2")
  var readStream = fs.createReadStream('C:\\Users\\Owner\\AppData\\Roaming\\npm\\01 Give Life Back To Music.mp4');
  console.log("/afterRead");
  readStream.pipe(res);
  // BUG FIX: without an 'error' listener, a missing file raises an
  // unhandled 'error' event and crashes the whole process.
  readStream.on('error', function (err) {
    console.error(err);
    if (!res.headersSent) {
      res.writeHead(500);
    }
    res.end();
  });
});

It seems that createReadStream is blocking on the file. I tried this code:

var http = require('http');
var fs = require('fs');
var qs = require('querystring');

// Minimal streaming server: GET /?q=1 streams ./response1.json, plain GET /
// streams ./response.json.
http.createServer(function (req, res) {
    var str = req.url.split('?')[1];
    var query = qs.parse(str);

    var readStream = fs.createReadStream('./response' + (query.q || '') + '.json');
    readStream.on('data', function(data) {
        var flushed = res.write(data);
        console.log( 'streaming data' );
        // Pause reading while the response's write buffer is saturated.
        if(!flushed){
            readStream.pause();
        }
    });

    // Resume reading once the response buffer has drained.
    res.on('drain', function() {
        readStream.resume();
    });

    // BUG FIX: the original never ended the response, so every connection
    // was left hanging open after the file finished streaming.
    readStream.on('end', function() {
        res.end();
    });

    // BUG FIX: an unhandled 'error' event (e.g. a missing file) would crash
    // the whole process.
    readStream.on('error', function(err) {
        console.error(err);
        res.statusCode = 500;
        res.end();
    });
}).listen(1337, '127.0.0.1');

and opened two tabs with localhost and localhost/?q=1; it read the two separate files just fine, but if you try to read one file from two requests at once, the second request hangs.