OK, this is not going to be easy to explain, but I'll give it my best.
We have a file sharing solution serving files (from small images to big 3D drawings/videos of 5-10 GB+) running on an unusual network setup (IT Security request).
We have the internal zone (LAN) with a NODE.js server:
We then have an external zone (DMZ) where the proxy and web-server live.
So, we need to transfer a file to the client connected to the web-server. The library I've used so far is BinaryJS, which works well, but we have an extremely slow transfer speed of about 12MB/s, and this drops when new clients connect (usually to around 4-6MB/s).
Request flow: - Client (browser) -> Web-Server over https
From testing it seems like we lose 50% of the transfer speed for each server (backend, proxy, web) we go through, and I can't figure out why. We have enough bandwidth and disk speed to saturate the Gigabit-connected clients. Also, the memory and CPU are not working that hard.
Code: Backend (basic) - DB queries for the filepath etc. have been removed:
// Backend (LAN): connects out to the proxy and serves file streams on demand.
// NOTE(review): the DB queries that resolve msgMeta.filepath were removed
// from this excerpt, which is likely where the stray closing brace came from.
client = BinaryClient('wss://'+config.proxy+':'+config.port+'/binary');
client.on('stream', function(msgStream, msgMeta){
  console.log("Got stream with meta: ", msgMeta);
  var stream = require('fs').createReadStream(msgMeta.filepath);
  // Fix: without an 'error' handler on the read stream, a bad path or a
  // permission error throws/hangs instead of closing the binary stream.
  stream.on('error', function(e){
    console.log("File read stream failed: ", e);
    msgStream.end();
  });
  stream.pipe(msgStream);
  msgStream.on('error', function(e){
    console.log("Bin stream failed: ", e);
  });
  msgStream.on('close', function(){
    console.log("Stream closed");
  });
  stream.on('end', function(){
    console.log("Ending file stream of file: ", msgMeta.meta.itemID);
    msgStream.end();
  });
});
Proxy:
// Proxy (DMZ): accepts connections from both the backend ("master") and the
// web-server, and relays GET stream requests from the latter to the former.
var binServer = BinaryServer({port:8001});
binServer.on('connection', function(client){
  client.on('stream', function(stream, meta){
    if(meta.register == true){
      if(meta.who == "master"){
        config.masterBinary = client;
        // Fix: unregister the master when its connection drops, so later
        // requests don't pipe into a dead socket.
        client.on('close', function(){
          if(config.masterBinary === client){
            config.masterBinary = null;
          }
        });
      }
    } else {
      // It's a stream request.
      if(meta.what == "GET"){
        // Fix: guard against no backend having registered (yet); the
        // original dereferenced config.masterBinary unconditionally.
        if(!config.masterBinary){
          console.log("No master registered; dropping request: ", meta);
          stream.end();
          return;
        }
        config.masterBinary.createStream(meta).pipe(stream).on('close', function(){
          console.log("CLOSED");
        }).on('end', function(){
          console.log("ENDED");
        });
      }
    }
  });
});
Web-server:
client = binaryClient('wss://'+config.proxy+':'+config.port+'/binary');
client.on('open', function(){
ready = true;
})
client.on('close', function(){
ready = false;
})
exports.get = function(file, res, cb){
// This get called from the Express request.. Client will wait for the stream to start
console.log("Sending request to backend: ", file);
if(file.itemID || file.requestID){
client.createStream({filepath:"C:/SomePathToAFile", register:false, what:"GET"}).pipe(res);
} else {
cb("Parameter not present", false);
}
}
I've been wondering if I should try to write my own TCP server and clients to better fit what we are trying to do. Unfortunately, I'm quite new to Node and have not figured out how to implement an EventEmitter on top of TCP so I can send data with meta for identification.
Is there a better way, or have I completely misunderstood the BinaryJS server?
Thanks for reading!