node.js & S3 / Write to S3 with knox

I'm trying to write to S3 with knox, using the following code:

var knox = require('knox');
var fs = require('fs');

var client = knox.createClient({
    key: 'key'
    , secret: 'pass'
    , bucket: S3_BUCKET
});

fs.stat("/opt/files/" + url, function(err, stats) {
    if (stats != null && stats.size != 0) {
        var req = client.put(url, {
            'Content-Length': stats.size
        });
        req.on('error', function(err) {
            console.log(err);
        });
        var readstr = fs.createReadStream("/opt/files/" + url);
        readstr.pipe(req);
        readstr.on('error', function(err) {
            console.log(err);
        });
    }
});

It gives me the following error for big files (I tested with a 900 MB file):

{ [Error: write ECONNRESET] code: 'ECONNRESET', errno: 'ECONNRESET', syscall: 'write' }

The credentials are OK: I can successfully authenticate to Amazon with the same key & secret access key using other npm packages. In addition, I can successfully upload small files with Knox.

I know it's a connection error, but I don't understand why it happens or how I can solve it.

You are probably hitting the "large file" limit that exists on Amazon's side.

According to their FAQ, for objects larger than 100 MB, users should use "multipart upload".

For this you could use the "knox-mpu" module, and your example would become:

var knox = require('knox');
var MultiPartUpload = require('knox-mpu');

var client = knox.createClient({
    key: 'key'
    , secret: 'pass'
    , bucket: S3_BUCKET
});

var upload = new MultiPartUpload(
    {
        client: client,
        objectName: url,
        file: '/opt/files/' + url
    },
    function(err, body) {
        if (err) {
            console.log(err);
        } else {
            console.log(body);
        }
    }
);
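
The reason this helps: a multipart upload sends the file as a series of smaller, independent part requests that S3 reassembles once they have all completed, so a connection reset costs only the part in flight instead of aborting the whole 900 MB transfer.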

You aren't doing anything with the response. Try adding a listener for the 'response' event on the request, like:

req.on('response', function(res) {
    // ...
});
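
For instance, here is a minimal sketch; the status-code check and log messages are illustrative, and it assumes the request emits a standard http.IncomingMessage as its response:

req.on('response', function(res) {
    if (res.statusCode === 200) {
        console.log('saved to %s', req.url);
    } else {
        console.log('S3 responded with status %d', res.statusCode);
    }
    // Drain the response body so the socket is released.
    res.resume();
});

This also gives you visibility into failures that never fire an 'error' event, such as S3 rejecting the request with a 403 or 400 status.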