My Node.js code opens a local PNG file on my server, tmp.png, and then tries to save it to Amazon S3. I keep running into problems, and I suspect it has something to do with the encoding. The only way it works is with base64 encoding (which I don't want for my photos).
// Read ./tmp.png into memory and upload it to S3 via awssum.
// NOTE(review): passing the raw Buffer as Body is what triggered the S3
// request timeout; the stream-based approach (fs.stat + fs.createReadStream)
// shown in the second example is the known-working alternative.
var fs = require('fs'); // was an implicit global: `fs = require('fs')`
var awssum = require('awssum');
var amazon = awssum.load('amazon/amazon');
var s3Service = awssum.load('amazon/s3');
var s3 = new s3Service('mykey', 'mysecret', 'account', amazon.US_WEST_1);

fs.readFile('./tmp.png', function (err, data) {
    if (err) {
        console.log("There was an error opening the file");
        // Surface the underlying error instead of swallowing it.
        console.log(err);
    } else {
        s3.PutObject({
            BucketName    : 'my-bucket',
            ObjectName    : 'tmp.png',
            ContentType   : 'image/png',
            // Buffer.length is the byte count, which is what S3's
            // Content-Length header requires.
            ContentLength : data.length,
            Body          : data
        }, function (err, data) {
            if (err) {
                console.log("There was an error writing the data to S3:");
                console.log(err);
            } else {
                console.log("Your data has been written to S3:");
                console.log(data);
            }
        });
    }
});
Obviously, my-bucket is actually my unique bucket name. The message I get back from Amazon is a request timeout:
Your socket connection to the server was not read from or written to within the timeout period. Idle connections will be closed.
Looks like I found an example in the docs that does what I need it to. The key was to use fs.stat to get the file size and fs.createReadStream to read in the file:
// Stream-based upload: S3 requires a Content-Length header, so stat the file
// first to learn its size, then hand PutObject a read stream rather than an
// in-memory buffer.
fs.stat(path, function (err, stats) {
    if (err) {
        inspect(err, 'Error reading file');
        return;
    }

    console.log(stats.size);

    var putOpts = {
        BucketName    : 'my-bucket',
        ObjectName    : 'test.png',
        ContentType   : 'image/png',
        ContentLength : stats.size,
        Body          : fs.createReadStream(path)
    };

    s3.PutObject(putOpts, function (err, data) {
        console.log("\nputting an object to my-bucket - expecting success");
        inspect(err, 'Error');
        inspect(data, 'Data');
    });
});