stream response from nodejs request to s3
You want to use the response
object if you're manually listening for the response stream:
var AWS = require('aws-sdk');
var req = require('request');

// myBucket, s3Key, and url are assumed to be defined elsewhere
var s3 = new AWS.S3({params: {Bucket: myBucket, Key: s3Key}});

var imageStream = req.get(url)
    .on('response', function (response) {
        if (response.statusCode === 200) {
            // CacheControl is a header string; 5184000 seconds is roughly 2 months
            s3.upload({Body: response, ACL: 'public-read', CacheControl: 'max-age=5184000'}, function (err, data) {
                console.log(err, data);
            });
        }
    });
Since I had the same problem as @JoshSantangelo (zero-byte files on S3) with the request and aws-sdk versions I was using at the time, let me add an alternative solution using Node's own http module (caveat: simplified code from a real-life project, not tested separately):
var http = require('http');

// Assumes `s3` is an AWS.S3 instance with the target Bucket already
// configured, as in the snippet above.
function copyToS3(url, key, callback) {
    http.get(url, function onResponse(res) {
        if (res.statusCode >= 300) {
            return callback(new Error('error ' + res.statusCode + ' retrieving ' + url));
        }
        s3.upload({Key: key, Body: res}, callback);
    })
    .on('error', function onError(err) {
        return callback(err);
    });
}
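For completeness, a minimal usage sketch (the URL and key below are hypothetical placeholders):

// Copy a remote file into the bucket configured on the `s3` client.
copyToS3('http://example.com/image.jpg', 'images/image.jpg', function (err, data) {
    if (err) return console.error('copy failed:', err);
    console.log('uploaded to', data.Location);
});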
As far as I can tell, the problem is that request does not fully support the current Node streams API, while aws-sdk depends on it.
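If you would rather keep using request, one workaround (my own sketch, not from the answers above) is to pipe the response through a stream.PassThrough, so that aws-sdk receives a Body that behaves like a current-API stream. This assumes the same `s3` client as in the snippets above:

var request = require('request');
var stream = require('stream');

// Sketch: bridge request's older stream behaviour to the streams API
// that s3.upload() expects by piping through a PassThrough.
// Note: this does not filter out non-200 responses.
function copyViaPassThrough(url, key, callback) {
    var pass = new stream.PassThrough();

    request.get(url)
        .on('error', callback)
        .pipe(pass);

    s3.upload({Key: key, Body: pass}, callback);
}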
References:
- request issue about the readable event not working right
- generic issue for "new streams" support in request
- usage of the readable event in aws-sdk