I'm trying to find some example code that utilizes node.js, Express, and knox.
The docs for Knox only give clear examples of how to upload a file already stored in the file system. https://github.com/learnboost/knox#readme
Additionally, there are a number of simple tutorials (even in Express itself) on how to upload files directly to Express and save them to the file system.
What I'm having trouble finding is an example that takes a client upload to a node server and streams the data directly to S3, rather than storing it in the local file system first.
Can someone point me to a gist or other example that contains this kind of information?
All of the previous answers involve having the upload pass through your node.js server which is inefficient and unnecessary. Your node server does not have to handle the bandwidth or processing of uploaded files whatsoever because Amazon S3 allows uploads direct from the browser.
Have a look at this blog post: http://blog.tcs.de/post-file-to-s3-using-node/
I have not tried the code listed there, but having looked over it, it appears solid and I will be attempting an implementation of it shortly and will update this answer with my findings.
Here is an example of streaming directly to s3 without ever touching your hard drive, using multiparty and knox:
var http = require('http')
, util = require('util')
, multiparty = require('multiparty')
, knox = require('knox')
, Batch = require('batch')
, PORT = process.env.PORT || 27372
// Knox S3 client; credentials and target bucket all come from the environment.
// NOTE(review): secure:false means plain HTTP to S3 — confirm that is intended.
var s3ClientOptions = {
  secure: false,
  key: process.env.S3_KEY,
  secret: process.env.S3_SECRET,
  bucket: process.env.S3_BUCKET,
};
var s3Client = knox.createClient(s3ClientOptions);
// ByteCounter: a minimal Writable sink that counts how many bytes flow
// through it; used below only to log the uploaded part's size.
// Node >= 0.10 ships streams2 in core, so the built-in 'stream' module
// replaces the third-party 'readable-stream' polyfill with no behavior change.
var Writable = require('stream').Writable;
util.inherits(ByteCounter, Writable);

/**
 * Writable stream that tallies the total byte length written to it.
 * @param {Object} [options] - standard stream.Writable options, forwarded as-is
 */
function ByteCounter(options) {
  Writable.call(this, options);
  this.bytes = 0; // running total of bytes seen so far
}

// streams2 write hook: add the chunk's length and signal completion immediately.
ByteCounter.prototype._write = function(chunk, encoding, cb) {
  this.bytes += chunk.length;
  cb();
};
// HTTP server with three routes:
//   GET  /        - serve a bare-bones multipart upload form
//   POST /upload  - stream the uploaded file part straight to S3 (never
//                   touching the local file system)
//   anything else - 404
var server = http.createServer(function(req, res) {
if (req.url === '/') {
// Minimal HTML form: a text field "path" (the S3 destination key) and a
// file field "upload".
res.writeHead(200, {'content-type': 'text/html'});
res.end(
'<form action="/upload" enctype="multipart/form-data" method="post">'+
'<input type="text" name="path"><br>'+
'<input type="file" name="upload"><br>'+
'<input type="submit" value="Upload">'+
'</form>'
);
} else if (req.url === '/upload') {
// Headers for the S3 PUT; the stored object will be world-readable.
var headers = {
'x-amz-acl': 'public-read',
};
var form = new multiparty.Form();
// Batch synchronizes two async results (the "path" field and the file
// part) and fires its end callback once BOTH have arrived, regardless of
// the order the multipart parser emits them in.
var batch = new Batch();
batch.push(function(cb) {
// Job 1: resolve with the destination path, normalized to a leading slash.
// NOTE(review): assumes the client always sends a "path" field; if it
// doesn't, this job never completes and batch.end never fires.
form.on('field', function(name, value) {
if (name === 'path') {
var destPath = value;
if (destPath[0] !== '/') destPath = '/' + destPath;
cb(null, destPath);
}
});
});
batch.push(function(cb) {
// Job 2: resolve with the first part that is an actual file upload; parts
// without a filename are plain form fields and are ignored here.
form.on('part', function(part) {
if (! part.filename) return;
cb(null, part);
});
});
batch.end(function(err, results) {
if (err) throw err;
// A file part arrived, so the "no uploaded file" guard below must not
// fire when the form finishes parsing.
form.removeListener('close', onEnd);
var destPath = results[0]
, part = results[1];
// Tee the part through a byte counter purely for the size log below.
var counter = new ByteCounter();
part.pipe(counter); // need this until knox upgrades to streams2
// NOTE(review): part.byteCount is presumably multiparty's precomputed
// part size, required so knox can do a single streaming PUT — confirm
// against the multiparty docs.
headers['Content-Length'] = part.byteCount;
s3Client.putStream(part, destPath, headers, function(err, s3Response) {
if (err) throw err;
// Relay S3's status code and response body straight back to the client.
res.statusCode = s3Response.statusCode;
s3Response.pipe(res);
console.log("https://s3.amazonaws.com/" + process.env.S3_BUCKET + destPath);
});
part.on('end', function() {
console.log("part end");
console.log("size", counter.bytes);
});
});
// If parsing finishes before a file part was seen, fail loudly.
form.on('close', onEnd);
form.parse(req);
} else {
res.writeHead(404, {'content-type': 'text/plain'});
res.end('404');
}
// Guard: reached only when the form closed without any file part.
// NOTE(review): throwing here (and in the callbacks above) crashes the
// whole process; a production server should respond with a 4xx/5xx instead.
function onEnd() {
throw new Error("no uploaded file");
}
});
// Start accepting connections; PORT comes from the environment with a
// hard-coded local default (see the declarations at the top of the file).
server.listen(PORT, function() {
  var url = 'http://0.0.0.0:' + PORT + '/';
  console.info('listening on ' + url);
});
example taken from https://github.com/superjoe30/node-multiparty/blob/master/examples/s3.js
If you found this helpful, you can donate to us via PayPal or buy us a coffee so we can maintain and grow! Thank you!
Donate Us With