I currently upload single objects to S3 like so:
// Upload a single object to S3.
// NOTE: the original callback ignored `err` entirely (its only statement was a
// commented-out console.log), so failed uploads were silently dropped.
var options = {
  Bucket: bucket,
  Key: s3Path,
  Body: body,
  ACL: s3FilePermissions,
};
S3.putObject(options, function (err, data) {
  if (err) {
    // Surface the failure instead of swallowing it.
    console.error('Failed to upload ' + s3Path + ':', err);
  }
});
But when I have a large resources folder for example, I use the AWS CLI tool.
I was wondering, is there a native way to do the same thing with the aws sdk (upload entire folders to s3)?
To upload folders and files to an S3 bucket: sign in to the AWS Management Console and open the Amazon S3 console at https://console.aws.amazon.com/s3/. In the Buckets list, choose the name of the bucket that you want to upload your folders or files to, then choose Upload.
Old-school recursive way I whipped up in a hurry. Only uses core node modules and standard AWS sdk.
var AWS = require('aws-sdk');
var path = require('path');
var fs = require('fs');

/**
 * Recursively uploads every file under a local directory to an S3 bucket,
 * preserving the relative directory layout as the object key.
 *
 * Fixes over the original:
 *  - Keys are built with path.relative() and normalized to forward slashes,
 *    so the code also works on Windows (path.join there produces backslashes,
 *    which S3 would treat as literal key characters) and tolerates a trailing
 *    slash on the directory argument (the old substring(length + 1) did not).
 *  - Files are streamed via fs.createReadStream instead of being read whole
 *    into memory with readFileSync, so large files don't exhaust the heap.
 *
 * @param {string} localDir   - local folder to upload (first positional arg,
 *                              misleadingly named `s3Path` in the original)
 * @param {string} bucketName - destination S3 bucket name
 */
const uploadDir = function (localDir, bucketName) {
  let s3 = new AWS.S3();

  // Depth-first synchronous walk; invokes callback(filePath, stat) per file.
  function walkSync(currentDirPath, callback) {
    fs.readdirSync(currentDirPath).forEach(function (name) {
      var filePath = path.join(currentDirPath, name);
      var stat = fs.statSync(filePath);
      if (stat.isFile()) {
        callback(filePath, stat);
      } else if (stat.isDirectory()) {
        walkSync(filePath, callback);
      }
    });
  }

  walkSync(localDir, function (filePath, stat) {
    // path.relative is robust to trailing separators; split/join converts
    // any platform separator to the '/' convention S3 keys use.
    let bucketPath = path.relative(localDir, filePath).split(path.sep).join('/');
    let params = {
      Bucket: bucketName,
      Key: bucketPath,
      Body: fs.createReadStream(filePath),
    };
    s3.putObject(params, function (err, data) {
      if (err) {
        console.log(err);
      } else {
        console.log('Successfully uploaded ' + bucketPath + ' to ' + bucketName);
      }
    });
  });
};

uploadDir("path to your folder", "your bucket name");
Special thanks to Ali from this post for helping to get the filenames.
If you found this helpful, you can donate to us via PayPal or buy us a coffee so we can maintain and grow. Thank you!
Donate Us With