I'm trying to upload all the files in a directory to my S3 bucket using Node.js. I'm able to upload one file at a time if I explicitly give the file path plus a literal string for the Key field.
Below is the script I'm using:
var AWS = require('aws-sdk'),
    fs = require('fs');

// For dev purposes only
AWS.config.update({ accessKeyId: '...', secretAccessKey: '...' });

// regex to match
var re = /\.txt$/;

// ensure that this file is in the directory of the files you want to run the cronjob on
fs.readdir(".", function (err, files) {
    if (err) {
        console.log("Could not list the directory.", err)
        process.exit(1)
    }
    var matches = files.filter(function (text) { return re.test(text) })
    console.log("These are the files you have", matches)
    var numFiles = matches.length
    if (numFiles) {
        // Read in each file and store it to S3
        for (var i = 0; i < numFiles; i++) {
            var fileName = matches[i]
            fs.readFile(fileName, function (err, data) {
                if (err) { throw err }
                var base64data = new Buffer(data, 'binary')
                var s3 = new AWS.S3()
                s3.putObject({
                    'Bucket': 'noonebetterhaventakenthisbucketnname',
                    'Key': fileName,
                    'Body': base64data,
                    'ACL': 'public-read'
                }, function (resp) {
                    console.log(arguments)
                    console.log('Successfully uploaded, ', fileName)
                })
            })
        }
    }
})
It produces this output for each file it attempts to upload to S3:
These are the files you have [ 'test.txt', 'test2.txt' ]
{ '0': null,
'1': { ETag: '"2cad20c19a8eb9bb11a9f76527aec9bc"' } }
Successfully uploaded, test2.txt
{ '0': null,
'1': { ETag: '"2cad20c19a8eb9bb11a9f76527aec9bc"' } }
Successfully uploaded, test2.txt
Edit: updated to store matches[i] in a fileName variable and read the Key from that instead of from matches[i] directly.
Why does it only upload test2.txt, and how do I get it to upload each file in my matches variable?
I referenced Asynchronously reading and caching multiple files in nodejs to arrive at a solution.
tl;dr: it's a scope issue. var is function-scoped, so the loop shares a single fileName binding; by the time the asynchronous readFile and s3.putObject callbacks run, the loop has already finished and that binding holds the last match (test2.txt), which is why every upload lands under that key. The fix is to wrap the variables in a closure: move the readFile and s3.putObject calls into a function that takes the file name as a parameter, and call that function from inside the for loop, so each call captures its own copy of the name.
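To see the pitfall in isolation, here's a minimal sketch with setTimeout standing in for the asynchronous S3 calls (the file names here are just illustrative):

// With `var`, every callback closes over the same binding.
var names = ['test.txt', 'test2.txt'];
for (var j = 0; j < names.length; j++) {
    var name = names[j];
    setTimeout(function () {
        console.log(name); // logs "test2.txt" twice
    }, 0);
}

// Passing the value into a function gives each callback its own binding.
function later(name) {
    setTimeout(function () {
        console.log(name); // logs "test.txt", then "test2.txt"
    }, 0);
}
for (var k = 0; k < names.length; k++) {
    later(names[k]);
}

The same restructuring applied to the upload script: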
var AWS = require('aws-sdk'),
    fs = require('fs');

// For dev purposes only
AWS.config.update({ accessKeyId: '...', secretAccessKey: '...' });

var s3 = new AWS.S3();

// Each call to read() gets its own `file` binding, so the async callbacks
// below always refer to the correct file name.
function read(file) {
    fs.readFile(file, function (err, data) {
        if (err) { throw err }
        // fs.readFile already hands back a Buffer, so it can be passed
        // straight through as the Body
        s3.putObject({
            'Bucket': 'noonebetterhaventakenthisbucketnname',
            'Key': file,
            'Body': data,
            'ACL': 'public-read'
        }, function (err, resp) {
            if (err) { throw err }
            console.log('Successfully uploaded', file, resp)
        })
    })
}

// regex to match
var re = /\.txt$/;

// ensure that this file is in the directory of the files you want to run the cronjob on
fs.readdir(".", function (err, files) {
    if (err) {
        console.log("Could not list the directory.", err)
        process.exit(1)
    }
    var matches = files.filter(function (text) { return re.test(text) })
    console.log("These are the files you have", matches)
    var numFiles = matches.length
    if (numFiles) {
        // Kick off one read-and-upload per matching file
        for (var i = 0; i < numFiles; i++) {
            read(matches[i])
        }
    }
})
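As an aside, on newer Node versions (10+) the closure question goes away on its own: let and const are block-scoped, so each loop iteration gets its own binding, and the AWS SDK v2 exposes a .promise() variant of every call that works with async/await. A rough sketch under those assumptions, using the same bucket:

const AWS = require('aws-sdk');
const fs = require('fs').promises;

AWS.config.update({ accessKeyId: '...', secretAccessKey: '...' });
const s3 = new AWS.S3();

async function uploadAll() {
    const files = await fs.readdir('.');
    const matches = files.filter((name) => /\.txt$/.test(name));
    // const is block-scoped, so each iteration gets its own `file`
    for (const file of matches) {
        const data = await fs.readFile(file);
        await s3.putObject({
            Bucket: 'noonebetterhaventakenthisbucketnname',
            Key: file,
            Body: data,
            ACL: 'public-read'
        }).promise();
        console.log('Successfully uploaded', file);
    }
}

uploadAll().catch(console.error);

Note that the sequential await uploads the files one at a time, whereas the callback version above fires all the uploads in parallel.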