I am playing with AWS Lambda with Node.js. I have created a Lambda function and configured it with an S3 event.
I want to extract a zip file that is uploaded to S3 and upload the extracted files to another folder in the same bucket.
I am getting the bucket and file information from the following code, but after that I don't know how to extract the file and upload it to S3.
Any suggestion or chunk of code will be helpful for me.
'use strict';
console.log('Loading function to get all latest object from S3 service');
const aws = require('aws-sdk');
const s3 = new aws.S3({ apiVersion: '2006-03-01' });
exports.handler = (event, context, callback) => {
console.log('Received event:', JSON.stringify(event, null, 2));
// Get the object from the event and show its content type
const bucket = event.Records[0].s3.bucket.name;
const key = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, ' '));
const params = {
Bucket: bucket,
Key: key,
};
s3.getObject(params, (err, data) => {
if (err) {
console.log(err);
const message = `Error getting object ${key} from bucket ${bucket}. Make sure they exist and your bucket is in the same region as this function.`;
console.log(message);
callback(message);
} else {
console.log('CONTENT TYPE:', data.ContentType);
callback(null, data);
}
});
};
You can use zlib to decompress a gzip (.gz) buffer that you get from S3. Note that zlib handles gzip/deflate streams only — it cannot read ZIP archives (.zip); for those you would need a library such as adm-zip or unzipper.
// NOTE: zlib.gunzip decompresses gzip (.gz) data only — it cannot extract
// ZIP archives (.zip). For a true .zip file, use a library such as adm-zip
// or unzipper instead of zlib.
s3.getObject(params, (err, data) => {
  if (err) {
    console.log(err);
    const message = `Error getting object ${key} from bucket ${bucket}. Make sure they exist and your bucket is in the same region as this function.`;
    console.log(message);
    callback(new Error(message));
  } else {
    // data.Body is a Buffer containing the (gzip-compressed) object bytes.
    zlib.gunzip(data.Body, function (gunzipErr, result) {
      if (gunzipErr) {
        console.log(gunzipErr);
        // Propagate the failure instead of silently swallowing it —
        // otherwise the Lambda invocation would appear to hang/succeed.
        callback(gunzipErr);
      } else {
        const extractedData = result;
        s3.putObject({
          Bucket: "bucketName",
          Key: "filename",
          Body: extractedData,
          ContentType: 'content-type'
        }, function (putErr) {
          // Distinguish success from failure: the original logged the
          // error variable unconditionally, even when the upload worked.
          if (putErr) {
            console.log('upload failed:', putErr);
            callback(putErr);
          } else {
            console.log('uploaded file');
            callback(null, 'uploaded');
          }
        });
      }
    });
  }
});
I hope the function above helps you.
If you found this helpful, you can donate to us via PayPal or buy us a coffee so we can maintain and grow. Thank you!
Donate to us via