I loaded around 3K objects (files) into S3. There is an event trigger on each file that is loaded into that S3 bucket.
Lambda receives event triggers for only around 300 objects. If I retry (move the objects out of S3 and put them back), it generates events for another 400 objects, but the rest of the events never reach Lambda at all.
What am I missing here, and how can I scale this for any number of objects created?
var async = require('async');
var aws = require('aws-sdk');

var s3 = new aws.S3();
var kinesis = new aws.Kinesis();
var sns = new aws.SNS();
var config = require('./config.js');

// Publish the error message to an SNS topic; fall back to console logging if the publish fails.
var logError = function(errormsg) {
    sns.publish({
        TopicArn: config.TopicArn,
        Message: errormsg
    }, function(err, data) {
        if (err) {
            console.log(errormsg);
        }
    });
};

exports.handler = function(event, context, callback) {
    var readS3andSendtoKinesis = function(record, index, cb) {
        var params = {
            Bucket: record.s3.bucket.name,
            // Object keys in S3 event notifications are URL-encoded (spaces arrive as '+').
            Key: decodeURIComponent(record.s3.object.key.replace(/\+/g, ' '))
        };
        console.log('Received File: ' + params.Key);
        s3.getObject(params, function(err, data) {
            if (!err) {
                var kinesisParams = {
                    Data: data.Body.toString('utf8'),
                    PartitionKey: config.PartitionKey,
                    StreamName: config.StreamName
                };
                kinesis.putRecord(kinesisParams, function(err, data) {
                    if (err) {
                        // Handle Kinesis failures
                        logError(JSON.stringify(err, null, 2));
                    }
                    cb(null, 'done');
                });
            } else {
                // Handle S3 failures
                logError(JSON.stringify(err, null, 2));
                cb(null, 'done');
            }
        });
    };

    // Process the event records one at a time.
    async.eachOfLimit(event.Records, 1, readS3andSendtoKinesis, function(err) {
        callback(null, 'Done');
    });
};
Since everyone recommended looking at CloudWatch, I'm sharing the CloudWatch metrics for the associated Lambda here.
We found that the root cause was on the processing side: the S3 triggers were firing, but the function could not keep up with the huge number of triggers it received.
To solve this:
Return from the S3-triggered Lambda as quickly as possible; delaying causes issues.
If you spend too much time on business logic inside the trigger, events back up. In our case we were reading the object from S3 and writing its contents to the stream; instead, we wrote only the S3 location (bucket and key) to the stream and read the object from S3 on the receiving end, as in the sketch below.
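For reference, a minimal sketch of that lightweight trigger, assuming the same config.js with StreamName and PartitionKey as in the original code; this is one way to do it, not necessarily the exact code we shipped. The consumer on the other side of the stream then calls s3.getObject with the bucket/key it reads from each record.

var aws = require('aws-sdk');
var kinesis = new aws.Kinesis();
var config = require('./config.js');

// S3-triggered Lambda: forward only the object's location to Kinesis and return quickly.
exports.handler = function(event, context, callback) {
    var records = event.Records.map(function(record) {
        return {
            Data: JSON.stringify({
                bucket: record.s3.bucket.name,
                // Keys in S3 event notifications are URL-encoded.
                key: decodeURIComponent(record.s3.object.key.replace(/\+/g, ' '))
            }),
            PartitionKey: config.PartitionKey
        };
    });

    // PutRecords accepts up to 500 records per call; a single S3 event rarely carries that many.
    kinesis.putRecords({
        StreamName: config.StreamName,
        Records: records
    }, function(err, data) {
        if (err) {
            // Returning the error lets Lambda retry the event instead of silently dropping it.
            return callback(err);
        }
        // For production use, also check data.FailedRecordCount for partial failures.
        callback(null, 'Done');
    });
};

The heavy work (downloading the object, parsing it, etc.) moves to the stream consumer, so the S3 trigger finishes in milliseconds regardless of object size.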
Hope it helps.