The goal is to have AWS Lambda copy a specific JPG from a URL to an S3 bucket each day. This seems like a super simple Lambda function, but it's just not working. I have this set up as a local NPM project, then upload the zipped files in the AWS Lambda console. The code below runs without error but does not copy the image. Any help would be greatly appreciated.
"use strict";
const AWS = require("aws-sdk");
const Jimp = require("jimp");
const s3 = new AWS.S3();
const imageType = "image/jpeg";
const bucket = 'mybucket';
exports.handler = (event, context, callback) => {
let objectKey = 'myimage.jpg';
Jimp.read('sampleURL.com/image.jpg')
.then(image => {
s3.putObject({
Bucket: bucket,
Key: objectKey,
Body: image,
ContentType: imageType
})
})
.catch(err => {
// Handle an exception.
});
};
AWS CloudWatch logs:
2019-10-24T12:48:23.105Z bac7d80e-5544-4ea5-ae12-478281338389 INFO { Error: Could not find MIME for Buffer <null>
at Jimp.parseBitmap (/var/task/node_modules/@jimp/core/dist/utils/image-bitmap.js:120:15)
at Jimp.parseBitmap (/var/task/node_modules/@jimp/core/dist/index.js:506:32)
at /var/task/node_modules/@jimp/core/dist/index.js:448:15
at /var/task/node_modules/@jimp/core/dist/index.js:176:14
at /var/task/node_modules/@jimp/core/dist/request.js:66:9
at IncomingMessage.<anonymous> (/var/task/node_modules/phin/lib/phin.compiled.js:1:2100)
at IncomingMessage.emit (events.js:203:15)
at IncomingMessage.EventEmitter.emit (domain.js:448:20)
at endReadableNT (_stream_readable.js:1145:12)
at process._tickCallback (internal/process/next_tick.js:63:19) methodName: 'constructor' }
END RequestId: bac7d80e-5544-4ea5-ae12-478281338389
REPORT RequestId: bac7d80e-5544-4ea5-ae12-478281338389 Duration: 612.63 ms Billed Duration: 700 ms Memory Size: 128 MB Max Memory Used: 97 MB Init Duration: 557.69 ms
Here's an example of how to stream a file from an HTTP URL into S3. It uses promises/async/await rather than callbacks and it dispenses with the Jimp package, about which I know little, in favor of the more traditional fetch API:
Note: if you do not explicitly supply a content-type when uploading to S3 then it will be set to application/octet-stream which will be problematic when clients are downloading the object. So this code determines the content type of the file first, and sets it when streaming to S3.
const AWS = require('aws-sdk');
const fetch = require('node-fetch');
const stream = require('stream');
const s3 = new AWS.S3();
// Wire a PassThrough stream into an S3 managed upload. The caller gets
// back the writable end (`writeStream`) and a promise that resolves
// when the upload to S3 has completed.
const uploadStream = ({ Bucket, Key, ContentType }) => {
  const passThrough = new stream.PassThrough();
  const managedUpload = s3.upload({ Bucket, Key, ContentType, Body: passThrough });
  return { writeStream: passThrough, promise: managedUpload.promise() };
}
// GET `url` and pipe the response body straight into an S3 upload.
// Resolves with the S3 upload result once the transfer finishes.
const uploadFetch = async ({ url, Bucket, Key, ContentType }) => {
  const response = await fetch(url);
  const target = uploadStream({ Bucket, Key, ContentType });
  response.body.pipe(target.writeStream);
  return target.promise;
}
exports.handler = async (_event, _context) => {
const source_jpeg = {
Key: 'audi.jpeg',
Bucket: 'mybucket',
url: 'https://upload.wikimedia.org/wikipedia/commons/0/08/Audi_A3_2015.jpeg',
};
// HEAD the source image to get content type
const rc_head = await fetch(source_jpeg.url, {method: 'HEAD'});
const content_type = rc_head.headers.get('content-type');
console.log('head:', rc_head.status, rc_head.statusText, content_type);
try {
// GET the source image and stream it to S3
const parms = {...source_jpeg, ContentType: content_type};
const rc_upload = await uploadFetch(parms);
console.log('get/upload jpeg:', rc_upload);
} catch(e) {
console.log(e);
}
};
Also, be sure that your Lambda function is configured with a reasonable timeout (the default timeout is 3 seconds).
In case this helps anyone else: the image needed to be written to a buffer first. The following line fixed it:
const buffer = await image.getBufferAsync(imageType);
Then `buffer` is used for the S3 `Body` parameter. So the full script is:
"use strict";
const AWS = require("aws-sdk");
const Jimp = require("jimp");
const s3 = new AWS.S3();
const imageType = "image/jpeg";
const bucket = 'bucketxzy';
exports.handler = async (event, context) => {
let objectKey = 'sampleimage.jpeg';
const image = await Jimp.read('https://www.sampleurl.com/sampleimage.jpg/');
const buffer = await image.getBufferAsync(imageType);
return s3.putObject({
Bucket: bucket,
Key: objectKey,
Body: buffer,
ContentType: imageType
}).promise();
};
If you found this helpful, you can donate to us via PayPal or buy us a coffee so we can maintain and grow. Thank you!
Donate to us with