I am using loopback-component-storage for uploading images to the server.
I want to convert each image uploaded through the server to a thumbnail and save it to the container. Initially I was using local file storage, and everything was working fine.
With file storage I was using "quickthumb" to convert an image to a thumbnail, and then saving both the original image and the thumbnail-sized image to the container.
But now I want to store my images on Amazon S3 using loopback-component-storage. By following the documentation I can easily upload an image to the Amazon S3 bucket, but I can't figure out how to resize an image to a thumbnail and store the different versions of the image alongside the original on the Amazon S3 server.
Here is what I was doing when I implemented it with file storage, using quickthumb to convert the images to thumbnail size. This is how I used it with LoopBack.
common/models/container.js
module.exports = function(Container) {
  var qt = require('quickthumb');

  Container.afterRemote('upload', function(ctx, res, next) {
    var file = res.result.files.file[0];
    var file_path = "./server/storage/" + file.container + "/" + file.name;
    var file_thumb_path = "./server/storage/" + file.container + "/thumb/" + file.name;

    // Convert the uploaded image to a 100px-wide thumbnail.
    qt.convert({
      src: file_path,
      dst: file_thumb_path,
      width: 100
    }, function (err, path) {
      // Wait for the conversion to finish (and surface any error) before responding.
      next(err);
    });
  });
};
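(Note: quickthumb is a wrapper around ImageMagick, so ImageMagick must be installed on the server for the conversion to work.)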
But now, to resize the image before uploading it to the S3 server, I need some Express-like syntax such as req.files.image, and this does not seem to be possible in LoopBack. Please help.
You can accomplish what you are trying to do, but it requires three steps.
First, use loopback-component-storage to upload the file locally. Once you have it there, you can create as many variants of the image as you like, but you will want to give each one some sort of unique name to avoid collisions.
Secondly, use the AWS Node SDK to push those new images to your S3 bucket.
Lastly, delete the local files to clean up after yourself.
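A minimal sketch of those three steps, assuming quickthumb for the resize (as in the question) and the aws-sdk S3 client; the bucket name and the thumbnail naming scheme here are placeholders, not part of the recipe:

module.exports = function(Container) {
  var fs = require('fs');
  var qt = require('quickthumb');
  var AWS = require('aws-sdk');
  var s3 = new AWS.S3();

  Container.afterRemote('upload', function(ctx, res, next) {
    var file = res.result.files.file[0];
    // Step 1: the file is already on local disk via loopback-component-storage.
    // Give the thumbnail a unique name to avoid collisions.
    var localPath = './server/storage/' + file.container + '/' + file.name;
    var thumbPath = './server/storage/' + file.container + '/thumb_' + Date.now() + '_' + file.name;

    qt.convert({ src: localPath, dst: thumbPath, width: 100 }, function(err) {
      if (err) return next(err);

      // Step 2: push both versions to your S3 bucket with the AWS SDK.
      var uploads = [
        { Key: file.container + '/' + file.name, Body: fs.createReadStream(localPath) },
        { Key: file.container + '/thumb/' + file.name, Body: fs.createReadStream(thumbPath) }
      ];
      var pending = uploads.length;
      var failed = false;
      uploads.forEach(function(u) {
        s3.upload({ Bucket: 'YOUR_BUCKET', Key: u.Key, Body: u.Body }, function(err) {
          if (failed) return;
          if (err) { failed = true; return next(err); }
          if (--pending === 0) {
            // Step 3: clean up the local copies.
            fs.unlink(localPath, function() {});
            fs.unlink(thumbPath, function() {});
            next();
          }
        });
      });
    });
  });
};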
It's pretty simple. Check out this recipe I put together to solve such problems:
https://github.com/dashby3000/recipe-s3-image-uploader
Cheers!
Dennis
Thanks @Dennis for answering. I found another way myself, and I am posting it here too.
Container.js
var fs = require('fs');
// Formidable to parse the multipart form in the incoming request.
var IncomingForm = require('formidable').IncomingForm;
// Imager for resizing the image and uploading it to Amazon.
var Imager = require('imager');

Container.beforeRemote('upload', function(ctx, modelInstance, next) {
  var app = Container.app;
  var container = ctx.req.params.container;

  // If you upload large images, raise the socket timeout to avoid a timeout.
  ctx.req.connection.setTimeout(16000);

  // Check whether the container name is valid, e.g. with Customer.exists():
  // http://apidocs.strongloop.com/loopback/#persistedmodel-exists
  var Customer = app.models.Customer;

  // Now manually upload to the provider, converting the file to a thumbnail
  // first. next() is deliberately not called here: the response is sent from
  // uploadToCloud() below.
  upload(app, ctx.req, ctx.res, function(err, data) {
    console.log("Image uploaded successfully.");
  });
}); // beforeRemote
// For handling the upload.
var upload = function(app, req, res, options, cb) {
  var storageService = app.dataSources.presImage.connector;

  if (!cb && 'function' === typeof options) {
    cb = options;
    options = {};
  }

  if (storageService.getFilename && !options.getFilename) {
    options.getFilename = storageService.getFilename;
  }
  if (storageService.acl && !options.acl) {
    options.acl = storageService.acl;
  }
  if (storageService.allowedContentTypes && !options.allowedContentTypes) {
    options.allowedContentTypes = storageService.allowedContentTypes;
  }
  if (storageService.maxFileSize && !options.maxFileSize) {
    options.maxFileSize = storageService.maxFileSize;
  }

  return handler(app, storageService.client, req, res, options, cb);
};
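// Note: this helper mirrors what loopback-component-storage does internally --
// it copies the connector-level options (getFilename, acl, allowedContentTypes,
// maxFileSize) onto the per-request options before handing off to the form handler.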
// Fallback used when the connector does not define a maximum file size
// (the 10 MB value here is an arbitrary default; adjust to taste).
var defaultOptions = {
  maxFileSize: 10 * 1024 * 1024
};

// Using a handler for form parsing.
var handler = function(app, provider, req, res, options, cb) {
  if (!cb && 'function' === typeof options) {
    cb = options;
    options = {};
  }

  if (!options.maxFileSize) {
    options.maxFileSize = defaultOptions.maxFileSize;
  }

  var form = new IncomingForm(options);
  var fields = {};
  var files = [];

  form
    .on('field', function(field, value) {
      fields[field] = value;
    })
    .on('file', function(field, file) {
      // Now upload the file to the Amazon server. Pass the formidable file
      // object along, since uploadToCloud needs its path and MIME type.
      uploadToCloud(app, fields.container, file, res, cb);
    })
    .on('end', function(name, file) {
      console.log("END -> File received from the client.\n");
    });

  form.parse(req);
};
var uploadToCloud = function(app, container, file, res, callback) {
  var fileName, extension;
  var time = new Date().getTime();
  fileName = '' + container + '_' + time;

  var imagerConfig = {
    variants: {
      items: {
        rename: function(filename) {
          return fileName + "." + extension;
        },
        resize: {
          thumb: "200x200",
          original: "100%"
        }
      }
    },
    storage: {
      S3: {
        key: 'AMAZON KEY',
        secret: 'AMAZON SECRET',
        bucket: 'YOUR BUCKET NAME HERE',
        storageClass: 'REDUCED_REDUNDANCY',
        secure: false
      }
    },
    debug: true
  };

  // Check that the file is an image (only image MIME types are permitted).
  var pattern = /^image\/(.+)$/;
  extension = pattern.exec(file.type);
  try {
    if (extension && extension.length) {
      extension = extension[1];
    } else {
      throw "Error. Only image type files are permitted";
    }
  } catch (err) {
    throw "Error getting extension of file...";
  }
  if (!extension || extension == 'jpeg') {
    extension = "jpg";
  }

  var imager = new Imager(imagerConfig, 'S3'); // 'S3' selects the Amazon storage
  imager.upload([file], function(err, cdnUri, files) {
    if (err) return callback(err);
    console.log("Successfully saved to the Amazon server.");

    var fileArr = [];
    for (var i = 0; i < files.length; i++) {
      // Preparing the result object.
      fileArr.push({
        name: files[i],
        container: container
      });
    }

    // Send the result right away; don't make the client wait for cleanup.
    res.send({
      result: {
        files: {
          file: fileArr
        }
      }
    });
    callback();
    // The temporary file formidable wrote under os.tmpdir() can be deleted here.
  }, 'items');
}; // uploadToCloud
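A note on the flow above: because the beforeRemote hook parses the multipart body itself with formidable, and the response is sent directly from uploadToCloud, next() is never called, so loopback-component-storage's built-in upload handling is bypassed entirely; the only write to S3 is the one performed by imager.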
server/datasources.json
{
  "db": {
    "host": "",
    "port": 0,
    "database": "",
    "password": "",
    "name": "db",
    "connector": "memory",
    "user": ""
  },
  "Image": {
    "name": "Image",
    "connector": "loopback-component-storage",
    "provider": "amazon",
    "key": "YOUR AMAZON KEY",
    "keyId": "YOUR AMAZON KEY ID",
    "maxFileSize": "15728640"
  }
}
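For completeness, here is a sketch of how the Container model could be bound to that datasource in server/model-config.json; this binding is an assumption on my part, not part of the original answer. Also note that the code above looks up app.dataSources.presImage, so the datasource defined in datasources.json must carry whatever name the code actually uses (the example above names it "Image"):

{
  "Container": {
    "dataSource": "Image",
    "public": true
  }
}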