I was able to optimise performance quite a bit, but it is still somewhat slow :/
LATEST EDIT:
My current solution (the fastest so far, though still slow) that keeps the order:
Server:
router.post('/images', function(req, res, next) {
  var image = bucket.file(req.body.image);
  image.download(function(err, contents) {
    if (err) {
      console.log(err);
      res.status(500).send(err);
    } else {
      var resultImage = base64_encode(contents);
      var index = req.body.index;
      var returnObject = {
        image: resultImage,
        index: index
      };
      res.send(returnObject);
    }
  });
});
Client query:
$scope.getDataset = function() {
  fb.orderByChild('id').startAt(_start).limitToFirst(_n).once("value", function(dataSnapshot) {
    dataSnapshot.forEach(function(childDataSnapshot) {
      _start = childDataSnapshot.child("id").val() + 1;
      var post = childDataSnapshot.val();
      var image = post.image;
      var imageObject = {
        image: image,
        index: position
      };
      position++;
      $.ajax({
        type: "POST",
        url: "images",
        data: imageObject,
      }).done(function(result) {
        post.image = result.image;
        $scope.data[result.index] = post;
        $scope.$apply();
        firstElementsLoaded = true;
      });
    });
  });
};
Client HTML:
<div ng-controller="ctrl">
  <div class="allcontent">
    <div id="pageContent" ng-repeat="d in data track by $index">
      <a href="details/{{d.key}}" target="_blank">
        <h3 class="text-left">{{d.title}}
          <a href="../users/{{d.author}}"><span class="authorLegend"><i> by {{d.username}}</i></span></a>
        </h3>
      </a>
      <div class="postImgIndex" ng-show="d.upvotes - d.downvotes > -50">
        <a href="details/{{d.key}}" target="_blank"><img class="imgIndex" ng-src="data:image/png;base64,{{d.image}}"></a>
      </div>
      <div class="postScore">{{d.upvotes - d.downvotes}} HP</div>
    </div>
  </div>
</div>
Your solution is slow because you are downloading the images from your Cloud Storage bucket and serving them from your own server. You get a delay on the download and upload, a ~33% overhead from base64-encoded data, and your server is strained delivering images instead of focusing on delivering your website content.
As pointed out by many in the comments, the best-practice solution is to use the public URL of the images, like so:
function getPublicUrl(filename) {
  return `https://storage.googleapis.com/${CLOUD_BUCKET}/${filename}`;
}
By using the public URL, you serve directly from Cloud Storage and leverage Google's global serving infrastructure, and your application no longer has to respond to requests for images, freeing up CPU cycles for other requests.
If you do not want bots to crawl images served this way, Google recommends using a robots.txt file to block access to them.
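A sketch of how the client query could look with this approach (assuming getPublicUrl, and therefore the bucket name, is available to the client, or that the full URL is stored in Firebase alongside the post): the AJAX round trip and the base64 data URI disappear, and the template can bind ng-src="{{d.image}}" directly.
$scope.getDataset = function() {
  fb.orderByChild('id').startAt(_start).limitToFirst(_n).once("value", function(dataSnapshot) {
    dataSnapshot.forEach(function(childDataSnapshot) {
      _start = childDataSnapshot.child("id").val() + 1;
      var post = childDataSnapshot.val();
      // post.image holds the file name; turn it into a public URL instead of fetching base64
      post.image = getPublicUrl(post.image);
      $scope.data.push(post);
    });
    $scope.$apply();
    firstElementsLoaded = true;
  });
};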
base64_encode(contents) may cost a lot of CPU, and your logic seems to do it repeatedly. This is a guess; you have to find the true bottleneck yourself.
Optimization data collection - server side: measure which operation takes too much time.
router.post('/images', function(req, res, next) {
  var d = new Date();
  var image = bucket.file(req.body.image);
  image.download(function(err, contents) {
    console.log('download: ' + (new Date() - d));
    if (err) {
      console.log(err);
      res.status(500).send(err);
    } else {
      var resultImage = base64_encode(contents);
      console.log('base64_encode: ' + (new Date() - d));
      var index = req.body.index;
      var returnObject = {
        image: resultImage,
        index: index
      };
      res.send(returnObject);
    }
  });
});
Optimization data collection - client side
Spare the use of base64_encode(contents) where possible.
$scope.getDataset = function() {
  fb.orderByChild('id').startAt(_start).limitToFirst(_n).once("value", function(dataSnapshot) {
    dataSnapshot.forEach(function(childDataSnapshot, index) {
      _start = childDataSnapshot.child("id").val() + 1;
      var post = childDataSnapshot.val();
      getImageBase64(post.image)
        .then((image) => {
          post.image = image;
          $scope.data[index] = post;
          $scope.$apply();
          firstElementsLoaded = true;
        });
    });
  });

  function getImageBase64(image1) {
    // without help of the server, this will make your app faster:
    // less network traffic, less server calculation
    if (CanIUseBucketAndBase64Here) {
      return new Promise((resolve, reject) => {
        var image = bucket.file(image1);
        image.download(function(err, contents) {
          if (err) {
            reject(err);
          } else {
            // a worker thread might gain better performance here
            var resultImage = base64_encode(contents);
            resolve(resultImage);
          }
        });
      });
    }
    // with help of the server
    return $.ajax({
      type: "POST",
      url: "images",
      data: { image: image1 },
    }).then(result => result.image);
  }
};
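The comment above mentions a worker thread; here is a minimal sketch of moving the base64 encoding off the browser's main thread with a Web Worker, assuming the raw bytes are already available client-side as an ArrayBuffer (the file name base64-worker.js and the variables arrayBuffer/index are placeholders, not part of the original code):
// base64-worker.js - encodes an ArrayBuffer to base64 off the main thread
self.onmessage = function(e) {
  var bytes = new Uint8Array(e.data.buffer);
  var binary = '';
  // simple byte-by-byte conversion; adequate for modest image sizes
  for (var i = 0; i < bytes.length; i++) {
    binary += String.fromCharCode(bytes[i]);
  }
  self.postMessage({ index: e.data.index, base64: btoa(binary) });
};

// main thread usage
var encoder = new Worker('base64-worker.js');
encoder.onmessage = function(e) {
  $scope.data[e.data.index].image = e.data.base64;
  $scope.$apply();
};
// transfer the buffer (zero-copy) instead of cloning it
encoder.postMessage({ buffer: arrayBuffer, index: index }, [arrayBuffer]);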
Avoid downloading every time
//------------ load everything to a local cache; suits a small number of images ----------
// if you have many images, you can use a cheaper cache, like a file cache
//-- init.js: download all images, run only once
downloadAll()
//-- server.js
// when an image is updated, let the server know and flush the cache
server.get('/imageupdated/:imgid', (req, res) => {
  downfile(req.params.imgid)
  res.send('got it')
})
// serve from cache first
server.post('/image', (req, res) => {
  memcache.get(req.body.imgid)
    .then((content) => {
      // cache miss: download, cache, and let the next .then respond
      if (!content) return downfile(req.body.imgid)
      // cache hit: respond immediately
      res.send({
        content
      })
      return true
    })
    .then((content) => {
      if (content === true) return
      res.send({
        content
      })
    })
})
server.listen()
//-- common.js: download a file and cache it to memcache
function downfile(imgid) {
  var base64 = ''
  return bucket.file(imgid).download()
    .then(([contents]) => {
      base64 = base64_encode(contents)
      return memcache.set(imgid, base64)
    })
    .then(() => {
      return base64
    })
}
//downfileBatch
async function downfileBatch(skip, limit) {
  // cloudDrive.getImages is pseudocode for listing one page of image ids
  return cloudDrive.getImages(skip, limit)
    .then((list) => {
      return Promise.all(list.map((img) => downfile(img.id)))
    })
}
// download all images, page by page
async function downloadAll() {
  var i = 0,
    limit = 5
  while (true) {
    var list = await downfileBatch(i, limit)
    if (list.length < limit) break
    i += limit
  }
  return true
}
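The memcache object used above is not defined in the snippet; a minimal in-memory stand-in with the same promise-based get/set interface (any real cache, e.g. Memcached or Redis behind a thin wrapper, would slot in the same way) could look like this:
// minimal in-memory stand-in for `memcache` (assumption: only get/set are needed)
var store = new Map()
var memcache = {
  get: function(key) {
    return Promise.resolve(store.get(key))
  },
  set: function(key, value) {
    store.set(key, value)
    return Promise.resolve(true)
  }
}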