The server code below works fine for a 5 GB file when downloading with
wget http://localhost:11146/base/bigFile.zip
but it fails when using the client-side code shown further down.
Server side code.
var http = require("http");
var fs = require("fs");
var filename = "base/bigFile.zip";
// Streams `filename` to every request with a Content-Length header.
// Listens on port 1114 (note: the wget examples use 11146).
var serv = http.createServer(function (req, res) {
    // Async stat: avoids blocking the event loop per request and lets us
    // answer 404 instead of crashing when the file is missing.
    fs.stat(filename, function (err, stat) {
        if (err) {
            res.writeHead(404);
            res.end("Not found");
            return;
        }
        // writeHead is the documented API; writeHeader is a legacy alias.
        res.writeHead(200, {"Content-Length": stat.size});
        var fReadStream = fs.createReadStream(filename);
        // pipe() implements backpressure (pause on full buffer, resume on
        // 'drain') automatically — no manual pause/resume needed.
        fReadStream.pipe(res);
        // If the read fails mid-stream, tear down the response so the
        // client sees a broken connection rather than a hang.
        fReadStream.on("error", function () {
            res.destroy();
        });
    });
});
serv.listen(1114);
Client side code using Request module. What is wrong in this code?
var request = require('request')
// BUG: the URL below is missing a slash after "http:" — it reads
// "http:/localhost…" instead of "http://localhost…". That malformed
// protocol is exactly what makes the request module throw
// "Error: Invalid protocol" (see the stack trace below).
request('http:/localhost:11146/base/bigFile.zip', function (error, response, body) {
console.log('error:', error); // Print the error if one occurred
console.log('statusCode:', response && response.statusCode); // Print the response status code if a response was received
console.log('body:', body); // Print the response body (the downloaded file contents)
});
error for above client side code is below -
error: Error: Invalid protocol
at Request.init (/Users/air/Projects/node_modules/request/request.js:338:51)
at new Request (/Users/air/Projects//node_modules/request/request.js:105:8)
at request (/Users/air/Projects/Vertico/Vertico-CLI/node_modules/request/index.js:53:11)
at Object.<anonymous> (/Users/air/Projects/req.js:2:1)
at Module._compile (module.js:569:30)
at Object.Module._extensions..js (module.js:580:10)
at Module.load (module.js:503:32)
at tryModuleLoad (module.js:466:12)
at Function.Module._load (module.js:458:3)
at Function.Module.runMain (module.js:605:10)
statusCode: undefined
body: undefined
I revised the client side to spawn a
wget
child process instead of using the request package; that code is below. The problem is that I am not able to see wget's nice download progress. Is there a workaround for this code so that I can see the progress bar from the child process?
const fs = require('fs');
const child_process = require('child_process');
// wget prints its progress bar to STDERR, and it suppresses the bar when
// stderr is not a terminal. Two fixes:
//  1. `--progress=bar:force` makes wget draw the bar even without a TTY;
//  2. stdio 'inherit' hands wget the parent's stdout/stderr directly, so
//     the bar's carriage-return redraws render correctly on screen.
var workerProcess = child_process.spawn(
    'wget',
    ['--progress=bar:force', '-O', 'fdsf.zip', 'http://localhost:11146/base/bigFile.zip'],
    { stdio: ['ignore', 'inherit', 'inherit'] }
);
// With 'inherit' there are no 'data' events to handle — wget writes
// straight to the terminal; we only need the exit notification.
workerProcess.on('close', function (code) {
    console.log('Download Completed' + code);
});
So finally I want to know how to download a file using client side code written in nodejs?
The Node.js stream feature makes it possible to process large data continuously in smaller chunks without keeping it all in memory. In other words, you can use streams to read from or write to a source continuously instead of using the traditional method of processing all of it at once.
At least 2GB of RAM. At least 4 vCPUs.
The most straightforward is fs.readFile(), wherein the whole file is read into memory and then acted upon once Node has read it; the second option is fs.createReadStream(), which streams the data in (and out) similar to other languages like Python and Java.
Middleware frameworks, like Express.js, are suitable for small and medium projects. If you are going to develop a large project that will be supported by a large team of developers, Express.js is not the best choice.
UPDATE:
The request
module is deprecated. It is recommended to use actively maintained modules instead; my personal preference is got.
Please refer to got streams for examples.
Another alternative is node-fetch
Easiest way is to use request
module
Here you are trying to store entire result in memory and console log it. 5GB is pretty much large, either you must increase Node.js memory limit (not recommended) or you must use streams. See the streaming example below from request
npm documentation:
const fs = require('fs');
const request = require('request');

// Stream the HTTP response straight to disk: only a small buffer is ever
// held in memory, regardless of the file size.
const fileSink = fs.createWriteStream('doodle.png');
request('http://google.com/doodle.png').pipe(fileSink);
You must pipe the response, so that whether it is 1MB, 1GB or 1TB, only a fraction of the file will be in memory at any time and it will be written to disk as soon as possible. You can use the same approach with Node.js built-in functions, but the implementation would be more difficult and would be like re-inventing the wheel when the request
module is there.
For download with progress you can use request-progress
module along with request
module, see the example below (taken from their documentation):
var fs = require('fs');
var request = require('request');
var progress = require('request-progress');
// Wrap the request in request-progress: the download is still piped to
// disk, but a 'progress' event fires periodically with percent/speed/size
// stats (example taken from the request-progress documentation).
// The options argument is optional so you can omit it
progress(request('https://az412801.vo.msecnd.net/vhd/VMBuild_20141027/VirtualBox/IE11/Windows/IE11.Win8.1.For.Windows.VirtualBox.zip'), {
// throttle: 2000, // Throttle the progress event to 2000ms, defaults to 1000ms
// delay: 1000, // Only start to emit after 1000ms delay, defaults to 0ms
// lengthHeader: 'x-transfer-length' // Length header to use, defaults to content-length
})
.on('progress', function (state) {
// The state is an object that looks like this:
// {
// percent: 0.5, // Overall percent (between 0 to 1)
// speed: 554732, // The download speed in bytes/sec
// size: {
// total: 90044871, // The total payload size in bytes
// transferred: 27610959 // The transferred payload size in bytes
// },
// time: {
// elapsed: 36.235, // The total elapsed seconds since the start (3 decimals)
// remaining: 81.403 // The remaining seconds to finish (3 decimals)
// }
// }
console.log('progress', state);
})
.on('error', function (err) {
// Do something with err
})
.on('end', function () {
// Do something after request finishes
})
.pipe(fs.createWriteStream('IE11.Win8.1.For.Windows.VirtualBox.zip'));
var request = require('request')
// NOTE(review): this corrects the protocol typo ("http://" instead of
// "http:/"), which resolves the "Invalid protocol" error. However, the
// callback form shown here buffers the ENTIRE response body in memory
// before invoking the callback — not usable for a 5GB file. Prefer the
// streaming .pipe(fs.createWriteStream(...)) approach shown earlier.
request('http://localhost:11146/base/bigFile.zip', function (error, response, body) {
console.log('error:', error); // Print the error if one occurred
console.log('statusCode:', response && response.statusCode); // Print the response status code if a response was received
console.log('body:', body); // Print the response body (the downloaded file contents)
});
If you love us? You can donate to us via Paypal or buy me a coffee so we can maintain and grow! Thank you!
Donate Us With