Does Node.js handle client requests one by one? I have a project that uses Node.js as a server-side proxy. According to my understanding, if you use a callback for a response, Node.js should respond to a new request without any delay, but in practice Node.js does not respond to the new request until it has finished the previous callback. Is this the correct behavior, or is there some incorrect code usage? Please help me with this. Thank you very much. Below is the code for re-sending requests to the back-end service.
var request = require('request');
var http = require('http');
// Constructor for the API proxy service. It holds no instance state;
// all behavior is attached to the prototype below.
function apiService() {
}
// Verb-specific convenience wrappers: get/post/del each delegate to
// requestAPI with the matching HTTP method string. Generated from a
// table so the three wrappers cannot drift apart.
(function (proto) {
    var methodByName = { get: "GET", post: "POST", del: "DELETE" };
    Object.keys(methodByName).forEach(function (name) {
        proto[name] = function (context, payload, callback, url) {
            return this.requestAPI(context, payload, callback, url, methodByName[name]);
        };
    });
})(apiService.prototype);
// Forwards a request to the back-end API host configured on context.config.
//
// Parameters:
//   context  - object carrying .config (APIHost, port, baseDir, proxy).
//   payload  - request body (POST/DELETE) or query parameters (GET); may be null.
//   callback - node-style callback(err, body).
//   url      - resource path appended after the configured base directory.
//   method   - "GET" | "POST" | "DELETE".
apiService.prototype.requestAPI = function (context, payload, callback, url, method) {
    var config = context.config;
    var targetUrl = config.APIHost
        + (config.port == null ? "" : (":" + config.port))
        + "/"
        + config.baseDir
        + "/"
        + url;
    var requestObj = {
        url: targetUrl,
        json: true,
        // A dedicated agent per request opts out of the shared pool (whose
        // default maxSockets cap can serialize concurrent outbound calls),
        // at the cost of losing keep-alive connection reuse.
        pool: new http.Agent()
    };
    if (config.proxy != null) {
        requestObj.proxy = config.proxy;
    }
    // One completion handler shared by all verbs (was triplicated).
    function handleResponse(err, resp, body) {
        if (err) {
            callback(err);
            return;
        }
        callback(null, body);
    }
    switch (method) {
        case "POST":
            requestObj.body = payload;
            request.post(requestObj, handleResponse);
            break;
        case "GET":
            // Build the query string. Keys and values are URL-encoded; the
            // original concatenated raw values, which breaks on '&', '=',
            // spaces, etc. 'var att' also fixes an implicit-global leak.
            var parts = [];
            for (var att in payload) {
                parts.push(encodeURIComponent(att) + "=" + encodeURIComponent(payload[att]));
            }
            if (parts.length > 0) {
                requestObj.url += "?" + parts.join("&");
            }
            request.get(requestObj, handleResponse);
            break;
        case "DELETE":
            requestObj.body = payload;
            request.del(requestObj, handleResponse);
            break;
        default:
            // Previously an unknown verb fell through and the callback was
            // never invoked; report it explicitly instead.
            callback(new Error("Unsupported HTTP method: " + method));
            break;
    }
};
Current process
client request1 -> Node
client request2 -> Node
Node server request1 ->backend
Node (waiting for event loop)
Node <- server response1 backend
client <- response1 Node
Node server request2 ->backend
Node <- server response2 backend
client <- response2 Node
What I think it should be
client request1 -> Node
client request2 -> Node
Node server request1 -> backend
Node (does not wait for the event loop)
Node server request2 -> backend
Node <- server response2 backend
client <-response2 Node
Node <- server response1 backend
client <-response1 Node
Updated:
// Request body for the building-block listing call.
var requestBody = {
    action: 'list'
};
$http.post('../../service/buildingBlockService', requestBody)
    .success(function (buildingBlocks) {
        // Forward the result through the node-style callback.
        callback(null, buildingBlocks);
    })
    .error(function (error) {
        callback(error);
    });
The client requests come from Angular's $http service. On one page, several requests are issued at the same time. According to my Fiddler monitor, the requests from the browser are sent without waiting, but the server re-sends them to the back end one by one.
// Express invokes route handlers as (req, res, next); the original
// (ctx, payload, req, res) signature mis-mapped those arguments, so
// 'res' was actually 'next' (or undefined) inside the handler.
var BuildingBlockService = require('./service/buildingBlockService');
app.post('/service/buildingBlockService', function (req, res) {
    var service = new BuildingBlockService();
    // NOTE(review): the context/payload sources are not visible here;
    // req.body assumes body-parsing middleware is installed, and the
    // service reads context.config -- confirm what 'ctx' should be.
    service.service(req, req.body, function (error, result) {
        if (error) {
            // The original ignored 'error' and sent 'result' regardless.
            res.status(500).send(error);
            return;
        }
        // res.send(status, body) is deprecated Express style.
        res.status(200).send(result);
    });
});
Updated: The middle layer of server request.
var service = require('./apiService');
// Service-layer facade for building-block operations: dispatches the
// action named in the payload to the matching apiService call.
function BuildingBlockService() {
}

// No initialization required; kept for interface parity.
BuildingBlockService.prototype.init = function () {};

BuildingBlockService.prototype.service = function (context, payload, callback) {
    var api = new service();
    // Only "list" is implemented; any other action falls back to listing.
    switch (payload.action) {
        case "list":
        default:
            api.get(context, null, callback, "BuildingBlocks");
            break;
    }
};

module.exports = BuildingBlockService;
Does Node.js handle client requests one by one?
Yes and no. Node.js runs your JS single-threaded. That means that only one JS thread of execution is running at any given time. So, if you had two requests like this:
// Don't use this in real code -- it exists only to demonstrate a point.
// Busy-waits (burns CPU) until t milliseconds have elapsed.
function spin(t) {
    var deadline = Date.now() + t;
    while (Date.now() < deadline) {
        // intentionally empty: simulates CPU-bound work
    }
}
// Fixed: the original `app.post("/route1", req, res) {` is a syntax
// error -- the handler must be wrapped in `function (req, res) { ... }`.
app.post("/route1", function (req, res) {
    console.log("starting processing of /route1 request");
    // simulate taking several seconds of pure CPU to make a response
    spin(2000);
    res.send("done 1");
    console.log("finished processing of /route1 request");
});
// Fixed: the original `app.post("/route2", req, res) {` is a syntax
// error -- the handler must be wrapped in `function (req, res) { ... }`.
app.post("/route2", function (req, res) {
    console.log("starting processing of /route2 request");
    // simulate taking several seconds of pure CPU to make a response
    spin(2000);
    res.send("done 2");
    console.log("finished processing of /route2 request");
});
And a /route1 request was immediately followed by a /route2 request, then the node.js server would process the /route1 request and not be able to do anything else until it was done with that request because the CPU was kept busy the entire time.
So, this would produce a log like this:
starting processing of /route1 request
finished processing of /route1 request
starting processing of /route2 request
finished processing of /route2 request
But, it is relatively rare that requests take a long time purely for CPU reasons. Often requests involve some sort of I/O (files to read, database queries, other servers to contact, etc...). If that I/O is done in an async fashion using async IO and not using synchronous IO, then multiple requests can easily be in flight at the same time and will be in flight at the same time because while the node.js server is waiting for I/O requests to complete, it is free to serve other requests and will serve other requests.
So, if you had this server code:
// Fixed: handler wrapped in `function (req, res)` (the original was a
// syntax error), and the copy-pasted "pure CPU" comment corrected --
// this example demonstrates async I/O, not CPU work.
app.post("/route1", function (req, res) {
    console.log("starting processing of /route1 request");
    // async I/O: the event loop is free to serve other requests while waiting
    request('http://www.google.com', function (error, response, body) {
        if (!error && response.statusCode === 200) {
            res.send("done 1");
            console.log("finished processing of /route1 request");
        }
        // NOTE(review): no response is sent on the error path; fine for an
        // illustration, but real code should reply with an error status.
    });
});
// Fixed: handler wrapped in `function (req, res)` (the original was a
// syntax error), and the copy-pasted "pure CPU" comment corrected --
// this example demonstrates async I/O, not CPU work.
app.post("/route2", function (req, res) {
    console.log("starting processing of /route2 request");
    // async I/O: the event loop is free to serve other requests while waiting
    request('http://www.google.com', function (error, response, body) {
        if (!error && response.statusCode === 200) {
            res.send("done 2");
            console.log("finished processing of /route2 request");
        }
        // NOTE(review): no response is sent on the error path; fine for an
        // illustration, but real code should reply with an error status.
    });
});
And a /route1 request was immediately followed by a /route2 request, then you would likely see a log like this (the order in which the /route1 and /route2 responses finish is not guaranteed — they could complete in either order), but both requests will be processed in parallel:
starting processing of /route1 request
starting processing of /route2 request
finished processing of /route1 request
finished processing of /route2 request
If your Node.js proxy server appears to be exhibiting serial processing behavior rather than parallel processing behavior, then there could be some sort of implementation issue in how you are implementing the proxy, as Node.js is certainly capable of having multiple requests in flight at the same time.
If you love us? You can donate to us via Paypal or buy me a coffee so we can maintain and grow! Thank you!
Donate Us With