I have a Node.js (v0.10.23) proxy connecting to a Postgres DB (node-postgres module, v2.1.0) behind pgpool-II, which returns all sorts of JSON data.
Back in the day, this is how connection errors were handled:
var after = function(callback) {
    return function(err, queryResult) {
        if (err) {
            // On error, end the response with a 500 and bail out
            response.writeHead(500, _header);
            console.log("ERROR: 500");
            console.log(err);
            return response.end(JSON.stringify({error: err}));
        }
        // No error: hand the result to the wrapped callback
        callback(queryResult);
    };
};
Basically, it ends the response with a 500 if an error occurred, and hands the result to the callback otherwise.
An in-depth explanation can be found here: Node js - http.request() problems with connection pooling
Using the function above, I got something like this:
pg.connect(_conString, after(function(err, client, done) {
    client.query(sql, after(function(result) {
        ...
        done();
    }));
}));
Since the context is lost when the function is wrapped by after(), I'm losing the ability to use the done() method that pg.connect() passes in.
Removing after() solves the issue, but then, in due time and with a fair number of clients pulling data, the node process hangs until it is restarted.
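For what it's worth, the hang is consistent with pool exhaustion. Below is a minimal sketch of the failure mode, reusing _conString and sql from above; the poolSize figure is node-postgres' v2.x default:

var pg = require('pg');

// pg keeps a fixed-size client pool (pg.defaults.poolSize, 10 by
// default in v2.x). A client only goes back to the pool when done()
// is called.
pg.connect(_conString, function(err, client, done) {
    if (err) { /* ... */ }
    client.query(sql, function(err, result) {
        // If an error path returns before reaching done(), this client
        // is never released. After poolSize such requests, every later
        // pg.connect() callback just sits in the queue, and the process
        // appears to hang until it is restarted.
        done();
    });
});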
Is there a different way of consuming various asynchronous responses? Or perhaps a way to pass the pg.connect() context into the callback?
Well, of course you're losing done(): you never pass a third argument to your callback in your after() function.
function after(cb) {
    return function() {
        // you're using this function in a context where the arguments
        // passed in could be anything. the only constant is that the first
        // argument is the error, if any
        if (arguments[0]) {
            response.writeHead(500, _header);
            console.log("ERROR: 500");
            console.log(arguments[0]);
            return response.end(JSON.stringify({error: arguments[0]}));
        }
        // apply the entire argument list to the callback, without modification
        cb.apply(cb, arguments);
    };
}
... this also fixes the dubious convention of passing client through the queryResult variable.
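For illustration, here is how the corrected after() might slot into the original flow. This is a minimal sketch reusing response, _header, _conString, and sql from the question; since the whole argument list is now forwarded, the wrapped callbacks also receive the leading (always-falsy) err:

pg.connect(_conString, after(function(err, client, done) {
    // err is always falsy here: after() has already handled the error
    // case, but it forwards the full argument list unchanged, so both
    // client and done arrive intact.
    client.query(sql, after(function(err, result) {
        response.writeHead(200, _header);
        response.end(JSON.stringify(result.rows));
        done(); // return the client to the pool
    }));
}));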