I'm new to Node.js and JavaScript web development on the backend. I see that callbacks inside callbacks can be a pain, and there are modules to avoid that. One of these modules is async.
Here is the code modified to use async whenever needed.
// Authenticates a user against the database, then runs the resulting row
// through passport's serialize/deserialize hooks.
// NOTE(review): passport.serializeUser normally *registers* a callback rather
// than being invoked per-user — verify this usage against the passport docs.
function check_auth_user(username, password) {
    var client = new pg.Client("pg://user:pass@127.0.0.1/database");
    async.waterfall([
        // Wrap connect so `this` stays bound to client and only the error
        // is forwarded (passing client.connect bare loses both).
        function(callback) {
            client.connect(function(err) {
                callback(err);
            });
        },
        function(callback) {
            client.query('select * from "user" where username = $1 and password = $2', [username, password], callback);
        },
        function(result, callback) {
            if (result.rowCount === 0) {
                return callback(new Error("No result"));
            }
            var row = result.rows[0];
            // waterfall, not series: series tasks receive no prior result,
            // so the serialized value could never reach the second step.
            async.waterfall([
                function(cb) {
                    passport.serializeUser(row, function(err, serialized) {
                        cb(err, serialized);
                    }); // the original dropped this closing parenthesis
                },
                function(serialized, cb) {
                    if (!serialized) {
                        return cb(new Error('SerializeUser failed'));
                    }
                    passport.deserializeUser(serialized, function(err, user) {
                        cb(err, user);
                    });
                }
            ], function(err) {
                callback(err);
            });
        }
    ], function(err, result) {
        if (err)
            console.error(err); // was console.err, which does not exist
    });
}
Obviously, it does not make the code any more readable. I would suggest additional changes:
These two methods would both take callbacks. Here is a re-write:
// Given a connected client, queries for the user matching the credentials.
// Calls cb(err) on query failure or no match, cb(null, row) on success.
function retrieveUser(client, username, password, cb) {
    client.query('select * from "user" where username = $1 and password = $2', [username, password], function(err, users) {
        if (err) {
            cb(err);
        }
        else if (users.rowCount === 0) { // was `< 0`, which can never be true
            cb(new Error("No user found for username =" + username));
        }
        else {
            // was `result.rows[0]` — `result` is undefined here; the query
            // result is the `users` parameter
            cb(null, users.rows[0]);
        } // the original was missing this closing brace (syntax error)
    });
}
// Runs a user row through passport's serialize then deserialize hooks.
// Calls cb(err) on failure, cb(null, deserializedUser) on success.
// NOTE(review): passport.serializeUser normally registers a callback rather
// than being called per-user — confirm this usage against the passport docs.
function passportSerialize(user, cb) {
    // waterfall, not series: series tasks receive no prior result, so the
    // serialized value could never reach the deserialize step.
    async.waterfall([
        function(callback) {
            passport.serializeUser(user, function(err, res) {
                callback(err, res);
            }); // the original dropped this closing parenthesis (syntax error)
        },
        function(res, callback) {
            if (!res) {
                return callback(new Error('SerializeUser failed'));
            }
            passport.deserializeUser(res, function(err, restored) {
                if (restored) {
                    callback(null, restored);
                } else {
                    callback(err || new Error('deserializeUser failed'));
                }
            });
        }
    ], cb);
}
Thus, our main method now becomes:
// Authenticates username/password and serializes via passport, delegating
// each step to a named helper so the waterfall reads top-to-bottom.
function check_auth_user(username, password) {
    var client = new pg.Client("pg://user:pass@127.0.0.1/database");
    async.waterfall([
        // Wrapped so `this` stays bound to client and only the error is
        // forwarded to the next step.
        function(callback) {
            client.connect(function(err) {
                callback(err);
            });
        },
        function(callback) {
            retrieveUser(client, username, password, callback);
        },
        function(user, callback) {
            passportSerialize(user, callback);
        }
    ], function(err, result) {
        if (err)
            console.error(err); // was console.err, which does not exist
        else
            console.log("User authenticated, let her in");
    });
}
I hope you can see how this is much much better.
In my view, callback hell is really a mixture of two problems: deeply nested anonymous functions, and tight coupling between consecutive steps. Either one in small amounts is fine, but together they make code rigid and unmaintainable. The solution to avoiding callback hell is to avoid these two things: name each step as a stand-alone function, and let a control-flow utility handle the ordering.
Going by these principles, your code can be rewritten as:
// Authenticates username/password; each step is a named local function and
// async.auto wires the dependency order. Calls done(err) or done(null, row).
function check_auth_user(username, password, done) {
    // Make a new client and open the connection.
    function connect(callback) {
        var client = new pg.Client("pg://user:pass@127.0.0.1/database");
        client.connect(function (err) {
            if (err) {
                console.error('could not connect to postgres', err);
                return callback(err);
            }
            callback(null, client);
        });
    }
    // Query the database.
    // NOTE(review): async.auto before v2.0 passes (callback, results); v2.x
    // reversed this to (results, callback) — verify the installed version.
    function query(callback, results) {
        // The connect task calls back with the client, so it is stored
        // under the task name: results.connect, not results.client.
        var client = results.connect;
        var q = 'select * from "user" where username = $1 and password = $2';
        var params = [username, password];
        client.query(q, params, function (err, result) {
            if (err) {
                console.error('error running query', err.stack || err.message);
                return callback(err);
            }
            callback(null, result);
        });
    }
    // Do stuff with the result of the query.
    function handleQueryResult(callback, results) {
        var result = results.query;
        if (result.rowCount === 0) {
            return callback();
        }
        var row = result.rows[0];
        console.log(row);
        passport.serializeUser(function (user, done) {
            done(null, user);
        });
        passport.deserializeUser(function (user, done) {
            // was `done(null, res)` — `res` is not in scope (ReferenceError)
            done(null, user);
        });
        callback(null, row);
    }
    // Let async handle the order. Allows remixing.
    async.auto({
        connect: [connect],
        query: ['connect', query],
        row: ['query', handleQueryResult]
    }, function (err, results) {
        if (err) {
            return done(err);
        }
        // Callback with the row.
        done(null, results.row);
    });
}
I've used async.auto here, even though async.waterfall would do. The reasoning is that it's difficult to move, add, or remove steps in a waterfall, and that's been a source of bugs. With auto you can add steps without worrying, and the ordering/parallelism is handled by async.
This is obviously using a lot more vertical space, but I think that's a small price to pay for the modularity.
There are ways to combat runaway nesting using functional programming techniques. I use the curry module to break loop bodies out into stand-alone routines; usually this carries a very minor performance hit versus nesting (study the curry module's documentation for why). Example:
var curry = require('curry');
var async = require('async');

// The loop body as a stand-alone curried routine: partially applying the
// offset yields the (item, callback) shape that async.map expects.
var eachBody = curry(function (offset, value, next) {
    next(null, offset + value); // first argument is error indicator
});

// Adds data.length to every element, delegating the body to eachBody.
function exampleLoop(data, callback) {
    async.map(data, eachBody(data.length), callback);
}

// Logs the mapped result; the error argument is ignored in this example.
function print(err, result) {
    console.log(result);
}

exampleLoop([2, 4, 6], print); // prints [5, 7, 9]
exampleLoop([2, 4, 6, 7], print); // prints [6, 8, 10, 11]
Instead of:
var async = require('async');

// The same loop written inline: both the body and the result handler are
// anonymous functions nested inside the call.
function exampleLoop(data) {
    var body = function (item, next) {
        next(null, data.length + item);
    };
    var report = function (err, result) {
        console.log(result);
    };
    async.map(data, body, report);
}

exampleLoop([2, 4, 6]); // prints [5, 7, 9]
exampleLoop([2, 4, 6, 7]); // prints [6, 8, 10, 11]
The second example is more compact, but the more nesting you add, the less readable it becomes. Also, by splitting up the implementation, you can reuse component functions in multiple ways, and you can write unit tests for the individual component functions, not only for the high-level functionality.