I have an array which holds the URLs of several files. For example:
var files = ['1.html', '2.html', '3.html'];
I need to read them asynchronously.
The existing answer didn't work for me. I did find an NPM package which did the job: https://www.npmjs.com/package/read-multiple-files. After npm install read-multiple-files
at the command line, here's the code I used:
var readMultipleFiles = require('read-multiple-files');

var files = ['1.html', '2.html', '3.html'];
console.log("\n");

readMultipleFiles(files, 'utf8', function (err, inputFiles) {
    if (err) {
        console.log("Read Error: " + err);
        return;
    }
    var fileOne = inputFiles[0];
    var fileTwo = inputFiles[1];
    // ...
    console.log(fileOne);
    console.log(fileTwo);
});
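If you'd rather not pull in a dependency, the same effect can be had with plain fs and promises. This is a minimal sketch, not the package's implementation; it assumes a Node version where util.promisify and Promise.all are available:

var fs = require('fs');
var util = require('util');
var readFile = util.promisify(fs.readFile);

var files = ['1.html', '2.html', '3.html'];

// Start all reads at once; Promise.all delivers the contents
// in the same order as the files array.
Promise.all(files.map(function (file) {
    return readFile(file, 'utf8');
})).then(function (inputFiles) {
    console.log(inputFiles[0]);
    console.log(inputFiles[1]);
}).catch(function (err) {
    console.log("Read Error: " + err);
});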
When your callback to readFile executes, the for loop will already have finished. So i will be files.length and files[i] will be undefined.
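For illustration, the kind of loop being described looks like this (a hypothetical sketch; the cache object matches the fix below):

var fs = require('fs');
var files = ['1.html', '2.html', '3.html'];
var cache = {};

for (var i = 0; i < files.length; i++) {
    fs.readFile(files[i], 'utf8', function (error, data) {
        // By the time this callback runs, the loop has finished:
        // i is 3, so files[i] is undefined and the cache key is wrong.
        // (The read itself works, because files[i] in the readFile
        // call above was evaluated synchronously, at loop time.)
        cache[files[i]] = data;
    });
}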
To mitigate this, you need to wrap the variables in a closure. The simplest way to do this is to create a function which does your readFile call, and call that in the loop:
var fs = require('fs');
var cache = {};

function read(file) {
    fs.readFile(file, 'utf8', function (error, data) {
        if (error) throw error;
        cache[file] = data;
    });
}

for (var i = 0; i < files.length; i++) {
    read(files[i]);
}
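One thing this version doesn't give you is a signal that every file has been read. A common pattern is to count completed callbacks; here is a minimal sketch, where readAll and done are hypothetical names for illustration:

var fs = require('fs');
var cache = {};

function readAll(files, done) {
    var pending = files.length;
    files.forEach(function (file) {
        fs.readFile(file, 'utf8', function (error, data) {
            if (error) return done(error);
            cache[file] = data;
            // When the last callback fires, report completion.
            // (A real version would guard against done firing
            // more than once if several reads fail.)
            if (--pending === 0) done(null, cache);
        });
    });
}

readAll(['1.html', '2.html', '3.html'], function (err, contents) {
    if (err) throw err;
    console.log(contents);
});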
For even better execution control, you might want to look into the async library (https://www.npmjs.com/package/async):
var fs = require('fs');
var async = require('async');

function readAsync(file, callback) {
    fs.readFile(file, 'utf8', callback);
}

async.map(files, readAsync, function (err, results) {
    if (err) throw err;
    // results = ['file 1 content', 'file 2 content', ...]
});
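Note that async.map runs the reads in parallel but still hands you results in the same order as files, regardless of which read finishes first. The helper exists to pin the 'utf8' encoding: passing fs.readFile to async.map directly would also work, but you would get Buffers instead of strings.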
Edit: Made use of a helper function for the async example.