问题
I am attempting to write a "robocopy /mir" like function within Node.js and cannot seem to wrap my head around how to properly execute several async functions in order.
Some background:
- The script is run on Windows, therefore, I needed to find some way to copy files while retaining modification time AND receiving progress notifications.
- To solve this problem, I went ahead & wrote my copy function in .NET (calling it with Edge.js)--this copy function simply calls back to a Node function reporting file copy progress. This piece works flawlessly.
To have the files copy in order, my first thought was to do something like follows:
// Kick off a copy for every entry (NOTE: this starts all copies at once —
// the very problem described below). One promise per file is collected so
// overall completion can be observed with q.all(promises).
// Bug fix: the log line referenced `sourcePath`, which is undefined in this
// scope — the object key holds the source path.
Object.keys(filesToCopy).forEach(function(key) {
  var deferred = q.defer();
  // Payload consumed by the Edge.js/.NET copy function.
  var payload = {
    sourcePath: key,
    destPath: filesToCopy[key],
    progressCallback: progressCallback
  };
  console.log('Copying %s...', key);
  // Edge.js called here; the node-style callback settles the deferred.
  copyFile(payload, deferred.makeNodeResolver());
  deferred.promise.then(function(result) {
    console.log('%s complete.', result);
  }, function(err) {
    console.error('Error: ', err.message);
  });
  promises.push(deferred.promise);
});
Unfortunately, this (as expected) begins copying each file as soon as the .NET function is called; therefore, I get progress notifications for all files at once, giving me output like:
1%
2%
1%
2%
3%
3%
It seems like I need a way to queue up the work to be done before firing it off all at once, with each item completing before the next proceeds. When all items are complete I would need to be notified. The solution seems simple enough but continues to elude me as every angle I try comes with another issue. Any help would be greatly appreciated, thank you!
EDIT: As stated in my comment, the answer Bergi provided was utilizing a function which did in fact return a promise whilst my Edge.js function did not. I was able to resolve my issue first by utilizing an array instead of an object for filesToCopy
, then doing something like so:
return filesToCopy.reduce(function(prev, curr) {
return prev.then(function() {
var deferred = q.defer();
copyFile(curr, function(err, result) {
deferred.resolve(result);
console.log('Completed %s', result);
});
return deferred.promise;
})
}, q());
This may not be the best way to do this but it works for my uses.
回答1:
Maybe something like that will do the trick :
// JSON pretty-print helper: encode `val`, indenting with `space`
// (compact output when no indent is given).
var $j = function(val, space) {
  var indent = space || '';
  return JSON.stringify(val, null, indent);
}
// Append a log line to the page.
// Bug fix: the original emitted mismatched markup ('</div></pre>');
// the tags must close in the reverse order they were opened.
var log = function(val) {
  document.body.insertAdjacentHTML('beforeend', '<div><pre>' + val + '</pre></div>')
}
// Build five fake file descriptors. Each exposes a load() that logs the
// start, resolves with the file's name after a simulated 1-second delay,
// then logs completion and passes the name along.
var files = '12345'.split('').map(function(v) {
  var name = 'file_' + v + '.js';
  return {
    name: name,
    load: function() {
      var self = this;
      return new Promise(function(resolve) {
        log('loading : ' + self.name);
        // simulate the actual loading work with a timer
        setTimeout(function() {
          resolve(self.name);
        }, 1000);
      }).then(function(loadedName) {
        // loading finished — report it and forward the name
        log('loaded : ' + loadedName);
        return loadedName;
      });
    }
  };
});
// Chain the loads sequentially: each file's load() starts only after the
// previous one has resolved. The seed is an already-resolved promise, so
// the first load fires immediately; the final promise is intentionally
// unused (the loads are run purely for their side effects).
files.reduce(function(chain, file) {
  return chain.then(function() {
    return file.load();
  });
}, Promise.resolve(1));
回答2:
Use async.eachSeries
on arrays, or async.forEachOfSeries
on objects.
Looping an object
var async = require('async');

// Keys are file paths, values hold per-file metadata (empty here).
var filesObject = {
  'file/path/1': {},
  'file/path/2': {}
};

// Visit each key/value pair strictly in series; allDone fires at the end.
async.forEachOfSeries(filesObject, copyFileFromObj, allDone);
// Series worker for forEachOfSeries: `key` is the file path, `value` its
// (unused) metadata. Invoking `callback` tells async to move to the next
// entry; pass it an error to abort the series.
function copyFileFromObj(value, key, callback) {
  var message = 'Copying file ' + key + '...';
  console.log(message);
  // signal completion so the next file can start
  callback();
}
// Final callback for the series loop: logs the error message when the loop
// aborted with one, then always reports completion.
function allDone(err) {
  if (err) console.error(err.message);
  console.log('All done.');
}
Looping an array
var async = require('async');

// Plain list of file paths to copy, in order.
var filesArray = [
  'file/path/1',
  'file/path/2'
];

// Process the array one element at a time; allDone fires when finished.
async.eachSeries(filesArray, copyFile, allDone);
// Series worker for eachSeries: logs the path being copied, then invokes
// `callback` so async advances to the next element (pass an error to stop).
function copyFile(file, callback) {
  var message = 'Copying file ' + file + '...';
  console.log(message);
  // signal completion so the next file can start
  callback();
}
// Final callback for the series loop: logs the error message when the loop
// aborted with one, then always reports completion.
function allDone(err) {
  if (err) console.error(err.message);
  console.log('All done.');
}
Working example here: https://tonicdev.com/edinella/sync-loop-of-async-operations
来源:https://stackoverflow.com/questions/33549460/how-can-i-properly-call-a-list-of-async-functions-in-order