I have an array that contains an array of promises, and each inner array could have either 4k, 2k or 500 promises.
In total there are around 60k promises and I may test it with other values as well.
Now I need to execute the Promise.all(BigArray[0])
.
Once the first inner array is done, I need to execute the next Promise.all(BigArray[1])
and so on and so on.
If I try to execute a Promise.all(BigArray)
it's throwing:
fatal error call_and_retry_2 allocation failed - process out of memory
I need to execute each batch of promises in series, not in parallel, which I think is what Node is doing. I shouldn't use new libs; however, I am willing to consider any answer!
Edit:
Here is an example piece of code:
// Starts a DB lookup for every argument in InnerArray, waits until all of
// them have resolved, then hands the combined results to doSomethingWithResults.
function getInfoForEveryInnerArgument(InnerArray) {
  const lookups = InnerArray.map((argument) => getDBInfo(argument));
  return Promise.all(lookups).then((results) => doSomethingWithResults(results));
}
// Demonstrates the problem: every inner batch is started immediately, so all
// ~60k async operations run at once and the process runs out of memory.
function mainFunction() {
// NOTE(review): BigArray is assigned without const/let/var, creating an
// implicit global — declare it explicitly.
BigArray = [[argument1, argument2, argument3, argument4], [argument5, argument6, argument7, argument8], ....];
//the sum of all arguments is over 60k...
// This map starts ALL batches right away; the promises only track work that
// is already running.
const promiseArrayCombination = _.map(BigArray, (InnerArray, key) => getInfoForEveryInnerArgument(InnerArray));
// Promise.all only waits for already-started operations — it does not
// serialize them. The resulting promise is also never returned and has no
// rejection handler.
Promise.all(promiseArrayCombination).then((fullResults) => {
console.log(fullResults);
return fullResults;
})
}
Your question is a bit misnamed which may have confused some folks in this question and in the previous version of this question. You are trying to execute a batch of async operations in series, one batch of operations, then when that is done execute another batch of operations. The results of those async operations are tracked with promises. Promises themselves represent async operations that have already been started. "Promises" aren't executed themselves. So technically, you don't "execute a batch of promises in series". You execute a set of operations, track their results with promises, then execute the next batch when the first batch is all done.
Anyway, here's a solution to serializing each batch of operations.
You can create an inner function which I usually call next()
that lets you process each iteration. When the promise resolves from processing one innerArray, you call next()
again:
// Serializes the batches: each call to next() processes one inner array and
// schedules the following batch only after the current one has resolved.
// Resolves with an array of per-batch results; rejects on the first error.
function mainFunction() {
return new Promise(function(resolve, reject) {
var bigArray = [[argument1, argument2, argument3, argument4], [argument5, argument6, argument7, argument8], ....];
//the sum of all arguments is over 60k...
// per-batch results, collected in batch order
var results = [];
// index of the next inner array to process
var index = 0;
// Process one batch, then recurse once its promise resolves.
function next() {
if (index < bigArray.length) {
getInfoForEveryInnerArgument(bigArray[index++]).then(function(data) {
results.push(data);
next();
}, reject);
} else {
// all batches done — settle the outer promise
resolve(results);
}
}
// start first iteration
next();
});
}
This also collects all the sub-results into a results array and returns a master promise whose resolved value is this results array. So, you could use this like:
mainFunction().then(function(results) {
// final results array here and everything done
}, function(err) {
// some error here
});
You could also use the .reduce()
design pattern for iterating an array serially:
// Same serialization expressed with reduce: the accumulator is a promise
// chain that appends one batch per array element, so each batch starts only
// when the chain so far (and thus the previous batch) has resolved.
function mainFunction() {
var bigArray = [[argument1, argument2, argument3, argument4], [argument5, argument6, argument7, argument8], ....];
return bigArray.reduce(function(p, item) {
return p.then(function(results) {
// previous batches are done; run this batch and append its result
return getInfoForEveryInnerArgument(item).then(function(data) {
results.push(data);
return results;
})
});
// seed: an already-resolved promise carrying the (empty) results array
}, Promise.resolve([]));
}
This creates more simultaneous promises than the first option and I don't know if that is an issue for such a large set of promises (which is why I offered the original option), but this code is cleaner and the concept is convenient to use for other situations too.
FYI, there are some promise add-on features built for doing this for you. In the Bluebird promise library (which is a great library for development using promises), they have Promise.map()
which is made for this:
function mainFunction() {
var bigArray = [[argument1, argument2, argument3, argument4], [argument5, argument6, argument7, argument8], ....];
return Promise.map(bigArray, getInfoForEveryInnerArgument);
}
In addition, if original array is not of promises but of objects that should be processed, batch processing can be done without an external dependency using combination of Array.prototype.map()
, Array.prototype.slice()
and Promise.all()
:
// Main batch parallelization function.
// Chains one Promise.all() per slice of `atonce` tasks onto `pstart`, so a
// batch starts only after every previous batch has settled. Returns the
// promise for the final batch.
function batch(tasks, pstart, atonce, runner, pos) {
  var offset = pos || 0;
  if (offset >= tasks.length) {
    return pstart;
  }
  var chained = pstart.then(function() {
    output('Batch:', offset / atonce + 1);
    var current = tasks.slice(offset, offset + atonce);
    return Promise.all(current.map(function(task) {
      return runner(task);
    }));
  });
  return batch(tasks, chained, atonce, runner, offset + atonce);
}
// Output function for the example: appends the space-joined arguments to the
// result panel and keeps the page scrolled to the bottom.
function output() {
  var line = Array.prototype.slice.call(arguments).join(' ') + "<br />";
  document.getElementById("result").innerHTML += line;
  window.scrollTo(0, document.body.scrollHeight);
}
/*
 * Example code.
 * Note: Task runner should return Promise.
 */
function taskrunner(task) {
  // Simulate async work: resolve after task.delay ms, logging on completion.
  return new Promise((resolve) => {
    setTimeout(() => {
      output('Processed:', task.text, 'Delay:', task.delay);
      resolve();
    }, task.delay);
  });
}
var taskarray = [];
// Rebuild the global task list with `size` items, each carrying a random
// delay between 500 and 1000 ms (in 10 ms steps) and a display label.
function populatetasks(size) {
  taskarray = Array.from({ length: size }, function(ignored, index) {
    return {
      delay: 500 + Math.ceil(Math.random() * 50) * 10,
      text: 'Item ' + (index + 1)
    };
  });
}
// Clears the example's output panel.
function clean() {
  var panel = document.getElementById("result");
  panel.innerHTML = '';
}
var init = Promise.resolve();
// Reads the form values, builds a fresh task list, and chains a new batch
// run onto `init` so repeated clicks are processed one run after another.
function start() {
  var bsize = parseInt(document.getElementById("batchsize").value, 10);
  var tsize = parseInt(document.getElementById("taskssize").value, 10);
  populatetasks(tsize);
  init = batch(taskarray.slice(), init, bsize, taskrunner);
}
<input type="button" onclick="start()" value="Start" />
<input type="button" onclick="clean()" value="Clear" /> Batch size:
<input id="batchsize" value="4" size="2"/> Tasks:
<input id="taskssize" value="10" size="2"/>
<pre id="result"></pre>
@jfriend00 Just adding to your answer, using async/await with reduce:
/**
 * Runs getInfoForEveryInnerArgument over bigArray strictly in series and
 * resolves with the collected results in input order.
 *
 * The original wrapped the reduce in a synchronous try/catch, but that can
 * never observe rejections from the async callbacks (they surface on the
 * returned promise, not as synchronous throws), and the catch block only
 * rethrew — dead code either way. An async function with await propagates
 * rejections to the returned promise naturally.
 *
 * @param {Array} bigArray - items (inner arrays) to process one at a time
 * @param {Function} getInfoForEveryInnerArgument - async worker per item
 * @returns {Promise<Array>} per-item results, in input order
 */
async function runPromisesInSeries(bigArray, getInfoForEveryInnerArgument) {
  const results = []
  for (const item of bigArray) {
    // Wait for each batch to finish before starting the next one.
    results.push(await getInfoForEveryInnerArgument(item))
  }
  return results
}
You can do it recursively. For example, here I needed to put about 60k documents into mongo, but it was too big to do in one step, so I take 1k documents, send them to mongo, and once that finishes I take another 1k documents, etc.
exports.rawRecursive = (arr, start) => {
//ending condition
if (start > arr.length) {
return;
}
Rawmedicament.insertManyAsync(_.slice(arr, start, start + 1000)).then(() => {
//recursive
exports.rawRecursive(arr, start + 1000);
});
};
If you want to be notified when everything is done, you can put a callback in the ending condition or, if you prefer Promises, call resolve() there.
来源:https://stackoverflow.com/questions/37213316/execute-batch-of-promises-in-series-once-promise-all-is-done-go-to-the-next-bat