What is the best way to limit concurrency when using ES6's Promise.all()?

执念已碎 2020-11-29 21:28

I have some code that is iterating over a list that was queried out of a database and making an HTTP request for each element in that list. That list can sometimes be a reasonably large number, and I would like to make sure I am not hitting the web server with thousands of concurrent HTTP requests.
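
Simplified, the pattern looks something like this (listFromDb and makeHttpRequest are placeholder names), and it fires every request at once:

    const results = await Promise.all(
        listFromDb.map(item => makeHttpRequest(item)) // no limit: every request starts immediately
    );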

17 Answers
  • 2020-11-29 21:51

    This is what I did using Promise.race inside my code:

    const identifyTransactions = async function() {
      let promises = []
      let concurrency = 0
      for (let tx of this.transactions) {
        if (concurrency > 4) // once 5 requests are in flight, wait for one to settle
          await Promise.race(promises).then(r => { promises = []; concurrency = 0 })
        promises.push(tx.identifyTransaction())
        concurrency++
      }
      if (promises.length > 0)
        await Promise.all(promises) // wait for all remaining requests to finish
    }
    
    

    If you want to see an example: https://jsfiddle.net/thecodermarcelo/av2tp83o/5/
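
    A tighter variant of the same Promise.race idea keeps a set of in-flight promises and removes each one as it settles, so only one slot is freed per race instead of resetting the whole batch (just a sketch; identifyTransaction stands in for the real request):

    async function identifyAllLimited(transactions, limit = 5) {
      const inFlight = new Set()
      for (const tx of transactions) {
        if (inFlight.size >= limit)
          await Promise.race(inFlight) // wait until one in-flight request settles
        const p = tx.identifyTransaction()
        inFlight.add(p)
        p.then(() => inFlight.delete(p), () => inFlight.delete(p)) // free the slot either way
      }
      return Promise.all(inFlight) // wait for whatever is still running
    }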

  • 2020-11-29 21:52

    This can be solved with recursion.

    The idea is that you initially send the maximum allowed number of requests, and each request recursively sends the next one as soon as it completes.

    function batchFetch(urls, concurrentRequestsLimit) {
        return new Promise(resolve => {
            var documents = [];
            var index = 0;

            function recursiveFetch() {
                if (index === urls.length) {
                    return;
                }
                // grab the next unprocessed URL, and start another request once this one finishes
                fetch(urls[index++])
                    .then(r => r.text())
                    .then(text => {
                        documents.push(text);
                        if (documents.length === urls.length) {
                            resolve(documents);
                        } else {
                            recursiveFetch();
                        }
                    });
            }

            // kick off the initial batch of concurrent requests
            for (var i = 0; i < concurrentRequestsLimit; i++) {
                recursiveFetch();
            }
        });
    }
    
    var sources = [
        'http://www.example_1.com/',
        'http://www.example_2.com/',
        'http://www.example_3.com/',
        ...
        'http://www.example_100.com/'
    ];
    batchFetch(sources, 5).then(documents => {
       console.log(documents);
    });
    
  • 2020-11-29 21:52

    Recursion is the answer if you don't want to use external libraries.

    downloadAll(someArrayWithData) {
      var self = this;

      // tracker handles one item, then recurses to handle the next,
      // so the requests run strictly one after another
      var tracker = function(next) {
        return self.someExpensiveRequest(someArrayWithData[next])
          .then(function() {
            next++; // advance to the next index
            if (next < someArrayWithData.length) { // is there more data to process?
              return tracker(next); // chain the next promise
            }
          });
      };

      return tracker(0);
    }
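
    Note that this runs the requests strictly one at a time. A sketch of the same idea extended to N parallel chains that share an index counter (someExpensiveRequest is assumed to return a promise):

    function downloadAllLimited(someArrayWithData, limit) {
      var next = 0;
      function chain() {
        if (next >= someArrayWithData.length) return Promise.resolve();
        var item = someArrayWithData[next++]; // claim an index before the async call
        return someExpensiveRequest(item).then(chain); // keep this chain going
      }
      // start `limit` independent chains and wait for all of them to drain the array
      return Promise.all(Array.from({ length: limit }, chain));
    }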
    
  • 2020-11-29 21:54

    bluebird's Promise.map can take a concurrency option to control how many promises should be running in parallel. Sometimes it is easier than .all because you don't need to create the promise array.

    const Promise = require('bluebird')
    
    function getCounts() {
      return Promise.map(users, user => {
        // no need to wrap this in `new Promise`; just return the request's own promise
        return remoteServer.getCount(user) // makes an HTTP request
          .then(() => {
            /* snip */
          });
      }, {concurrency: 10}); // <---- at most 10 http requests at a time
    }
    
  • 2020-11-29 21:58

    Unfortunately there is no way to do it with native Promise.all, so you have to be creative.

    This is the quickest, most concise way I could find without using any outside libraries.

    It makes use of a newer JavaScript feature called an iterator. The iterator basically keeps track of what items have been processed and what haven't.

    In order to use it in code, you create an array of async functions. Each async function asks the same iterator for the next item that needs to be processed. Each function processes its own item asynchronously, and when done asks the iterator for a new one. Once the iterator runs out of items, all the functions complete.

    Thanks to @Endless for inspiration.

    var items = [
        "https://www.stackoverflow.com",
        "https://www.stackoverflow.com",
        "https://www.stackoverflow.com",
        "https://www.stackoverflow.com",
        "https://www.stackoverflow.com",
        "https://www.stackoverflow.com",
        "https://www.stackoverflow.com",
        "https://www.stackoverflow.com",
    ];
    
    var concurrency = 5
    
    // every async function below shares the SAME iterator,
    // so each item is handed out to exactly one of them
    Array(concurrency).fill(items.entries()).map(async (cursor) => {
        for (let [index, url] of cursor) {
            console.log("getting url is ", index, url);
            // run your async task instead of this next line
            var text = await fetch(url).then(res => res.text());
            console.log("text is", text.slice(0, 20));
        }
    })
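
    If you also need the combined results, the same shared-iterator trick can be extended to await every worker and collect output by index (a sketch):

    async function fetchAllLimited(urls, concurrency) {
        const results = [];
        const cursor = urls.entries(); // one shared iterator for all workers
        const workers = Array(concurrency).fill(cursor).map(async (iter) => {
            for (const [index, url] of iter) {
                results[index] = await fetch(url).then(res => res.text()); // preserve input order
            }
        });
        await Promise.all(workers); // done once every worker has drained the iterator
        return results;
    }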

  • 2020-11-29 22:04

    Here is a basic example combining streaming with 'p-limit'. It pipes an HTTP read stream into MongoDB.

    const stream = require('stream');
    const util = require('util');
    const pLimit = require('p-limit');
    const es = require('event-stream');
    const JSONStream = require('JSONStream');
    const streamToMongoDB = require('stream-to-mongo-db').streamToMongoDB;

    const pipeline = util.promisify(stream.pipeline);

    const outputDBConfig = {
        dbURL: 'yr-db-url',
        collection: 'some-collection'
    };
    const limit = pLimit(3); // at most 3 concurrent calls to yr_async_call_to_somewhere()

    const yrAsyncStreamingFunction = async (readStream) => {
        const mongoWriteStream = streamToMongoDB(outputDBConfig);
        const mapperStream = es.map((data, done) => {
            // run the async lookup through the limiter
            const someDataPromise = limit(() => yr_async_call_to_somewhere());

            someDataPromise.then(
                function handleResolve(someData) {
                    data.someData = someData;
                    done(null, data);
                },
                function handleError(error) {
                    done(error);
                }
            );
        });

        await pipeline(
            readStream,
            JSONStream.parse('*'),
            mapperStream,
            mongoWriteStream
        );
    };
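
    For the simpler case in the question (just an array of requests, no streams), p-limit can also be used on its own. A sketch, where getResource is a placeholder for the real HTTP call:

    const pLimit = require('p-limit');

    const limit = pLimit(10); // at most 10 requests in flight at once

    function fetchAll(items) {
        // limit() queues each task and only starts it when a slot is free
        return Promise.all(items.map(item => limit(() => getResource(item))));
    }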
    