Home > database >  for of loop how to call API endpoints with delay?
for of loop how to call API endpoints with delay?

Time:09-28

In my Next.js application I am trying to call API endpoints sequentially from getStaticProps. The third-party API only allows 5 requests per second, so I want to execute the first 5 calls immediately, then wait a certain time before making the remaining 4 calls. I tried to implement sequential requests with a for...of loop, but it doesn't seem to work: all the function calls execute at the same time. What is the proper way to solve this issue?

    // Wrap each call in a thunk: the request must NOT start until the
    // function is invoked. In the original code `testApi1.getInfo1()` ran
    // immediately while the array was being built, so every request fired
    // at once and the "delay" only postponed reading promises that were
    // already in flight.
    const asyncCalls = [
        () => testApi1.getInfo1(),
        () => testApi2.getInfo2(),
        () => testApi3.getInfo3(),
        () => testApi1.getInfo4(),
        () => testApi2.getInfo5(),
        () => testApi3.getInfo6(),
        () => testApi1.getInfo7(),
        () => testApi2.getInfo8(),
        () => testApi3.getInfo9(),
     ]

    // Runs `myTask` after `myDelay` milliseconds.
    // Accepts either a function returning a promise (preferred: execution
    // itself is delayed) or, for backward compatibility, an already-created
    // promise (in which case only its observation is delayed).
    function delayMyPromise(myTask, myDelay) {
      return new Promise(function (resolve, reject) {
        setTimeout(function () {
          // Invoke the thunk only now, so the request starts after the delay.
          const result = typeof myTask === 'function' ? myTask() : myTask
          Promise.resolve(result).then(resolve, reject)
        }, myDelay)
      })
    }


    const resolved = []

    // Sequential: each iteration waits 1s, then starts (and awaits) the
    // next request — at most one request per second.
    for (const item of asyncCalls) {
      const response = await delayMyPromise(item, 1000)
      resolved.push(response)
    }

    console.log(resolved)

CodePudding user response:

You can use a batch approach using Promise.all(). There is no need to send the requests sequentially.

This solution waits for all requests to return and then waits a second before sending the next requests.

// List of items/data that you might want to use to compose your requests:
// nine objects of the form { info: 1 } … { info: 9 }.
let dataForCalls = Array.from({ length: 9 }, (_, i) => ({ info: i + 1 }));


// Resolves (with undefined) after `ms` milliseconds — a thin Promise wrapper around setTimeout.
const waitForMs = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

// Sends the requests in batches of `batchSize`, waiting one second between
// batches, so no more than 5 requests are started per second.
(async () => {
    // number of concurrent requests in one batch
    const batchSize = 5;
    // request counter
    let curReq = 0;
    // as long as there are items in the list continue to form batches
    while (curReq < dataForCalls.length) {
        // a batch is limited by the batch size, or is smaller when fewer items remain
        const end = Math.min(dataForCalls.length, curReq + batchSize);
        // collect the in-flight promises of this batch.
        // Do NOT pre-size with `new Array(batchSize)`: push() appends AFTER
        // the empty slots, leaving `undefined` holes in the array.
        const concurrentReq = [];
        // issue one request for each item in the batch
        for (let index = curReq; index < end; index++) {
            // build the URL with the URL API instead of string concatenation
            // (plain concatenation would drop the required '?' separator)
            const url = new URL("https://apichallenges.herokuapp.com/mirror/request");
            url.search = new URLSearchParams(dataForCalls[index]).toString();
            concurrentReq.push(fetch(url));
            console.log(`sending request ${curReq}...`);
            curReq++;
        }
        // wait until all promises are done or one promise is rejected
        await Promise.all(concurrentReq);
        // use the actual batch length so the range is right for a partial last batch
        console.log(`requests ${curReq - concurrentReq.length}-${curReq} done.`);
        if (curReq < dataForCalls.length) {
            // after requests have returned wait for one second
            // (compare with curReq itself, not curReq + 1, so the pause is
            // not skipped when exactly one item remains)
            console.log(`[${new Date().toISOString()}] Waiting a second before sending next requests...`);
            await waitForMs(1000);
            console.log(`[${new Date().toISOString()}] At least one second has gone.`);
        }
    }
})();

Edit

An even nicer solution can be obtained by dropping the if and using the following instead:

// wait at least a second or if the requests take longer than a second wait till they are done
await Promise.all([waitForMs(1000), Promise.all(concurrentReq)]);

// List of items/data that you might want to use to compose your requests:
// nine objects of the form { info: 1 } … { info: 9 }.
let dataForCalls = Array.from({ length: 9 }, (_, i) => ({ info: i + 1 }));

// Resolves (with undefined) after `ms` milliseconds — a thin Promise wrapper around setTimeout.
const waitForMs = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

// Sends the requests in batches of `batchSize`. Each batch takes at least one
// second: the pause and the batch itself are awaited together, so the next
// batch starts after max(1s, batch duration).
(async () => {
    // number of concurrent requests in one batch
    const batchSize = 5;
    // request counter
    let curReq = 0;
    // as long as there are items in the list continue to form batches
    while (curReq < dataForCalls.length) {
        // a batch is limited by the batch size, or is smaller when fewer items remain
        const end = Math.min(dataForCalls.length, curReq + batchSize);
        // collect the in-flight promises of this batch.
        // Do NOT pre-size with `new Array(batchSize)`: push() appends AFTER
        // the empty slots, leaving `undefined` holes in the array.
        const concurrentReq = [];
        // issue one request for each item in the batch
        for (let index = curReq; index < end; index++) {
            // build the URL with the URL API instead of string concatenation
            // (plain concatenation would drop the required '?' separator)
            const url = new URL("https://apichallenges.herokuapp.com/mirror/request");
            url.search = new URLSearchParams(dataForCalls[index]).toString();
            concurrentReq.push(fetch(url));
            console.log(`sending request ${curReq}...`);
            curReq++;
        }
        // use the actual batch length so the range is right for a partial last batch
        console.log(`requests ${curReq - concurrentReq.length}-${curReq} done.`);
        // wait at least a second, or — if the requests take longer than a
        // second — wait until they are done
        await Promise.all([waitForMs(1000), Promise.all(concurrentReq)]);
    }
})();

If you do not want to wait until the promises finish and just want to send the requests every second, you only need to await the waitForMs() call and forget about the currently running requests. However, in that case you might have more than 5 requests in flight at any one time, as requests might take longer than a second — but you will be guaranteed (within the guarantees of setTimeout()) to fire 5 requests every second. At the end you will still need to await all promises to guarantee that every sent request has been awaited.

// List of items/data that you might want to use to compose your requests:
// nine objects of the form { info: 1 } … { info: 9 }.
let dataForCalls = Array.from({ length: 9 }, (_, i) => ({ info: i + 1 }));

// Resolves (with undefined) after `ms` milliseconds — a thin Promise wrapper around setTimeout.
const waitForMs = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

// Fires a batch of `batchSize` requests every second WITHOUT waiting for the
// previous batch to finish, then awaits all of them at the end. More than 5
// requests may be in flight at once, but only 5 are *started* per second.
(async () => {
    // number of concurrent requests in one batch
    const batchSize = 5;
    // every request promise, across all batches, so none is left floating
    const allRequests = [];
    // request counter
    let curReq = 0;
    // as long as there are items in the list continue to form batches
    while (curReq < dataForCalls.length) {
        // a batch is limited by the batch size, or is smaller when fewer items remain
        const end = Math.min(dataForCalls.length, curReq + batchSize);
        // issue one request for each item in the batch
        for (let index = curReq; index < end; index++) {
            // build the URL with the URL API instead of string concatenation
            // (plain concatenation would drop the required '?' separator)
            const url = new URL("https://apichallenges.herokuapp.com/mirror/request");
            url.search = new URLSearchParams(dataForCalls[index]).toString();
            // collect every request in one array, not just those of this batch
            allRequests.push(fetch(url));
            console.log(`sending request ${curReq}...`);
            curReq++;
        }
        // NOTE: at this point the batch has only been *sent*, not completed
        console.log(`requests ${curReq - batchSize}-${curReq} done.`);
        // wait at least a second (see how setTimeout() works in linked Docs)
        await waitForMs(1000);
    }
    // await all requests, not just those of a batch
    await Promise.all(allRequests);
    console.log(`All requests are done`);
})();

  • Related