Ah @dh00mk3tu, I'm sorry — while the code I posted did batch the requests, it didn't break the batches up at all, so you'd still be making all the calls at once. My mistake.
With the for-loop pattern you'd need to move the delay so it runs between each batch instead of removing it completely; with this you can control how much time you'd like between batches:
// Fire off one batch of uploads at a time, pausing 500 ms between batches
// to throttle the request rate. Relies on `rows`, `batchLimit`, `batchSize`,
// `uploadCall`, and `delay` from the surrounding scope.
async function main() {
  for (let batchNumber = 0; batchNumber < batchLimit; batchNumber++) {
    const batchStart = batchNumber * batchSize;
    // Clamp so the final batch can't run past the end of `rows` and hand
    // `undefined` to uploadCall (the original read rows[i] out of range
    // whenever batchLimit * batchSize > rows.length).
    const batchEnd = Math.min(batchStart + batchSize, rows.length);
    console.log("Running batch " + batchNumber);
    for (let i = batchStart; i < batchEnd; i++) {
      const data = rows[i];
      console.log("Running query for row: " + i);
      // Intentionally fire-and-forget: the per-batch delay below is the
      // throttle; we do not wait for individual uploads to finish here.
      uploadCall(data, i);
    }
    await delay(500);
  }
  console.log("Finished running all batches");
}
When batching with promises, wait until every promise in the current batch has finished before starting the next batch:
// Upload `rows` in batches of `batchSize`, waiting for every trigger in a
// batch to settle before starting the next one. Relies on `rows`,
// `batchLimit`, `batchSize`, and `UploadGameStatsToFirebase` from the
// surrounding scope. `slice` self-clamps, so a short final batch is fine.
async function main() {
  for (let b = 0; b < batchLimit; b++) {
    console.log("Running batch " + b);
    const chunk = rows.slice(b * batchSize, b * batchSize + batchSize);
    // Kick off every upload in this chunk, then block until all settle.
    await Promise.all(
      chunk.map((row) =>
        UploadGameStatsToFirebase.trigger({
          additionalScope: {
            data: row
          }
        })
      )
    );
  }
  console.log("Finished running all batches");
}
Have you tested running your queries with a smaller number of rows? I want to double-check that each batch can run successfully as well before trying to run them all together.
Edit: The lodash _.chunk function might also be worth exploring here. I imagine something like this may work as well!
// Upload `rows` in batches using lodash's `_.chunk` to split the work,
// waiting for each batch's triggers to settle before starting the next.
// Relies on `rows`, `batchSize`, `_`, and `UploadGameStatsToFirebase`
// from the surrounding scope.
async function main() {
  const batches = _.chunk(rows, batchSize);
  // `for...of` over entries() instead of `for...in`: for...in walks all
  // enumerable string keys (including inherited ones) rather than the
  // array elements, while entries() yields exactly the [index, element]
  // pairs in order. Output is identical: "Running batch " + i stringifies
  // the index either way.
  for (const [i, batch] of batches.entries()) {
    console.log("Running batch " + i);
    const promises = batch.map((item) => {
      return UploadGameStatsToFirebase.trigger({
        additionalScope: {
          data: item
        }
      });
    });
    await Promise.all(promises);
  }
  console.log("Finished running all batches");
}