I am working on something where the user can upload N files in one go. However, since I am also sending N parallel requests at once (1 request per file to upload), it can easily choke the server, which then starts throwing 429 (Too Many Requests) errors for the remaining requests.
Is there some way I can hold a request back from being made (without blocking the UI) if there are already K pending requests to the same endpoint?
Here's a minimal code block to give an idea about what I'm currently doing:
// NOTE: forEach fires every upload immediately — this is the
// unthrottled behaviour the question is asking how to limit.
filesToUpload.forEach(function (file) {
  // abstract function which converts the given file to base64,
  // prepares the payload, makes a POST request and returns a promise
  upload(file, successCallback, errorCallback);
});
CodePudding user response:
I believe the best approach would be to send the HTTP request inside an async function.
The async and await keywords enable asynchronous, promise-based behavior
The await keyword will wait for your post request to be completed before sending a new HTTP request to the server where these files are being stored.
You can find more information on the async/await keywords and their syntax in the MDN documentation on async functions.
CodePudding user response:
You could do it sequentially using recursion. This would need modifying to handle errors.
// Upload the files strictly one at a time: each success callback
// triggers the next upload via recursion. Error handling would need
// to be added for production use.
(function uploadNext(queue, onSuccess, onError) {
  if (!queue.length) {
    return; // nothing left to send
  }
  const next = queue.shift();
  upload(
    next,
    function (...results) {
      onSuccess(...results);
      uploadNext(queue, onSuccess, onError);
    },
    onError
  );
})(filesToUpload, successCallback, errorCallback);
CodePudding user response:
Using a loop and a queue should work out for your case.
// Pending files waiting for a free slot, and the list of files
// currently in flight; at most MAX_UPLOADS run concurrently.
const UPLOAD_QUEUE = [];
let progress_queue = [];
const MAX_UPLOADS = 5;

throttledUpload();

/**
 * Starts as many queued uploads as the remaining concurrency budget
 * allows, then moves them from UPLOAD_QUEUE into progress_queue.
 */
function throttledUpload() {
  // Free slots left in the concurrency budget.
  const max = MAX_UPLOADS - progress_queue.length;
  for (let i = 0; i < UPLOAD_QUEUE.length; i++) {
    if (i >= max) break; // budget exhausted (>= so we start at most `max`)
    uploadFile(UPLOAD_QUEUE[i]);
  }
  // Move the files just started into the in-flight list.
  progress_queue = progress_queue.concat(UPLOAD_QUEUE.splice(0, max));
}

/**
 * Uploads one file; on failure the file is re-queued for a later
 * retry. Either way its slot is released and the queue re-checked.
 *
 * @param {*} file - queue entry passed straight through to upload()
 */
async function uploadFile(file) {
  try {
    await upload(file, successCallback, errorCallback);
  } catch (err) {
    // Failed: put the file back so a later pass retries it.
    UPLOAD_QUEUE.push(file);
  } finally {
    // Release this file's in-flight slot (look it up by value — a
    // fixed index goes stale once earlier entries are removed),
    // then start the next queued upload.
    const pos = progress_queue.indexOf(file);
    if (pos !== -1) progress_queue.splice(pos, 1);
    throttledUpload();
  }
}
CodePudding user response:
This one uses classic callbacks: it allows up to K parallel uploads and waits otherwise. The function upload
can be converted to return a Promise.
// Eight dummy file names used to drive the demo.
var filesToUpload = Array.from({ length: 8 }, function (_, i) {
  return "file" + (i + 1) + ".txt";
});
/**
 * Demo stand-in for a real upload: logs the file name after a delay,
 * then invokes the callback.
 *
 * @param {string} filename - name of the file to "upload"
 * @param {Function} [callback] - invoked once the upload finishes
 * @param {number} [delay] - milliseconds before completion; defaults
 *   to a random 500-1500ms to simulate network latency
 */
function upload(filename, callback, delay = Math.random() * 1000 + 500) {
  // abstract function which converts the given file to base64,
  // prepares the payload, makes a POST request and returns a promise
  setTimeout(function () {
    console.log("<!-- uploaded " + filename);
    typeof callback === 'function' && callback();
  }, delay);
}
/**
 * Uploads all files with at most maxK requests in flight at once.
 * Each completed upload frees a slot and pulls the next file from the
 * queue; `callback` fires once every upload has finished.
 *
 * @param {string[]} filesToUpload - queue of file names (consumed in place)
 * @param {number} maxK - maximum number of parallel uploads
 * @param {Function} [callback] - invoked after the last upload completes
 */
function start_sequence(filesToUpload, maxK, callback) {
  var K = maxK; // number of free upload slots

  function do_loop(filesToUpload, callback) {
    if (!filesToUpload.length) {
      // Queue drained; we are only done once every slot is free again.
      if (K == maxK) {
        // end the loop
        typeof callback === 'function' && callback();
      }
      return;
    }
    // Fill every free slot, but never shift past the end of the queue
    // (guards the case where maxK exceeds the number of files).
    while (K > 0 && filesToUpload.length) {
      K--;
      var first = filesToUpload.shift();
      console.log("--> sending " + first);
      upload(first, function () {
        K++; // slot freed by the finished upload
        do_loop(filesToUpload, callback);
      });
    }
  }

  // start the loop
  do_loop(filesToUpload, callback);
}
// Kick off the demo with at most 3 uploads in flight at a time.
start_sequence(filesToUpload, 3, () => {
  console.log("all done!");
});
/* Stack Snippets only: expand the embedded console to fill the frame
   so every upload log line stays visible. */
.as-console-wrapper {
max-height: 100% !important;
top: 0;
}