I am a beginner at Node.js. My problem is that I have 16 clients sending requests to the server. The server must aggregate all 16 requests and send the combined result back to every client (each client will receive data from all 16 requests, 15 of which come from the other clients). How can I make the server (Node.js) wait until all the requests have arrived for aggregation, and then send the result to each client? Can you help me?
CodePudding user response:
You have to ensure that all the clients send a parameter called `batchId`. Then you can simply:

- Keep a cache.
- On every client request, push the request body and the response object into the cache under that `batchId`.
- On every client request, check whether the batch size has reached 16.
- If it has reached 16, aggregate the bodies, respond to all the waiting requests at once, and delete the batch from the local cache.
// Cache of pending batches, keyed by batchId.
const requestsCache = {};

/**
 * Aggregates every request collected for the given batch, replies to all
 * of the waiting clients with the same aggregated result, and removes the
 * batch from the cache.
 *
 * @param {string} batchId - Identifier shared by all requests of one batch.
 */
function aggregateAndRespond(batchId) {
  const pending = requestsCache[batchId];
  // Aggregate the "body" properties of all collected requests.
  const result = processRequests(pending);
  // Answer every held-open client response with the aggregated payload.
  pending.forEach(({ response }) => response.json(result));
  delete requestsCache[batchId];
}
/**
 * Collects incoming client requests per batchId. Once 16 requests for the
 * same batch have arrived, they are aggregated and all 16 waiting
 * responses are answered at once by aggregateAndRespond.
 *
 * Bug fix: the freshly created request list must be stored back into
 * requestsCache. The original `requestsCache[batchId] || []` produced a
 * new array that was never assigned to the cache, so every request
 * started over with an empty array and the batch could never reach 16.
 */
app.post('/client-requests', function (request, response) {
  const body = request.body;
  const batchId = body.batchId;
  // Create the batch entry on first sight and KEEP it in the cache.
  if (!requestsCache[batchId]) {
    requestsCache[batchId] = [];
  }
  const arrivedRequests = requestsCache[batchId];
  // Hold on to the response object so we can reply later.
  arrivedRequests.push({ response, body });
  if (arrivedRequests.length === 16) {
    aggregateAndRespond(batchId);
  }
});
Update:
// Cache of batch bookkeeping records, keyed by batchId.
const requestsCache = {};

/**
 * Aggregates every request collected for `batchId`, replies to all of the
 * waiting clients, marks the batch as processed, and schedules the cache
 * entry for deletion two hours later.
 *
 * @param {string} batchId - Identifier shared by all requests of one batch.
 */
function aggregateAndRespond(batchId) {
  const hours = 2;
  const batchDetails = requestsCache[batchId];
  const arrivedRequests = batchDetails.requests;
  // Aggregate the "body" properties of the collected requests.
  const result = processRequests(arrivedRequests);
  arrivedRequests.forEach(({ response }) => response.json(result));
  batchDetails.processed = true;
  // The record is kept around (flagged "processed") so that stragglers of
  // the same batch arriving after the timeout are simply dropped instead
  // of silently starting a brand-new batch. If immediate cleanup is
  // desired, replace the timer below with "delete requestsCache[batchId];".
  // The record itself is finally evicted after 2 hours; shorten the
  // duration if that suits your traffic better.
  setTimeout(function someMoreTimeLater() {
    delete requestsCache[batchId];
  }, hours * 60 * 60 * 1000);
}
/**
 * Registers one client request under `batchId` and (re)arms the
 * aggregation timer: aggregation fires once no new request for the batch
 * has arrived for `timeoutInMinutes`.
 *
 * Bug fix: the default batch record was never written back into
 * requestsCache, so every request created a fresh, immediately-lost
 * record and requests were never actually grouped into one batch.
 * Also returns an explicit `false` on the normal path (the original
 * implicitly returned `undefined`; the caller's `=== true` check keeps
 * this backward compatible).
 *
 * @param {string} batchId          - Identifier shared by the batch.
 * @param {number} timeoutInMinutes - Quiet period before aggregation runs.
 * @param {{response: object, body: object}} requestData - Held response + body.
 * @returns {boolean} true when the batch was already processed (caller
 *                    should answer the request immediately), false otherwise.
 */
function tryAggregateAndRespond(batchId, timeoutInMinutes, requestData) {
  let batchDetails = requestsCache[batchId];
  if (!batchDetails) {
    batchDetails = {
      processed: false,
      requests: [],
      aggregateTimerReference: null
    };
    // Store the new record so subsequent requests join the SAME batch.
    requestsCache[batchId] = batchDetails;
  }
  if (batchDetails.processed === true) {
    return true;
  }
  batchDetails.requests.push(requestData);
  // The timer is reset on every arrival; aggregation kicks in only after
  // the configured quiet period following the last request. To arm the
  // timer only on the first request instead, delete the clearTimeout call
  // and uncomment the "if" block after it.
  clearTimeout(batchDetails.aggregateTimerReference);
  //if(batchDetails.aggregateTimerReference !== null) {
  //  return false;
  //}
  batchDetails.aggregateTimerReference = setTimeout(function someTimeLater() {
    aggregateAndRespond(batchId);
  }, timeoutInMinutes * 60 * 1000);
  return false;
}
// Express endpoint: each client posts its share of the batch here.
// The response is deliberately left open until the batch is aggregated;
// only requests arriving for an already-processed batch are answered
// immediately.
app.post('/client-requests', function (request, response) {
  const timeoutInMinutes = 2;
  const { body } = request;
  const { batchId } = body;
  const alreadyProcessed = tryAggregateAndRespond(batchId, timeoutInMinutes, { response, body });
  if (alreadyProcessed === true) {
    response.json({ message: 'Batch already processed', batchId });
  }
});