This code doesn't work as expected:
function sleep(ms) {
  return new Promise(resolve => setTimeout(resolve, ms));
}

function diff(expected) {
  let x = expected - Date.now();
  if (x > 0) {
    return `earlier ${x} ms`;
  } else if (x < 0) {
    return `late ${-x} ms`;
  } else {
    return `just in time`;
  }
}

start = Date.now();

async function demo() {
  let loop_count = 5;
  for (let i = 0; i < loop_count; i++) {
    console.log(diff(start + i * 1000) + `: Waited ${i} seconds...`);
    await sleep(i * 1000);
  }
  console.log(diff(start + loop_count * 1000) + ': Done');
}

demo();
Output is:
$ node test.js
just in time: Waited 0 seconds...
earlier 993 ms: Waited 1 seconds...
earlier 993 ms: Waited 2 seconds...
late 10 ms: Waited 3 seconds...
late 2011 ms: Waited 4 seconds...
late 5013 ms: Done
Where does this difference come from?
CodePudding user response:
Where does this difference come from?
Because you measure it incorrectly. First of all, you call console.log(diff(start + i * 1000) + `: Waited ${i} seconds...`); before the await sleep(i * 1000);. This check has to come after the sleep.

The other problem is that you never change the start value and always use that same start as the reference for each iteration. So for i > 1 you get wrong results: you already waited 1 second, so for i = 2 the total time elapsed since start is (1 + 2) seconds, for i = 3 the total time elapsed since start is (1 + 2 + 3) seconds, and so on. That is also why you see "late 2011 ms" for i = 4: the log runs before the 4-second sleep, after roughly 0 + 1 + 2 + 3 = 6 seconds have elapsed, but it compares against start + 4 seconds.
Updating the code so that the previously slept seconds are also included in the calculation gives the expected result:
function sleep(ms) {
  return new Promise(resolve => setTimeout(resolve, ms));
}

function diff(expected) {
  let x = expected - Date.now();
  if (x > 0) {
    return `earlier ${x} ms`;
  } else if (x < 0) {
    return `late ${-x} ms`;
  } else {
    return `just in time`;
  }
}

// Total number of seconds slept once the iteration for num has finished: 0 + 1 + ... + num
function sumUp(num) {
  let res = 0;
  for (let i = 0; i <= num; i++) {
    res += i;
  }
  return res;
}

start = Date.now();

async function demo() {
  let loop_count = 5;
  for (let i = 0; i < loop_count; i++) {
    await sleep(i * 1000);
    console.log(diff(start + sumUp(i) * 1000) + `: Waited ${i} seconds...`);
  }
  // the loop slept 0 + 1 + ... + (loop_count - 1) seconds in total
  console.log(diff(start + sumUp(loop_count - 1) * 1000) + ': Done');
}
demo();
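As a side note (not part of the original answer): sumUp(i) is just the triangular number 0 + 1 + ... + i, so the same total could also be computed in closed form. A minimal sketch; sumUpClosedForm is a name introduced here purely for illustration:

// Closed-form equivalent of sumUp: 0 + 1 + ... + num = num * (num + 1) / 2
function sumUpClosedForm(num) {
  return num * (num + 1) / 2;
}

console.log(sumUpClosedForm(4)); // 10, the total seconds slept once the i = 4 iteration has finished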
The result will always be late because setTimeout waits for at least the given amount of time, never less. And with the way you measure, that error piles up with every timer that gets called.
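If you want to see each timer's own error without that accumulation, one option (not from the original answer) is to measure every sleep against its own reference point instead of the single global start. A minimal sketch, assuming the sleep and diff helpers from above are in scope; demoPerTimer is a name introduced here for illustration:

async function demoPerTimer() {
  for (let i = 0; i < 5; i++) {
    const before = Date.now();           // reference point for this iteration only
    await sleep(i * 1000);
    // each line should now report being late by only a few milliseconds,
    // because no error from earlier timers is carried over
    console.log(diff(before + i * 1000) + `: Waited ${i} seconds...`);
  }
}

demoPerTimer();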