I've got multiple long running tasks, as in longer than ~10ms, that impact the responsiveness of the browser. The worst ones, such as loading and parsing 3D models from files, are already offloaded to Web Workers so that they won't affect the render loop.
Some tasks, however, aren't easily ported to Workers and therefore have to be distributed over multiple frames in the main thread. Instead of doing a 1 second task in one go, I'd like to split it into ~5ms packages to give the browser the chance to execute other events (mouse move, requestAnimationFrame, ...) in between.
Generator functions, in combination with setTimeout, seem to be the easiest way to do that. I've hacked something together that does the job but I'm wondering if there is a nicer/cleaner way to solve this issue.
The code below computes the mean of 100 million invocations of Math.random(). The first version computes the mean in one go, but stalls the browser for ~1.3 seconds. The second version abuses generator functions to yield after every 5 million points, thereby giving the browser the chance to execute other events (mouse move) in between. The generator function is repeatedly called through a setTimeout loop, until it has processed all 100 million samples.
<html>
<head></head>
<body>
<script>
let samples = 100 * 1000 * 1000;
{ // baseline: process every sample in a single pass, blocking the main thread
  // Computes the mean of `samples` calls to Math.random() in one go and
  // logs the elapsed time, the running sum, and the resulting mean.
  const run = () => {
    const start = performance.now();
    let sum = 0.0;
    for (let i = 0; i < samples; i++) {
      sum += Math.random();
    }
    const mean = sum / samples;
    const duration = performance.now() - start;
    console.log(`single-run: duration: ${duration}`);
    console.log(`single-run: sum: ${sum}`);
    console.log(`single-run: mean: ${mean}`);
  };
  run();
}
{ // distribute the work over many small tasks to keep the browser responsive
  // move mouse to check if this callback is executed in between
  document.body.addEventListener("mousemove", () => {
    console.log("mouse moved");
  });

  // Generator that walks through all samples, pausing (yielding the running
  // sum) after every package of 5 million so the caller can hand control
  // back to the event loop between packages.
  function* distributedRun() {
    const start = performance.now();
    const packageSize = 5 * 1000 * 1000;
    let sum = 0.0;
    for (let i = 0; i < samples; i++) {
      sum += Math.random();
      if (i % packageSize === 0) {
        yield sum;
      }
    }
    const mean = sum / samples;
    const duration = performance.now() - start;
    console.log(`distributed-run: duration: ${duration}`);
    console.log(`distributed-run: sum: ${sum}`);
    console.log(`distributed-run: mean: ${mean}`);
    yield sum;
  }

  const generatorInstance = distributedRun();
  // Pump the generator one package at a time; setTimeout(…, 0) lets queued
  // events (mouse move, rAF, ...) run between packages.
  const loop = () => {
    const result = generatorInstance.next();
    console.log(`distributed-run intermediate result: ${result.value}`);
    if (!result.done) {
      setTimeout(loop, 0);
    }
  };
  loop();
}
</script>
</body>
</html>
ES2018 has async iterators which kind of sound like what I'm looking for but I'm not sure if they're really meant for this kind of problem. Using it like this still stalls the browser:
for await (const result of distributedRun()) {
...
}
(I tried adding async here and there, including on the distributedRun() function, but to be honest I'm still learning the details of async/await.)
Here is a slightly modified version of your code. If you adjust chunk
depending on your computation's complexity and the amount of lag you can allow, it should work fine.
// Total number of random samples and how many iterations to process per
// chunk before yielding control back to the event loop.
let samples = 1e8;
let chunk = 1e5;
/**
 * Computes the mean of `totalSamples` invocations of Math.random(),
 * awaiting a zero-delay timeout after every `chunkSize` iterations so the
 * event loop can service pending events (mouse move, rAF, ...) in between.
 *
 * @param {number} [totalSamples=samples] - total number of random samples to draw.
 * @param {number} [chunkSize=chunk] - iterations per chunk before yielding to the event loop.
 * @returns {Promise<number>} resolves with the computed mean.
 */
async function run(totalSamples = samples, chunkSize = chunk) {
  let sum = 0.0;
  for (let i = 0; i < totalSamples; i++) {
    sum += Math.random();
    // Skip i === 0: `0 % chunkSize === 0`, which would waste a tick
    // before any meaningful work has been done.
    if (i > 0 && i % chunkSize === 0) {
      console.log("finished chunk")
      // wait for the next tick
      await new Promise(res => setTimeout(res, 0));
    }
  }
  const mean = sum / totalSamples;
  console.log("finished computation", mean);
  return mean;
}
// Kick off the computation on the next tick. Attach a rejection handler so
// a failure inside run() surfaces as a logged error instead of an
// unhandled promise rejection.
setTimeout(() => run().catch(console.error), 0);
If you love us? You can donate to us via Paypal or buy me a coffee so we can maintain and grow! Thank you!
Donate Us With