From c806cda37f0389c37206734666df8c53f557b661 Mon Sep 17 00:00:00 2001
From: Andrew Bradley
Date: Tue, 18 Jan 2022 22:11:00 -0500
Subject: [PATCH 1/3] fix

---
 src/node_platform.cc | 36 ++++++++++++++++++++++++++++++++----
 src/node_platform.h  |  3 +++
 2 files changed, 35 insertions(+), 4 deletions(-)

diff --git a/src/node_platform.cc b/src/node_platform.cc
index 9787cbb3edc2e2..c5594dc254fd55 100644
--- a/src/node_platform.cc
+++ b/src/node_platform.cc
@@ -441,10 +441,30 @@ void NodePlatform::DrainTasks(Isolate* isolate) {
   std::shared_ptr<PerIsolatePlatformData> per_isolate = ForNodeIsolate(isolate);
   if (!per_isolate) return;
 
-  do {
-    // Worker tasks aren't associated with an Isolate.
-    worker_thread_task_runner_->BlockingDrain();
-  } while (per_isolate->FlushForegroundTasksInternal());
+  // Note: does not execute delayed tasks
+  while(true) {
+    bool has_background_tasks = per_isolate->HasPendingBackgroundTasks();
+    bool did_foreground_work = per_isolate->FlushForegroundTasksInternal();
+
+    // Guaranteed no tasks remain, because background had nothing and foreground cannot possibly have posted to background
+    if(!has_background_tasks && !did_foreground_work) break;
+
+    // Background work is in-progress, and we're sure we did not execute the resulting tasks on the foreground.
+    // Wait for them to arrive
+    if(has_background_tasks && !did_foreground_work) {
+      per_isolate->WaitForNonDelayedForegroundTasks();
+    }
+
+    // Only unaccounted possibility is that we did_foreground_work; loop again
+  }
+}
+
+bool PerIsolatePlatformData::HasPendingBackgroundTasks() {
+  return isolate_->HasPendingBackgroundTasks();
+}
+
+void PerIsolatePlatformData::WaitForNonDelayedForegroundTasks() {
+  foreground_tasks_->Wait();
 }
 
 bool PerIsolatePlatformData::FlushForegroundTasksInternal() {
@@ -572,6 +592,14 @@ void TaskQueue<T>::Push(std::unique_ptr<T> task) {
   tasks_available_.Signal(scoped_lock);
 }
 
+template <class T>
+std::unique_ptr<T> TaskQueue<T>::Wait() {
+  Mutex::ScopedLock scoped_lock(lock_);
+  while (task_queue_.empty() && !stopped_) {
+    tasks_available_.Wait(scoped_lock);
+  }
+}
+
 template <class T>
 std::unique_ptr<T> TaskQueue<T>::Pop() {
   Mutex::ScopedLock scoped_lock(lock_);
diff --git a/src/node_platform.h b/src/node_platform.h
index 4a05f3bba58c8e..47c1d1a8fe06a3 100644
--- a/src/node_platform.h
+++ b/src/node_platform.h
@@ -29,6 +29,7 @@ class TaskQueue {
   std::unique_ptr<T> Pop();
   std::unique_ptr<T> BlockingPop();
   std::queue<std::unique_ptr<T>> PopAll();
+  void Wait();
   void NotifyOfCompletion();
   void BlockingDrain();
   void Stop();
@@ -80,6 +81,8 @@ class PerIsolatePlatformData :
   // flushing.
   bool FlushForegroundTasksInternal();
 
+  bool HasPendingBackgroundTasks();
+
   const uv_loop_t* event_loop() const { return loop_; }
 
  private:

From b9af8c93574d1324286821b57d09712a83b239a3 Mon Sep 17 00:00:00 2001
From: Andrew Bradley
Date: Tue, 18 Jan 2022 23:21:49 -0500
Subject: [PATCH 2/3] fix

---
 deps/v8/src/execution/isolate.cc | 2 ++
 src/node_platform.cc             | 4 ++--
 src/node_platform.h              | 2 ++
 3 files changed, 6 insertions(+), 2 deletions(-)

diff --git a/deps/v8/src/execution/isolate.cc b/deps/v8/src/execution/isolate.cc
index 801cb8b1322fee..1f36b3b78b592b 100644
--- a/deps/v8/src/execution/isolate.cc
+++ b/deps/v8/src/execution/isolate.cc
@@ -3132,7 +3132,9 @@ void Isolate::Deinit() {
 #endif  // V8_ENABLE_WEBASSEMBLY
 
   if (concurrent_recompilation_enabled()) {
+    printf("starting optimizing_compile_dispatcher_->Stop();\n");
     optimizing_compile_dispatcher_->Stop();
+    printf("finished optimizing_compile_dispatcher_->Stop();\n");
     delete optimizing_compile_dispatcher_;
     optimizing_compile_dispatcher_ = nullptr;
   }
diff --git a/src/node_platform.cc b/src/node_platform.cc
index c5594dc254fd55..98f9d023a63fd5 100644
--- a/src/node_platform.cc
+++ b/src/node_platform.cc
@@ -464,7 +464,7 @@ bool PerIsolatePlatformData::HasPendingBackgroundTasks() {
 }
 
 void PerIsolatePlatformData::WaitForNonDelayedForegroundTasks() {
-  foreground_tasks_->Wait();
+  foreground_tasks_.Wait();
 }
 
 bool PerIsolatePlatformData::FlushForegroundTasksInternal() {
@@ -593,7 +593,7 @@ void TaskQueue<T>::Push(std::unique_ptr<T> task) {
 }
 
 template <class T>
-std::unique_ptr<T> TaskQueue<T>::Wait() {
+void TaskQueue<T>::Wait() {
   Mutex::ScopedLock scoped_lock(lock_);
   while (task_queue_.empty() && !stopped_) {
     tasks_available_.Wait(scoped_lock);
diff --git a/src/node_platform.h b/src/node_platform.h
index 47c1d1a8fe06a3..8310ab48caad3e 100644
--- a/src/node_platform.h
+++ b/src/node_platform.h
@@ -83,6 +83,8 @@ class PerIsolatePlatformData :
 
   bool HasPendingBackgroundTasks();
 
+  void WaitForNonDelayedForegroundTasks();
+
   const uv_loop_t* event_loop() const { return loop_; }
 
  private:

From 6721f2d88971ba06cd6c047d2f51ea473488a42a Mon Sep 17 00:00:00 2001
From: Andrew Bradley
Date: Tue, 18 Jan 2022 23:45:59 -0500
Subject: [PATCH 3/3] add reproduction scripts

---
 reproduction/.gitignore         |  3 +++
 reproduction/from-gh-issue-2.js | 26 ++++++++++++++++++++++++++
 reproduction/from-gh-issue.js   |  9 +++++++++
 reproduction/messy.js           | 34 ++++++++++++++++++++++++++++++++++
 reproduction/package.json       |  5 +++++
 reproduction/run.sh             | 17 +++++++++++++++++
 6 files changed, 94 insertions(+)
 create mode 100644 reproduction/.gitignore
 create mode 100644 reproduction/from-gh-issue-2.js
 create mode 100644 reproduction/from-gh-issue.js
 create mode 100644 reproduction/messy.js
 create mode 100644 reproduction/package.json
 create mode 100755 reproduction/run.sh

diff --git a/reproduction/.gitignore b/reproduction/.gitignore
new file mode 100644
index 00000000000000..9d893916f79a6f
--- /dev/null
+++ b/reproduction/.gitignore
@@ -0,0 +1,3 @@
+/node_modules
+/package-lock.json
+/yarn.lock
diff --git a/reproduction/from-gh-issue-2.js b/reproduction/from-gh-issue-2.js
new file mode 100644
index 00000000000000..9c7de449608a85
--- /dev/null
+++ b/reproduction/from-gh-issue-2.js
@@ -0,0 +1,26 @@
+// Issue described here:
+// https://github.com/nodejs/node/issues/36616#issuecomment-757929741
+
+// Any sufficiently large .wasm file
+const wasmPath = require.resolve('@swc/wasm/wasm_bg.wasm');
+
+const fs = require('fs');
+
+// Workaround #36616
+const timerId = setInterval(() => {}, 60000);
+
+process.on('exit', () => {
+  console.log(new Date(), 'process exit');
+});
+
+process.on('beforeExit', () => {
+  console.log(new Date(), 'process beforeExit');
+});
+
+WebAssembly.compile(fs.readFileSync(wasmPath)).then(() => {
+  console.log(new Date(), 'compiled (liftoff)');
+  clearInterval(timerId);
+  if (process.env.EXPLICIT_EXIT) process.exit();
+});
+
+console.log(new Date(), 'WebAssembly.compile()');
diff --git a/reproduction/from-gh-issue.js b/reproduction/from-gh-issue.js
new file mode 100644
index 00000000000000..a0bc2958c01d01
--- /dev/null
+++ b/reproduction/from-gh-issue.js
@@ -0,0 +1,9 @@
+// Issue described here:
+// https://github.com/nodejs/node/issues/36616#issue-774212139
+
+// Any sufficiently large .wasm file
+const wasmPath = require.resolve('@swc/wasm/wasm_bg.wasm');
+
+const fs = require("fs")
+const data = fs.readFileSync(wasmPath);
+WebAssembly.compile(data).then(()=>console.log("module ready"))
diff --git a/reproduction/messy.js b/reproduction/messy.js
new file mode 100644
index 00000000000000..687cad25ab9b32
--- /dev/null
+++ b/reproduction/messy.js
@@ -0,0 +1,34 @@
+// Messy script I have been using to demo the bug and verify the fix
+
+const swcInvocations = +process.argv[2];
+const setTimeoutMs = +process.argv[3];
+
+process.on('beforeExit', () => {
+  console.log('beforeExit emitted');
+});
+
+process.on('exit', () => {
+  console.log('exit emitted');
+  console.log(JSON.stringify(performance.toJSON(), null, 2));
+});
+
+if(!Number.isNaN(setTimeoutMs)) {
+  console.log(`setting ${setTimeoutMs}ms timeout`);
+  setTimeout(() => {
+    console.log(`firing ${setTimeoutMs}ms timeout`);
+  }, setTimeoutMs);
+}
+
+console.time('require("@swc/wasm")');
+const swc = require('@swc/wasm');
+console.timeEnd('require("@swc/wasm")');
+
+// any JS file; doesn't have to be this one. Just want to make SWC actually do some work in case this affects
+// V8's optimization heuristics.
+const src = require('fs').readFileSync(__filename, 'utf8');
+
+console.time(`invoke swc ${ swcInvocations } times`);
+for(let i = 0; i < swcInvocations; i++) {
+  swc.transformSync(src, {});
+}
+console.timeEnd(`invoke swc ${ swcInvocations } times`);
diff --git a/reproduction/package.json b/reproduction/package.json
new file mode 100644
index 00000000000000..22efe57ad436f2
--- /dev/null
+++ b/reproduction/package.json
@@ -0,0 +1,5 @@
+{
+  "dependencies": {
+    "@swc/wasm": "^1.2.129"
+  }
+}
diff --git a/reproduction/run.sh b/reproduction/run.sh
new file mode 100755
index 00000000000000..7e049e7b3f1f96
--- /dev/null
+++ b/reproduction/run.sh
@@ -0,0 +1,17 @@
+#!/usr/bin/env bash
+set -euxo pipefail
+
+# This script runs 3x reproductions demonstrating a few flavors of this bug.
+# I recommend using it as an example and instead running the following commands manually.
+
+# Assuming you've built node from source and the binary resides here:
+local_node=../out/Release/node
+
+yarn
+node ./from-gh-issue.js
+node ./from-gh-issue-2.js
+node ./messy.js 100
+
+"$local_node" ./from-gh-issue.js
+"$local_node" ./from-gh-issue-2.js
+"$local_node" ./messy.js 100