Compare commits

...

5 Commits

Author SHA1 Message Date
Sosuke Suzuki
47b77ef510 Add missing paren 2025-12-30 15:46:32 +09:00
Sosuke Suzuki
c93fcdd998 Update WEBKIT_VERSION 2025-12-30 12:24:55 +09:00
Sosuke Suzuki
aa50149614 Update WEBKIT_VERSION 2025-12-30 02:10:03 +09:00
Sosuke Suzuki
0897d13e11 Add benchmarks 2025-12-29 23:37:41 +09:00
Sosuke Suzuki
1b3500dca2 Update bindings for reduced QueuedTask size
- Remove performMicrotaskFunction argument from BunPerformMicrotaskJob
- Async context is now handled directly in JSMicrotask.cpp
- Update queueMicrotaskJob to use 4 arguments instead of 5
2025-12-29 21:25:06 +09:00
6 changed files with 333 additions and 15 deletions

View File

@@ -0,0 +1,158 @@
// Microtask Memory Efficiency Benchmark
// Measures memory usage when many microtasks are queued simultaneously
// Render a byte count as a human-readable string: "<n> B", "<x.xx> KB", or "<x.xx> MB".
const formatBytes = bytes => {
  const KB = 1024;
  const MB = KB * KB;
  if (bytes < KB) return bytes + " B";
  if (bytes < MB) return (bytes / KB).toFixed(2) + " KB";
  return (bytes / MB).toFixed(2) + " MB";
};
// Force a full garbage collection when the runtime exposes one:
// Bun.gc(true) under Bun, global.gc() under `node --expose-gc`.
// Silent no-op on runtimes that provide neither.
const gc = () => {
  if (typeof Bun !== "undefined") {
    Bun.gc(true);
    return;
  }
  if (typeof global !== "undefined" && global.gc) {
    global.gc();
  }
};
// Measure baseline memory.
// Double GC with a settling delay so startup garbage does not inflate the baseline.
gc();
await new Promise(r => setTimeout(r, 100));
gc();
const baseline = process.memoryUsage();
console.log("=== Microtask Memory Benchmark ===\n");
console.log(`Baseline RSS: ${formatBytes(baseline.rss)}`);
console.log(`Baseline Heap Used: ${formatBytes(baseline.heapUsed)}`);
console.log("");
// Test different queue sizes.
const testSizes = [10_000, 50_000, 100_000, 500_000, 1_000_000];
for (const size of testSizes) {
// Settle memory before taking the per-size baseline.
gc();
await new Promise(r => setTimeout(r, 50));
const beforeMem = process.memoryUsage();
// Create a promise that will resolve when all microtasks complete.
let resolveAll;
const allDone = new Promise(r => {
resolveAll = r;
});
let completed = 0;
const startTime = performance.now();
// Queue many microtasks at once; none of them run until this synchronous
// loop finishes and we yield to the microtask queue below.
for (let i = 0; i < size; i++) {
queueMicrotask(() => {
completed++;
if (completed === size) {
resolveAll();
}
});
}
// Measure memory while tasks are queued (before they execute) — this is the
// peak cost of holding `size` queued tasks simultaneously.
const duringMem = process.memoryUsage();
// Wait for all microtasks to complete.
await allDone;
const elapsed = performance.now() - startTime;
gc();
await new Promise(r => setTimeout(r, 50));
const afterMem = process.memoryUsage();
// NOTE(review): afterMem is captured but never reported below; the post-drain
// RSS could be logged too — confirm whether that was intended.
const memIncrease = duringMem.rss - beforeMem.rss;
const heapIncrease = duringMem.heapUsed - beforeMem.heapUsed;
const bytesPerTask = memIncrease / size;
const heapBytesPerTask = heapIncrease / size;
console.log(`--- ${size.toLocaleString()} tasks ---`);
console.log(`  Time: ${elapsed.toFixed(2)}ms`);
console.log(`  RSS increase: ${formatBytes(memIncrease)} (${bytesPerTask.toFixed(1)} bytes/task)`);
console.log(`  Heap increase: ${formatBytes(heapIncrease)} (${heapBytesPerTask.toFixed(1)} bytes/task)`);
console.log("");
}
// Test with Promise chains (different microtask type).
console.log("=== Promise Chain Memory Test ===\n");
for (const chainLength of [10_000, 50_000, 100_000]) {
gc();
await new Promise(r => setTimeout(r, 50));
const beforeMem = process.memoryUsage();
const startTime = performance.now();
// Create a long promise chain; each .then() allocates a reaction record.
let p = Promise.resolve(0);
for (let i = 0; i < chainLength; i++) {
p = p.then(v => v + 1);
}
// Snapshot while the whole chain is still pending.
const duringMem = process.memoryUsage();
await p;
const elapsed = performance.now() - startTime;
gc();
const afterMem = process.memoryUsage();
// NOTE(review): afterMem unused here as well.
const memIncrease = duringMem.rss - beforeMem.rss;
const bytesPerPromise = memIncrease / chainLength;
console.log(`--- Promise chain x${chainLength.toLocaleString()} ---`);
console.log(`  Time: ${elapsed.toFixed(2)}ms`);
console.log(`  RSS increase: ${formatBytes(memIncrease)} (${bytesPerPromise.toFixed(1)} bytes/promise)`);
console.log("");
}
// Test with thenables (triggers PromiseResolveThenableJob).
console.log("=== Thenable Memory Test ===\n");
for (const count of [10_000, 50_000, 100_000]) {
gc();
await new Promise(r => setTimeout(r, 50));
const beforeMem = process.memoryUsage();
const startTime = performance.now();
const promises = [];
for (let i = 0; i < count; i++) {
// A non-promise object with a callable `then` — resolving it schedules a
// PromiseResolveThenableJob microtask per element.
const thenable = {
then(resolve) {
resolve(i);
},
};
promises.push(Promise.resolve(thenable));
}
const duringMem = process.memoryUsage();
await Promise.all(promises);
const elapsed = performance.now() - startTime;
gc();
const afterMem = process.memoryUsage();
// NOTE(review): afterMem unused here as well.
const memIncrease = duringMem.rss - beforeMem.rss;
const bytesPerThenable = memIncrease / count;
console.log(`--- Thenable x${count.toLocaleString()} ---`);
console.log(`  Time: ${elapsed.toFixed(2)}ms`);
console.log(`  RSS increase: ${formatBytes(memIncrease)} (${bytesPerThenable.toFixed(1)} bytes/thenable)`);
console.log("");
}
// Final report after everything has drained and been collected.
console.log("=== Summary ===");
gc();
await new Promise(r => setTimeout(r, 100));
const finalMem = process.memoryUsage();
console.log(`Final RSS: ${formatBytes(finalMem.rss)}`);
console.log(`Final Heap Used: ${formatBytes(finalMem.heapUsed)}`);

View File

@@ -0,0 +1,76 @@
// Microtask Throughput Benchmark
// Measures raw throughput of microtask processing.
import { bench, run } from "../runner.mjs";

// Test 1: queueMicrotask throughput — resolve once all 1000 callbacks ran.
bench("queueMicrotask x1000", async () => {
  let done = 0;
  await new Promise(resolve => {
    for (let i = 0; i < 1000; i++) {
      queueMicrotask(() => {
        done += 1;
        if (done === 1000) resolve();
      });
    }
  });
});

// Test 2: Promise.resolve().then() throughput — a 1000-link then-chain.
bench("Promise.then x1000", async () => {
  let chain = Promise.resolve();
  for (let i = 0; i < 1000; i++) {
    chain = chain.then(() => {});
  }
  await chain;
});

// Test 3: Mixed microtask types — interleave queueMicrotask and promise jobs.
bench("Mixed (queue+promise) x500 each", async () => {
  const total = 1000;
  let finished = 0;
  await new Promise(resolve => {
    const tick = () => {
      finished += 1;
      if (finished === total) resolve();
    };
    for (let i = 0; i < 500; i++) {
      queueMicrotask(tick);
      Promise.resolve().then(tick);
    }
  });
});

// Test 4: Nested async/await — each await level schedules a microtask.
bench("async/await depth 100", async () => {
  const descend = async n => (n <= 0 ? n : await descend(n - 1));
  await descend(100);
});

// Test 5: Promise.all with many already-resolved promises.
bench("Promise.all x100", async () => {
  const batch = [];
  for (let i = 0; i < 100; i++) {
    batch.push(Promise.resolve(i));
  }
  await Promise.all(batch);
});

// Test 6: Thenable resolution (uses PromiseResolveThenableJob).
bench("Thenable x100", async () => {
  const batch = [];
  for (let i = 0; i < 100; i++) {
    batch.push(
      Promise.resolve({
        then(r) {
          r(i);
        },
      }),
    );
  }
  await Promise.all(batch);
});

await run();

View File

@@ -0,0 +1,89 @@
// Direct measurement of QueuedTask memory overhead
// This test queues many tasks synchronously before yielding to measure peak memory
// Format a byte count as megabytes with two decimals, e.g. 1048576 -> "1.00 MB".
const formatMB = bytes => (bytes / (1024 * 1024)).toFixed(2) + " MB";
// Request a synchronous full GC under Bun; no-op on other runtimes.
const gc = () => {
  if (typeof Bun === "undefined") return;
  Bun.gc(true);
};
// Queue `taskCount` microtasks synchronously and measure the RSS growth from
// the pre-queue baseline to the moment just after queuing — i.e. the peak cost
// of holding that many queued-but-unexecuted tasks. Returns per-run stats
// (memory figures in bytes, times in milliseconds).
async function measureQueueMemory(taskCount) {
// Double GC with a settling delay so the baseline is as clean as possible.
gc();
await new Promise(r => setTimeout(r, 100));
gc();
const baselineRSS = process.memoryUsage().rss;
// Create completion tracking.
let completed = 0;
let resolveAll;
const allDone = new Promise(r => {
resolveAll = r;
});
// Queue all tasks synchronously (they won't execute until we await).
const startTime = performance.now();
for (let i = 0; i < taskCount; i++) {
queueMicrotask(() => {
if (++completed === taskCount) resolveAll();
});
}
const queueTime = performance.now() - startTime;
// Measure memory immediately after queuing (before execution).
const peakRSS = process.memoryUsage().rss;
const memUsed = peakRSS - baselineRSS;
// Now let them execute: awaiting yields to the microtask queue.
const execStart = performance.now();
await allDone;
const execTime = performance.now() - execStart;
// Collect and settle so finalRSS reflects post-drain memory.
gc();
await new Promise(r => setTimeout(r, 50));
const finalRSS = process.memoryUsage().rss;
return {
taskCount,
memUsed,
bytesPerTask: memUsed / taskCount,
queueTime,
execTime,
peakRSS,
finalRSS,
};
}
console.log("=== QueuedTask Memory Measurement ===\n");
// Run measureQueueMemory several times and report the run with the median
// memory usage, which damps GC-timing noise between runs.
async function runTest(count, runs = 5) {
  const samples = [];
  for (let attempt = 0; attempt < runs; attempt++) {
    samples.push(await measureQueueMemory(count));
    // Settle between runs so one run's garbage doesn't bleed into the next.
    gc();
    await new Promise(r => setTimeout(r, 200));
  }
  // Sort by memory used and take the median sample.
  samples.sort((a, b) => a.memUsed - b.memUsed);
  const median = samples[Math.floor(samples.length / 2)];
  console.log(`${count.toLocaleString()} tasks:`);
  console.log(`  Memory: ${formatMB(median.memUsed)} (${median.bytesPerTask.toFixed(1)} bytes/task)`);
  console.log(`  Queue time: ${median.queueTime.toFixed(2)}ms`);
  console.log(`  Exec time: ${median.execTime.toFixed(2)}ms`);
  console.log("");
  return median;
}
// Sweep increasing queue sizes, then report the mean per-task cost over all runs.
const results = [];
for (const taskCount of [100_000, 500_000, 1_000_000, 2_000_000]) {
  results.push(await runTest(taskCount));
}
console.log("=== Summary ===");
const avgBytesPerTask = results.reduce((sum, r) => sum + r.bytesPerTask, 0) / results.length;
console.log(`Average bytes/task: ${avgBytesPerTask.toFixed(1)}`);

View File

@@ -2,7 +2,7 @@ option(WEBKIT_VERSION "The version of WebKit to use")
option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading")
if(NOT WEBKIT_VERSION)
  # Default to the pinned WebKit build when the caller did not override it.
  # (The flattened diff left both the old and new pin visible; the earlier
  # set() was dead code — immediately overwritten — so only the updated pin
  # from the "Update WEBKIT_VERSION" commit is kept.)
  set(WEBKIT_VERSION preview-pr-123-bc52e8a3)
endif()
string(SUBSTRING ${WEBKIT_VERSION} 0 16 WEBKIT_VERSION_PREFIX)

View File

@@ -1059,9 +1059,7 @@ JSC_DEFINE_HOST_FUNCTION(functionQueueMicrotask,
auto* globalObject = defaultGlobalObject(lexicalGlobalObject);
JSC::JSValue asyncContext = globalObject->m_asyncContextData.get()->getInternalField(0);
auto function = globalObject->performMicrotaskFunction();
#if ASSERT_ENABLED
ASSERT_WITH_MESSAGE(function, "Invalid microtask function");
ASSERT_WITH_MESSAGE(!callback.isEmpty(), "Invalid microtask callback");
#endif
@@ -1069,10 +1067,9 @@ JSC_DEFINE_HOST_FUNCTION(functionQueueMicrotask,
asyncContext = JSC::jsUndefined();
}
// BunPerformMicrotaskJob accepts a variable number of arguments (up to: performMicrotask, job, asyncContext, arg0, arg1).
// The runtime inspects argumentCount to determine which arguments are present, so callers may pass only the subset they need.
// Here we pass: function, callback, asyncContext.
JSC::QueuedTask task { nullptr, JSC::InternalMicrotask::BunPerformMicrotaskJob, globalObject, function, callback, asyncContext };
// BunPerformMicrotaskJob arguments: job, asyncContext, arg0 (optional), arg1 (optional)
// Here we pass: callback (job), asyncContext
JSC::QueuedTask task { nullptr, JSC::InternalMicrotask::BunPerformMicrotaskJob, globalObject, callback, asyncContext };
globalObject->vm().queueMicrotask(WTF::move(task));
return JSC::JSValue::encode(JSC::jsUndefined());

View File

@@ -3521,13 +3521,10 @@ void JSC__JSPromise__rejectOnNextTickWithHandled(JSC::JSPromise* promise, JSC::J
promise->internalField(JSC::JSPromise::Field::Flags).set(vm, promise, jsNumber(flags | JSC::JSPromise::isFirstResolvingFunctionCalledFlag));
auto* globalObject = jsCast<Zig::GlobalObject*>(promise->globalObject());
auto microtaskFunction = globalObject->performMicrotaskFunction();
auto rejectPromiseFunction = globalObject->rejectPromiseFunction();
auto asyncContext = globalObject->m_asyncContextData.get()->getInternalField(0);
#if ASSERT_ENABLED
ASSERT_WITH_MESSAGE(microtaskFunction, "Invalid microtask function");
ASSERT_WITH_MESSAGE(rejectPromiseFunction, "Invalid microtask callback");
ASSERT_WITH_MESSAGE(!value.isEmpty(), "Invalid microtask value");
#endif
@@ -3540,7 +3537,9 @@ void JSC__JSPromise__rejectOnNextTickWithHandled(JSC::JSPromise* promise, JSC::J
value = jsUndefined();
}
JSC::QueuedTask task { nullptr, JSC::InternalMicrotask::BunPerformMicrotaskJob, globalObject, microtaskFunction, rejectPromiseFunction, globalObject->m_asyncContextData.get()->getInternalField(0), promise, value };
// BunPerformMicrotaskJob arguments: job, asyncContext, arg0, arg1
// Here: rejectPromiseFunction (job), asyncContext, promise, value
JSC::QueuedTask task { nullptr, JSC::InternalMicrotask::BunPerformMicrotaskJob, globalObject, rejectPromiseFunction, asyncContext, promise, value };
globalObject->vm().queueMicrotask(WTF::move(task));
RETURN_IF_EXCEPTION(scope, );
}
@@ -5406,9 +5405,9 @@ extern "C" void JSC__JSGlobalObject__queueMicrotaskJob(JSC::JSGlobalObject* arg0
if (microtaskArgs[3].isEmpty()) {
microtaskArgs[3] = jsUndefined();
}
JSC::JSFunction* microTaskFunction = globalObject->performMicrotaskFunction();
// BunPerformMicrotaskJob arguments: job, asyncContext, arg0, arg1
// microtaskArgs layout: [0]=job, [1]=asyncContext, [2]=arg0, [3]=arg1
#if ASSERT_ENABLED
ASSERT_WITH_MESSAGE(microTaskFunction, "Invalid microtask function");
auto& vm = globalObject->vm();
if (microtaskArgs[0].isCell()) {
JSC::Integrity::auditCellFully(vm, microtaskArgs[0].asCell());
@@ -5425,10 +5424,9 @@ extern "C" void JSC__JSGlobalObject__queueMicrotaskJob(JSC::JSGlobalObject* arg0
if (microtaskArgs[3].isCell()) {
JSC::Integrity::auditCellFully(vm, microtaskArgs[3].asCell());
}
#endif
JSC::QueuedTask task { nullptr, JSC::InternalMicrotask::BunPerformMicrotaskJob, globalObject, microTaskFunction, WTF::move(microtaskArgs[0]), WTF::move(microtaskArgs[1]), WTF::move(microtaskArgs[2]), WTF::move(microtaskArgs[3]) };
JSC::QueuedTask task { nullptr, JSC::InternalMicrotask::BunPerformMicrotaskJob, globalObject, WTF::move(microtaskArgs[0]), WTF::move(microtaskArgs[1]), WTF::move(microtaskArgs[2]), WTF::move(microtaskArgs[3]) };
globalObject->vm().queueMicrotask(WTF::move(task));
}