diff --git a/src/bun.js/node/node_zlib_binding.zig b/src/bun.js/node/node_zlib_binding.zig
index 75f84afbb5..9caf4b998d 100644
--- a/src/bun.js/node/node_zlib_binding.zig
+++ b/src/bun.js/node/node_zlib_binding.zig
@@ -101,6 +101,16 @@ pub fn CompressionStream(comptime T: type) type {
             // And make sure to clear it when we are done.
             this.this_value.set(globalThis, this_value);

+            // Hold strong references to the input and output buffers to prevent
+            // them from being garbage collected while the WorkPool thread is
+            // processing them. This fixes a use-after-free vulnerability where
+            // the JS context could terminate (e.g., worker thread termination)
+            // and free these buffers while compression is still in progress.
+            if (!arguments[1].isNull()) {
+                this.in_buf_value.set(globalThis, arguments[1]);
+            }
+            this.out_buf_value.set(globalThis, arguments[4]);
+
             const vm = globalThis.bunVM();
             this.task = .{ .callback = &AsyncJob.runTask };
             this.poll_ref.ref(vm);
@@ -133,6 +143,11 @@ pub fn CompressionStream(comptime T: type) type {

             this.write_in_progress = false;

+            // Clear the strong references to the input/output buffers now that
+            // the WorkPool thread has finished processing them.
+            this.in_buf_value.deinit();
+            this.out_buf_value.deinit();
+
             // Clear the strong handle before we call any callbacks.
             const this_value = this.this_value.trySwap() orelse {
                 debug("this_value is null in runFromJSThread", .{});
@@ -235,6 +250,8 @@ pub fn CompressionStream(comptime T: type) type {
             this.pending_close = false;
             this.closed = true;
             this.this_value.deinit();
+            this.in_buf_value.deinit();
+            this.out_buf_value.deinit();
             this.stream.close();
         }

diff --git a/src/bun.js/node/zlib/NativeBrotli.zig b/src/bun.js/node/zlib/NativeBrotli.zig
index 9bd1e60231..d24f9a5746 100644
--- a/src/bun.js/node/zlib/NativeBrotli.zig
+++ b/src/bun.js/node/zlib/NativeBrotli.zig
@@ -23,6 +23,10 @@ stream: Context = .{},
 write_result: ?[*]u32 = null,
 poll_ref: CountedKeepAlive = .{},
 this_value: jsc.Strong.Optional = .empty,
+/// Strong reference to input buffer to prevent GC during async work
+in_buf_value: jsc.Strong.Optional = .empty,
+/// Strong reference to output buffer to prevent GC during async work
+out_buf_value: jsc.Strong.Optional = .empty,
 write_in_progress: bool = false,
 pending_close: bool = false,
 closed: bool = false,
@@ -109,6 +113,8 @@ pub fn params(this: *@This(), globalThis: *jsc.JSGlobalObject, callframe: *jsc.C

 fn deinit(this: *@This()) void {
     this.this_value.deinit();
+    this.in_buf_value.deinit();
+    this.out_buf_value.deinit();
     this.poll_ref.deinit();
     switch (this.stream.mode) {
         .BROTLI_ENCODE, .BROTLI_DECODE => this.stream.close(),
diff --git a/src/bun.js/node/zlib/NativeZlib.zig b/src/bun.js/node/zlib/NativeZlib.zig
index c907b0646c..da851edfad 100644
--- a/src/bun.js/node/zlib/NativeZlib.zig
+++ b/src/bun.js/node/zlib/NativeZlib.zig
@@ -23,6 +23,10 @@ stream: Context = .{},
 write_result: ?[*]u32 = null,
 poll_ref: CountedKeepAlive = .{},
 this_value: jsc.Strong.Optional = .empty,
+/// Strong reference to input buffer to prevent GC during async work
+in_buf_value: jsc.Strong.Optional = .empty,
+/// Strong reference to output buffer to prevent GC during async work
+out_buf_value: jsc.Strong.Optional = .empty,
 write_in_progress: bool = false,
 pending_close: bool = false,
 closed: bool = false,
@@ -107,6 +111,8 @@ pub fn params(this: *@This(), globalThis: *jsc.JSGlobalObject, callframe: *jsc.C

 fn deinit(this: *@This()) void {
     this.this_value.deinit();
+    this.in_buf_value.deinit();
+    this.out_buf_value.deinit();
     this.poll_ref.deinit();
     this.stream.close();
     bun.destroy(this);
diff --git a/src/bun.js/node/zlib/NativeZstd.zig b/src/bun.js/node/zlib/NativeZstd.zig
index 1acbde76b7..7bb81d161f 100644
--- a/src/bun.js/node/zlib/NativeZstd.zig
+++ b/src/bun.js/node/zlib/NativeZstd.zig
@@ -23,6 +23,10 @@ stream: Context = .{},
 write_result: ?[*]u32 = null,
 poll_ref: CountedKeepAlive = .{},
 this_value: jsc.Strong.Optional = .empty,
+/// Strong reference to input buffer to prevent GC during async work
+in_buf_value: jsc.Strong.Optional = .empty,
+/// Strong reference to output buffer to prevent GC during async work
+out_buf_value: jsc.Strong.Optional = .empty,
 write_in_progress: bool = false,
 pending_close: bool = false,
 closed: bool = false,
@@ -108,6 +112,9 @@ pub fn params(this: *@This(), globalThis: *jsc.JSGlobalObject, callframe: *jsc.C
 }

 fn deinit(this: *@This()) void {
+    this.this_value.deinit();
+    this.in_buf_value.deinit();
+    this.out_buf_value.deinit();
     this.poll_ref.deinit();
     switch (this.stream.mode) {
         .ZSTD_COMPRESS, .ZSTD_DECOMPRESS => this.stream.close(),
diff --git a/test/js/node/zlib/zlib-worker-uaf.test.ts b/test/js/node/zlib/zlib-worker-uaf.test.ts
new file mode 100644
index 0000000000..eab06a9a00
--- /dev/null
+++ b/test/js/node/zlib/zlib-worker-uaf.test.ts
@@ -0,0 +1,270 @@
+import { expect, test } from "bun:test";
+import { bunEnv, bunExe } from "harness";
+
+// This test verifies that zlib compression operations properly hold strong
+// references to their input/output buffers during async work, preventing
+// use-after-free when a worker thread is terminated while compression is
+// in progress.
+//
+// The fix adds jsc.Strong.Optional references (in_buf_value, out_buf_value)
+// to hold the buffer JSValues while the WorkPool thread processes them.
+ +test("brotliCompress should not UAF when worker is terminated during compression", async () => { + const workerCode = ` +const { parentPort, workerData } = require("worker_threads"); +const zlib = require("zlib"); + +const sab = workerData.sab; +const view = new Uint8Array(sab); + +// Start brotli compression with quality 11 (slow) to ensure compression +// is still in progress when we terminate +zlib.brotliCompress(view, { + params: { + [zlib.constants.BROTLI_PARAM_QUALITY]: 11, + }, +}, (err, result) => { + // This callback may not be called if worker is terminated + if (!err) { + parentPort.postMessage({ done: true, size: result.length }); + } +}); + +parentPort.postMessage({ ready: true }); +`; + + // Use inline eval to run the test + const testCode = ` +const { Worker } = require("worker_threads"); + +const workerCode = ${JSON.stringify(workerCode)}; + +// Create SharedArrayBuffer with test data +const inputData = Buffer.alloc(5 * 1024 * 1024, "A"); +let sab = new SharedArrayBuffer(inputData.length); +let view = new Uint8Array(sab); +view.set(inputData); + +const worker = new Worker(workerCode, { + eval: true, + workerData: { sab } +}); + +let terminated = false; + +worker.on("message", async (msg) => { + if (msg.ready && !terminated) { + terminated = true; + // Small delay to let compression start + await Bun.sleep(20); + + // Terminate worker while compression is in progress + await worker.terminate(); + + // Drop references and trigger GC + sab = null; + view = null; + if (global.gc) { + global.gc(); + } + + // Wait a bit for any potential UAF to manifest + await Bun.sleep(100); + + console.log("SUCCESS"); + process.exit(0); + } +}); + +worker.on("error", (err) => { + console.error("Worker error:", err); + process.exit(1); +}); + +// Timeout after 10 seconds +setTimeout(() => { + console.log("SUCCESS"); + process.exit(0); +}, 10000); +`; + + const proc = Bun.spawn({ + cmd: [bunExe(), "--expose-gc", "-e", testCode], + env: bunEnv, + stdout: "pipe", + stderr: "pipe", + }); + + const [stdout, stderr, exitCode] = await Promise.all([ + new Response(proc.stdout).text(), + new Response(proc.stderr).text(), + proc.exited, + ]); + + // The process should complete without crashing from UAF + // Note: with ASAN builds, a UAF would cause a crash with ASAN error + expect(exitCode).toBe(0); + expect(stdout.trim()).toBe("SUCCESS"); +}); + +test("gzip should not UAF when worker is terminated during compression", async () => { + const workerCode = ` +const { parentPort, workerData } = require("worker_threads"); +const zlib = require("zlib"); + +const sab = workerData.sab; +const view = new Uint8Array(sab); + +// Start gzip compression +zlib.gzip(view, { level: 9 }, (err, result) => { + if (!err) { + parentPort.postMessage({ done: true, size: result.length }); + } +}); + +parentPort.postMessage({ ready: true }); +`; + + const testCode = ` +const { Worker } = require("worker_threads"); + +const workerCode = ${JSON.stringify(workerCode)}; + +const inputData = Buffer.alloc(2 * 1024 * 1024, "B"); +let sab = new SharedArrayBuffer(inputData.length); +let view = new Uint8Array(sab); +view.set(inputData); + +const worker = new Worker(workerCode, { + eval: true, + workerData: { sab } +}); + +let terminated = false; + +worker.on("message", async (msg) => { + if (msg.ready && !terminated) { + terminated = true; + await Bun.sleep(10); + await worker.terminate(); + + sab = null; + view = null; + if (global.gc) { + global.gc(); + } + + await Bun.sleep(100); + console.log("SUCCESS"); + process.exit(0); + } +}); 
+ +worker.on("error", (err) => { + console.error("Worker error:", err); + process.exit(1); +}); + +setTimeout(() => { + console.log("SUCCESS"); + process.exit(0); +}, 10000); +`; + + const proc = Bun.spawn({ + cmd: [bunExe(), "--expose-gc", "-e", testCode], + env: bunEnv, + stdout: "pipe", + stderr: "pipe", + }); + + const [stdout, stderr, exitCode] = await Promise.all([ + new Response(proc.stdout).text(), + new Response(proc.stderr).text(), + proc.exited, + ]); + + expect(exitCode).toBe(0); + expect(stdout.trim()).toBe("SUCCESS"); +}); + +test("zstd should not UAF when worker is terminated during compression", async () => { + const workerCode = ` +const { parentPort, workerData } = require("worker_threads"); +const zlib = require("zlib"); + +const sab = workerData.sab; +const view = new Uint8Array(sab); + +// Start zstd compression +zlib.zstdCompress(view, { level: 19 }, (err, result) => { + if (!err) { + parentPort.postMessage({ done: true, size: result.length }); + } +}); + +parentPort.postMessage({ ready: true }); +`; + + const testCode = ` +const { Worker } = require("worker_threads"); + +const workerCode = ${JSON.stringify(workerCode)}; + +const inputData = Buffer.alloc(2 * 1024 * 1024, "C"); +let sab = new SharedArrayBuffer(inputData.length); +let view = new Uint8Array(sab); +view.set(inputData); + +const worker = new Worker(workerCode, { + eval: true, + workerData: { sab } +}); + +let terminated = false; + +worker.on("message", async (msg) => { + if (msg.ready && !terminated) { + terminated = true; + await Bun.sleep(10); + await worker.terminate(); + + sab = null; + view = null; + if (global.gc) { + global.gc(); + } + + await Bun.sleep(100); + console.log("SUCCESS"); + process.exit(0); + } +}); + +worker.on("error", (err) => { + console.error("Worker error:", err); + process.exit(1); +}); + +setTimeout(() => { + console.log("SUCCESS"); + process.exit(0); +}, 10000); +`; + + const proc = Bun.spawn({ + cmd: [bunExe(), "--expose-gc", "-e", testCode], + env: bunEnv, + stdout: "pipe", + stderr: "pipe", + }); + + const [stdout, stderr, exitCode] = await Promise.all([ + new Response(proc.stdout).text(), + new Response(proc.stderr).text(), + proc.exited, + ]); + + expect(exitCode).toBe(0); + expect(stdout.trim()).toBe("SUCCESS"); +});