From 461ad886bd1442652155013de2d51281c3066a67 Mon Sep 17 00:00:00 2001
From: SUZUKI Sosuke
Date: Tue, 13 Jan 2026 07:41:58 +0900
Subject: [PATCH] fix(http): fix Strong reference leak in server response streaming (#25965)

## Summary

Fix a memory leak in `RequestContext.doRenderWithBody()`: the `Strong.Impl` allocation behind `response_body_readable_stream_ref` was leaked every time a streaming response was proxied through Bun's HTTP server.

## Problem

When a streaming response (e.g., from a proxied fetch request) was forwarded through Bun's server:

1. `response_body_readable_stream_ref` was initialized at line 1836 (from `lock.readable`) or line 1841 (via `Strong.init()`)
2. For `.Bytes` streams with `has_received_last_chunk = false`, a **new** Strong reference was created at line 1902
3. The old Strong reference was **never deinit'd**, so its `Strong.Impl` allocation leaked

The leak accumulated with every streaming response proxied through the server.

## Solution

Add `this.response_body_readable_stream_ref.deinit()` before creating the new Strong reference. Although `deinit()` drops the old Strong's GC protection of the stream before the new reference re-establishes it, this is safe because:

- `stream` exists as a stack-local variable
- JSC's conservative GC scans the stack, so stack-local JSValues stay alive
- No GC can occur between consecutive synchronous Zig statements
- Therefore `stream` cannot be collected between `deinit()` and `Strong.init()`
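To make the failure mode concrete, here is a minimal standalone Zig sketch of the pattern. The `Strong` type below is a simplified stand-in (a plain heap allocation with an explicit allocator), not Bun's actual `jsc.WebCore.ReadableStream.Strong`:

```zig
const std = @import("std");

/// Simplified stand-in for a Strong reference: a handle to a
/// heap-allocated Impl that must be released via deinit() before
/// the handle is overwritten.
const Strong = struct {
    impl: *usize,

    fn init(allocator: std.mem.Allocator, value: usize) !Strong {
        const impl = try allocator.create(usize);
        impl.* = value;
        return .{ .impl = impl };
    }

    fn deinit(self: *Strong, allocator: std.mem.Allocator) void {
        allocator.destroy(self.impl);
    }
};

test "overwriting a Strong without deinit() leaks its Impl" {
    const allocator = std.testing.allocator;

    var ref = try Strong.init(allocator, 1);

    // The bug: reassigning `ref` here without the deinit() below would
    // orphan the first Impl, and std.testing.allocator would fail the
    // test with a leak report; this is the same shape as the leak in
    // doRenderWithBody().
    ref.deinit(allocator);
    ref = try Strong.init(allocator, 2);

    ref.deinit(allocator);
}
```

In Bun's real `Strong`, the `Impl` also protects a JSValue from GC, which is why the stack-reachability argument above matters: releasing the old reference early is only correct because `stream` stays reachable from the stack until `Strong.init()` re-protects it.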
## Test

Added `test/js/web/fetch/server-response-stream-leak.test.ts`, which:

- Creates a backend server that returns delayed streaming responses
- Creates a proxy server that forwards those streaming responses
- Makes 200 requests and checks that `ReadableStream` objects don't accumulate on the heap
- Fails on system Bun v1.3.5 (202 leaked) and passes with the fix

## Related

Similar to the Strong reference leak fixes in:

- #23313 (fetch memory leak)
- #25846 (fetch cyclic reference leak)
---
 src/bun.js/api/server/RequestContext.zig      |  3 ++
 .../fetch/server-response-stream-leak.test.ts | 52 +++++++++++++++++++
 2 files changed, 55 insertions(+)
 create mode 100644 test/js/web/fetch/server-response-stream-leak.test.ts

diff --git a/src/bun.js/api/server/RequestContext.zig b/src/bun.js/api/server/RequestContext.zig
index 4d74dcdeeb..04829d8163 100644
--- a/src/bun.js/api/server/RequestContext.zig
+++ b/src/bun.js/api/server/RequestContext.zig
@@ -1896,6 +1896,9 @@ pub fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool,
             }
             this.ref();
             byte_stream.pipe = jsc.WebCore.Pipe.Wrap(@This(), onPipe).init(this);
+            // Deinit the old Strong reference before creating a new one
+            // to avoid leaking the Strong.Impl memory
+            this.response_body_readable_stream_ref.deinit();
             this.response_body_readable_stream_ref = jsc.WebCore.ReadableStream.Strong.init(stream, globalThis);
 
             this.byte_stream = byte_stream;
diff --git a/test/js/web/fetch/server-response-stream-leak.test.ts b/test/js/web/fetch/server-response-stream-leak.test.ts
new file mode 100644
index 0000000000..ce8d747fa1
--- /dev/null
+++ b/test/js/web/fetch/server-response-stream-leak.test.ts
@@ -0,0 +1,52 @@
+import { heapStats } from "bun:jsc";
+import { describe, expect, test } from "bun:test";
+
+describe("Bun.serve response stream leak", () => {
+  test("proxy server forwarding streaming response should not leak", async () => {
+    // Backend server that returns a streaming response with delay
+    await using backend = Bun.serve({
+      port: 0,
+      fetch(req) {
+        const stream = new ReadableStream({
+          async start(controller) {
+            controller.enqueue(new TextEncoder().encode("chunk1"));
+            await Bun.sleep(10);
+            controller.enqueue(new TextEncoder().encode("chunk2"));
+            controller.close();
+          },
+        });
+        return new Response(stream);
+      },
+    });
+
+    // Proxy server that forwards the response body stream
+    await using proxy = Bun.serve({
+      port: 0,
+      async fetch(req) {
+        const backendResponse = await fetch(`http://localhost:${backend.port}/`);
+        return new Response(backendResponse.body);
+      },
+    });
+
+    const url = `http://localhost:${proxy.port}/`;
+
+    async function leak() {
+      const response = await fetch(url);
+      return await response.text();
+    }
+
+    for (let i = 0; i < 200; i++) {
+      await leak();
+    }
+
+    await Bun.sleep(10);
+    Bun.gc(true);
+    await Bun.sleep(10);
+    Bun.gc(true);
+
+    const readableStreamCount = heapStats().objectTypeCounts.ReadableStream || 0;
+    const responseCount = heapStats().objectTypeCounts.Response || 0;
+    expect(readableStreamCount).toBeLessThanOrEqual(50);
+    expect(responseCount).toBeLessThanOrEqual(50);
+  });
+});