diff --git a/src/bun.js/api/server/RequestContext.zig b/src/bun.js/api/server/RequestContext.zig
index 4d74dcdeeb..04829d8163 100644
--- a/src/bun.js/api/server/RequestContext.zig
+++ b/src/bun.js/api/server/RequestContext.zig
@@ -1896,6 +1896,9 @@ pub fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool,
             }
             this.ref();
             byte_stream.pipe = jsc.WebCore.Pipe.Wrap(@This(), onPipe).init(this);
+            // Deinit the old Strong reference before creating a new one
+            // to avoid leaking the Strong.Impl memory
+            this.response_body_readable_stream_ref.deinit();
             this.response_body_readable_stream_ref = jsc.WebCore.ReadableStream.Strong.init(stream, globalThis);

             this.byte_stream = byte_stream;
diff --git a/test/js/web/fetch/server-response-stream-leak.test.ts b/test/js/web/fetch/server-response-stream-leak.test.ts
new file mode 100644
index 0000000000..ce8d747fa1
--- /dev/null
+++ b/test/js/web/fetch/server-response-stream-leak.test.ts
@@ -0,0 +1,52 @@
+import { heapStats } from "bun:jsc";
+import { describe, expect, test } from "bun:test";
+
+describe("Bun.serve response stream leak", () => {
+  test("proxy server forwarding streaming response should not leak", async () => {
+    // Backend server that returns a streaming response with delay
+    await using backend = Bun.serve({
+      port: 0,
+      fetch(req) {
+        const stream = new ReadableStream({
+          async start(controller) {
+            controller.enqueue(new TextEncoder().encode("chunk1"));
+            await Bun.sleep(10);
+            controller.enqueue(new TextEncoder().encode("chunk2"));
+            controller.close();
+          },
+        });
+        return new Response(stream);
+      },
+    });
+
+    // Proxy server that forwards the response body stream
+    await using proxy = Bun.serve({
+      port: 0,
+      async fetch(req) {
+        const backendResponse = await fetch(`http://localhost:${backend.port}/`);
+        return new Response(backendResponse.body);
+      },
+    });
+
+    const url = `http://localhost:${proxy.port}/`;
+
+    async function leak() {
+      const response = await fetch(url);
+      return await response.text();
+    }
+
+    for (let i = 0; i < 200; i++) {
+      await leak();
+    }
+
+    await Bun.sleep(10);
+    Bun.gc(true);
+    await Bun.sleep(10);
+    Bun.gc(true);
+
+    const readableStreamCount = heapStats().objectTypeCounts.ReadableStream || 0;
+    const responseCount = heapStats().objectTypeCounts.Response || 0;
+    expect(readableStreamCount).toBeLessThanOrEqual(50);
+    expect(responseCount).toBeLessThanOrEqual(50);
+  });
+});