Compare commits


1 Commit

Author: Claude Bot
SHA1: eec08c0e6a

fix(server): memory leak in streaming response proxy
Fix a `Strong` reference leak in `RequestContext.doRenderWithBody()` when
proxying streaming responses through Bun's HTTP server.

The bug occurred in the `.Bytes` stream path when `has_received_last_chunk`
was false. A new Strong reference was created at line 1899 without first
deinit'ing the existing reference set at line 1836 or 1841. This caused
`Strong.Impl` memory to leak on every streaming response.

The fix adds `this.response_body_readable_stream_ref.deinit()` before
creating the new Strong reference, properly cleaning up the old reference.

Fixes #25630

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
Date: 2026-01-12 20:19:56 +00:00
2 changed files with 73 additions and 0 deletions
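
The failure mode the commit describes is the classic overwrite-without-release mistake for owning handles: a field already holding a live reference is reassigned, orphaning the old one. A minimal TypeScript sketch of the pattern (all names here are hypothetical, not Bun's internal types):

// Illustrative sketch only: hypothetical names, not Bun's internals.
// StrongRef stands in for a handle that pins a heap allocation until
// deinit() is called.
class StrongRef<T extends object> {
  private target: T | null;
  constructor(target: T) {
    this.target = target; // imagine this allocates a Strong.Impl-like record
  }
  deinit(): void {
    this.target = null; // releases that record
  }
}

class ContextSketch {
  private streamRef: StrongRef<ReadableStream> | null = null;

  installStream(stream: ReadableStream): void {
    // Buggy form: assigning a fresh StrongRef directly would orphan the
    // previous handle and leak whatever it pinned, the bug this commit fixes.
    this.streamRef?.deinit(); // release the old reference first (the fix)
    this.streamRef = new StrongRef(stream);
  }
}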


@@ -1896,6 +1896,8 @@ pub fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool,
 }
 this.ref();
 byte_stream.pipe = jsc.WebCore.Pipe.Wrap(@This(), onPipe).init(this);
+// Deinit the old reference before creating a new one to avoid leaking the Strong.Impl
+this.response_body_readable_stream_ref.deinit();
 this.response_body_readable_stream_ref = jsc.WebCore.ReadableStream.Strong.init(stream, globalThis);
 this.byte_stream = byte_stream;
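
Only the two `+` lines are new; the `Strong.init` assignment below them is pre-existing. The added `deinit()` releases whichever reference was installed earlier (line 1836 or 1841, per the commit message) before the field is overwritten, matching the release-before-overwrite pattern sketched above.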


@@ -0,0 +1,71 @@
import { describe, expect, test } from "bun:test";

describe("issue 25630 - streaming response proxy memory leak", () => {
  test("should not leak ReadableStream Strong references when proxying streaming responses", async () => {
    // Backend server producing streaming data
    const backendServer = Bun.serve({
      port: 0,
      async fetch(req) {
        const stream = new ReadableStream({
          async start(controller) {
            for (let i = 0; i < 3; i++) {
              await Bun.sleep(5);
              controller.enqueue(new TextEncoder().encode(`data: chunk ${i}\n\n`));
            }
            controller.close();
          },
        });
        return new Response(stream, {
          headers: { "Content-Type": "text/event-stream" },
        });
      },
    });

    // Proxy server that forwards streaming responses (simulates SvelteKit with AI SDK)
    const proxyServer = Bun.serve({
      port: 0,
      async fetch(req) {
        const response = await fetch(backendServer.url);
        // This is the pattern that leaks: passing response.body to a new Response
        return new Response(response.body, {
          headers: {
            "Content-Type": "text/event-stream",
            "Transfer-Encoding": "chunked",
          },
        });
      },
    });

    try {
      // Force GC and get initial ReadableStream count
      Bun.gc(true);
      const jsc = require("bun:jsc");
      const initialCount = jsc.heapStats().objectTypeCounts.ReadableStream ?? 0;

      // Make many requests through the proxy
      const numRequests = 20;
      for (let i = 0; i < numRequests; i++) {
        const resp = await fetch(proxyServer.url);
        // Consume the entire response to ensure stream completes
        await resp.text();
      }

      // Force GC multiple times to ensure cleanup
      for (let i = 0; i < 5; i++) {
        Bun.gc(true);
        await Bun.sleep(5);
      }

      const finalCount = jsc.heapStats().objectTypeCounts.ReadableStream ?? 0;
      const leakedStreams = finalCount - initialCount;

      // With the bug, we'd see ~numRequests leaked streams
      // With the fix, we should see very few (ideally 0, but allow some slack for timing)
      // The threshold of 10 is generous - without the fix, we'd see 20+ leaked streams
      expect(leakedStreams).toBeLessThan(10);
    } finally {
      backendServer.stop(true);
      proxyServer.stop(true);
    }
  });
});
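
The path of the new test file isn't shown in this view; assuming it lives under Bun's test tree, it runs under Bun's test runner via `bun test`. Against a build without the fix, the comments above indicate the final assertion would fail with roughly one leaked ReadableStream per proxied request (20+ for the 20 requests), while a fixed build stays under the threshold of 10.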