Compare commits

...

2 Commits

Author SHA1 Message Date
autofix-ci[bot]
b5e9b41566 [autofix.ci] apply automated fixes 2025-09-30 20:49:58 +00:00
Claude Bot
78c512dac0 fix(fetch): prevent ReadableStream memory leak when reusing Response.body
Fixes a memory leak where creating a new Response with another Response's
body would create duplicate Strong references to the same ReadableStream,
preventing garbage collection.

The issue occurred in this pattern:
```js
const r1 = new Response(stream);
const r2 = new Response(r1.body);
```

Both r1 and r2 would create Strong references to the same ReadableStream
JSValue. When r1 was garbage collected, only its Strong reference would be
released, but r2's Strong reference would keep the stream alive indefinitely.

The fix transfers ownership of the ReadableStream when accessing response.body.
When `toReadableStream` is called on a Locked body with an existing stream,
it now releases the Body's Strong reference before returning the stream JSValue.
This ensures only one Strong reference exists per stream.

Fixes https://github.com/TanStack/router/issues/5289

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-09-30 20:48:21 +00:00
2 changed files with 103 additions and 1 deletion

View File

@@ -474,7 +474,16 @@ pub const Value = union(Tag) {
.Locked => {
var locked = &this.Locked;
if (locked.readable.get(globalThis)) |readable| {
return readable.value;
const stream_value = readable.value;
// Transfer ownership of the stream by releasing our Strong reference
// This prevents creating duplicate Strong references when the stream
// is passed to another Response/Request constructor
if (!locked.deinit) {
locked.deinit = true;
locked.readable.deinit();
locked.readable = .{};
}
return stream_value;
}
if (locked.promise != null or locked.action != .none) {
return jsc.WebCore.ReadableStream.used(globalThis);

View File

@@ -0,0 +1,93 @@
// Regression test for https://github.com/TanStack/router/issues/5289
// Memory leak when creating a new Response with another Response's body
import { heapStats } from "bun:jsc";
import { expect, test } from "bun:test";
// Regression coverage: constructing a Response from another Response's body
// must not register a second Strong reference for the same ReadableStream.
test("Response body ReadableStream should not create duplicate Strong references", () => {
  // Helper: number of currently protected ReadableStream objects.
  const protectedStreamCount = () => heapStats().protectedObjectTypeCounts.ReadableStream || 0;

  // Establish a baseline after a full collection.
  Bun.gc(true);
  const baseline = protectedStreamCount();

  // Exercise the formerly leaky pattern repeatedly.
  let i = 0;
  while (i < 100) {
    const source = new ReadableStream({
      start(controller) {
        controller.enqueue(new TextEncoder().encode(`data${i}`));
        controller.close();
      },
    });
    const first = new Response(source);
    // Reusing first.body used to register a duplicate Strong reference.
    new Response(first.body);
    i++;
  }

  const created = protectedStreamCount() - baseline;
  // Before the fix: ~200 Strong references (2 per stream).
  // After the fix: ~100 (1 per stream, since the first Response releases its ref).
  // Margins allow for GC timing, but the count must sit nearer 100 than 200.
  expect(created).toBeLessThan(150);
  expect(created).toBeGreaterThan(50);

  // A forced collection should release nearly all of them.
  Bun.gc(true);
  expect(protectedStreamCount() - baseline).toBeLessThan(10);
});
// Regression coverage: reusing a Response's body inside a Bun.serve handler
// must not accumulate protected ReadableStreams across requests.
test("Bun.serve with Response body reuse should not leak", async () => {
  let requestCount = 0;
  const server = Bun.serve({
    port: 0,
    fetch(req) {
      requestCount++;
      const stream = new ReadableStream({
        start(controller) {
          controller.enqueue(new TextEncoder().encode("hello"));
          controller.close();
        },
      });
      const originalResponse = new Response(stream);
      // This pattern in serve handlers was causing the leak
      return new Response(originalResponse.body, {
        status: originalResponse.status,
        statusText: originalResponse.statusText,
        headers: originalResponse.headers,
      });
    },
  });
  try {
    // Get baseline
    Bun.gc(true);
    const baselineStats = heapStats();
    const baselineStreams = baselineStats.protectedObjectTypeCounts.ReadableStream || 0;
    // Make many requests, consuming each body so the client-side stream
    // completes. NOTE(review): an unconsumed fetch body is itself a
    // ReadableStream that may stay protected until collected, which could
    // inflate the count below and make the assertion flaky.
    for (let i = 0; i < 50; i++) {
      const res = await fetch(`http://localhost:${server.port}`);
      await res.text();
    }
    expect(requestCount).toBe(50);
    // Force GC and check for leaks
    Bun.gc(true);
    const stats = heapStats();
    const streamCount = stats.protectedObjectTypeCounts.ReadableStream || 0;
    // Should be very few protected streams (close to baseline)
    expect(streamCount - baselineStreams).toBeLessThan(5);
  } finally {
    // Guarantee the server is shut down even if an assertion throws.
    server.stop();
  }
});