Compare commits

...

9 Commits

Author           SHA1        Message                                                       Date
Jarred Sumner    35551f888c  Revert "ok" (This reverts commit 17088e24d8.)                 2025-10-14 19:54:13 -07:00
Jarred Sumner    b962a28a32  Revert "dont over report" (This reverts commit 4f6551ba58.)   2025-10-14 19:54:06 -07:00
Jarred Sumner    0d8bb27f45  Update HTTPThread.zig                                         2025-10-14 19:53:26 -07:00
Ciro Spaciari    4f6551ba58  dont over report                                              2025-10-14 19:46:12 -07:00
Ciro Spaciari    17088e24d8  ok                                                            2025-10-14 18:27:30 -07:00
Ciro Spaciari    d4cdc4fd35  opsie                                                         2025-10-14 18:27:30 -07:00
autofix-ci[bot]  57d34d62af  [autofix.ci] apply automated fixes                            2025-10-14 18:27:30 -07:00
Ciro Spaciari    de84549a16  test                                                          2025-10-14 18:27:30 -07:00
Ciro Spaciari    c4d46955e6  test                                                          2025-10-14 18:27:30 -07:00
5 changed files with 46 additions and 26 deletions

View File

@@ -250,30 +250,6 @@ pub const ArrayBuffer = extern struct {
            return this.value;
        }
        // If it's not a mimalloc heap buffer, we're not going to call a deallocator
        if (this.len > 0 and !bun.mimalloc.mi_is_in_heap_region(this.ptr)) {
            log("toJS but will never free: {d} bytes", .{this.len});
            if (this.typed_array_type == .ArrayBuffer) {
                return makeArrayBufferWithBytesNoCopy(
                    ctx,
                    this.ptr,
                    this.byte_len,
                    null,
                    null,
                );
            }
            return makeTypedArrayWithBytesNoCopy(
                ctx,
                this.typed_array_type.toTypedArrayType(),
                this.ptr,
                this.byte_len,
                null,
                null,
            );
        }
        return this.toJSUnchecked(ctx);
    }

View File

@@ -432,6 +432,8 @@ pub const FetchTasklet = struct {
readable.ptr.Bytes.onData(
    .{
        .owned_and_done = bun.ByteList.moveFromList(scheduled_response_buffer),
        // Investigate: owned_and_done is the correct behavior, but it has higher memory usage than temporary_and_done
        // .temporary_and_done = bun.ByteList.fromBorrowedSliceDangerous(chunk),
    },
    bun.default_allocator,
);

View File

@@ -195,7 +195,7 @@ pub fn init(opts: *const InitOpts) void {
pub fn onStart(opts: InitOpts) void {
    Output.Source.configureNamedThread("HTTP Client");
    bun.http.default_arena = Arena.init();
-   bun.http.default_allocator = bun.http.default_arena.allocator();
+   bun.http.default_allocator = bun.default_allocator;
    const loop = bun.jsc.MiniEventLoop.initGlobal(null, null);

View File

@@ -0,0 +1,24 @@
import { expect } from "bun:test";
let rssSample = 0;
const url = process.env.SERVER_URL;
const maxMemoryIncrease = parseInt(process.env.MAX_MEMORY_INCREASE || "0", 10);
for (let i = 0; i < 500; i++) {
  let response = await fetch(url);
  const reader = response.body.getReader();
  while (true) {
    const { done } = await reader.read();
    if (done) break;
    await Bun.sleep(1);
  }
  await Bun.sleep(1);
  const memoryUsage = process.memoryUsage().rss / 1024 / 1024;
  // memory should be stable after X iterations
  if (i == 250) rssSample = memoryUsage;
}
await Bun.sleep(1);
Bun.gc(true);
const memoryUsage = process.memoryUsage().rss / 1024 / 1024;
expect(rssSample).toBeGreaterThanOrEqual(memoryUsage - maxMemoryIncrease);
console.log("done");
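
For local debugging outside the test harness, the fixture above can be pointed at any server that returns a reasonably large body. Below is a minimal sketch of such a server; the file name, port, and suggested invocation are illustrative assumptions, not part of this change:

// debug-server.ts (hypothetical helper, not part of this diff)
// Serves a 128 KiB body on every request so the fixture has something to stream.
const buffer = Buffer.alloc(1024 * 128, "b");

const server = Bun.serve({
  port: 3000, // arbitrary local port for manual testing
  fetch: () => new Response(buffer),
});

// The fixture reads SERVER_URL and MAX_MEMORY_INCREASE (in MB) from the environment, e.g.:
// SERVER_URL=<server.url.href> MAX_MEMORY_INCREASE=5 bun fetch-leak-test-fixture-6.js
console.log(`server listening at ${server.url.href}`);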

View File

@@ -1,5 +1,5 @@
import { describe, expect, test } from "bun:test";
- import { bunEnv, bunExe, tls as COMMON_CERT, gc, isCI } from "harness";
+ import { bunEnv, bunExe, bunRun, tls as COMMON_CERT, gc, isCI } from "harness";
import { once } from "node:events";
import { createServer } from "node:http";
import { join } from "node:path";
@@ -184,3 +184,21 @@ test("do not leak", async () => {
    }
  }, 1e3);
});

test("should not leak using readable stream", async () => {
  const buffer = Buffer.alloc(1024 * 128, "b");
  using server = Bun.serve({
    port: 0,
    fetch: req => {
      return new Response(buffer);
    },
  });
  const { stdout, stderr } = bunRun(join(import.meta.dir, "fetch-leak-test-fixture-6.js"), {
    ...bunEnv,
    SERVER_URL: server.url.href,
    MAX_MEMORY_INCREASE: "5", // in MB
  });
  expect(stderr).toBe("");
  expect(stdout).toContain("done");
});