Compare commits

...

5 Commits

Author     SHA1        Message                                                                   Date
Don Isaac  caa6a7c5ac  wip                                                                       2025-01-06 09:52:09 -08:00
Don Isaac  85b97e0303  Merge branch 'main' of github.com:oven-sh/bun into don/fix/streams-leak  2025-01-06 09:50:24 -08:00
Don Isaac  85336764ff  Merge branch 'main' of github.com:oven-sh/bun into don/fix/streams-leak  2025-01-03 11:40:52 -08:00
Don Isaac  31482ec58c  wip                                                                       2024-12-30 23:46:19 -05:00
Don Isaac  327451429d  fix: memory leak when reading chunks from a stream                        2024-12-30 23:17:07 -05:00
10 changed files with 326 additions and 55 deletions

.vscode/launch.json (generated, vendored): 9 changed lines

@@ -1127,6 +1127,15 @@
"action": "openExternally",
},
},
+ {
+   "type": "bun",
+   "request": "launch",
+   "name": "bun debug [file]",
+   // "program": "${file}",
+   "runtimeArgs": ["run", "${file}"],
+   "runtime": "${workspaceFolder}/build/debug/bun-debug",
+   "cwd": "${workspaceFolder}",
+ },
{
"type": "cppvsdbg",
"sourceFileMap": {

.vscode/settings.json

@@ -2,7 +2,7 @@
// Editor
"editor.tabSize": 2,
"editor.insertSpaces": true,
"editor.formatOnSave": true,
// "editor.formatOnSave": true,
"editor.formatOnSaveMode": "file",
// Search
@@ -17,6 +17,7 @@
// This will fill up your whole search history.
"test/js/node/test/fixtures": true,
"test/js/node/test/common": true,
"./build/debug/codegen/**": false,
},
"search.followSymlinks": false,
"search.useIgnoreFiles": true,

Makefile

@@ -66,9 +66,10 @@ TRIPLET = $(OS_NAME)-$(ARCH_NAME)
PACKAGE_NAME = bun-$(TRIPLET)
PACKAGES_REALPATH = $(realpath packages)
PACKAGE_DIR = $(PACKAGES_REALPATH)/$(PACKAGE_NAME)
+ BUILD_REALPATH = $(realpath build)
DEBUG_PACKAGE_DIR = $(PACKAGES_REALPATH)/debug-$(PACKAGE_NAME)
RELEASE_BUN = $(PACKAGE_DIR)/bun
- DEBUG_BIN = $(DEBUG_PACKAGE_DIR)
+ DEBUG_BIN = $(BUILD_REALPATH)/debug
DEBUG_BUN = $(DEBUG_BIN)/bun-debug
BUILD_ID = $(shell cat ./src/build-id)
PACKAGE_JSON_VERSION = $(BUN_BASE_VERSION).$(BUILD_ID)
@@ -905,7 +906,7 @@ zig-win32:
# Hardened runtime will not work with debugging
bun-codesign-debug:
- 	codesign --entitlements $(realpath entitlements.debug.plist) --force --timestamp --sign "$(CODESIGN_IDENTITY)" -vvvv --deep --strict $(DEBUG_BUN)
+ 	codesign --entitlements $(realpath entitlements.debug.plist) --force --timestamp --sign - -vvvv --deep --strict $(DEBUG_BUN)
bun-codesign-release-local:
codesign --entitlements $(realpath entitlements.plist) --options runtime --force --timestamp --sign "$(CODESIGN_IDENTITY)" -vvvv --deep --strict $(RELEASE_BUN)

src/bun.js/webcore/streams.zig

@@ -708,6 +708,8 @@ pub const StreamResult = union(Tag) {
switch (this.*) {
.owned => |*owned| owned.deinitWithAllocator(bun.default_allocator),
.owned_and_done => |*owned_and_done| owned_and_done.deinitWithAllocator(bun.default_allocator),
+ // .temporary => |*temporary| temporary.deinitWithAllocator(bun.default_allocator),
+ // .temporary_and_done => |*temporary_and_done| temporary_and_done.deinitWithAllocator(bun.default_allocator),
.err => |err| {
if (err == .JSValue) {
err.JSValue.unprotect();
@@ -1020,6 +1022,11 @@ pub const StreamResult = union(Tag) {
const value = result.toJS(globalThis);
value.ensureStillAlive();
+ // switch (result.*) {
+ // .temporary => |*temporary| temporary.deinitWithAllocator(bun.default_allocator),
+ // .temporary_and_done => |*temporary_and_done| temporary_and_done.deinitWithAllocator(bun.default_allocator),
+ // else => {},
+ // }
result.* = .{ .temporary = .{} };
promise.resolve(globalThis, value);
},

src/io/PipeReader.zig

@@ -69,6 +69,7 @@ pub fn PosixPipeReader(
readFile(parent, resizable_buffer, fd, size_hint, received_hup);
},
.socket => {
+ // leak
readSocket(parent, resizable_buffer, fd, size_hint, received_hup);
},
.pipe => {
@@ -112,6 +113,7 @@ pub fn PosixPipeReader(
}
fn readSocket(parent: *This, resizable_buffer: *std.ArrayList(u8), fd: bun.FileDescriptor, size_hint: isize, received_hup: bool) void {
+ // leak
return readWithFn(parent, resizable_buffer, fd, size_hint, received_hup, .socket, wrapReadFn(bun.sys.recvNonBlock));
}

src/js/builtins.d.ts (vendored): 34 changed lines

@@ -59,6 +59,9 @@ declare function $getByIdDirect<T = any>(obj: any, key: string): T;
* gets a private property on an object. translates to the `op_get_by_id_direct` bytecode.
*
* TODO: clarify what private means exactly.
+  * @param obj The object the property belongs to
+  * @param key The `[[internal]]` property. Must be a string literal or WebKit will throw.
+  * @see [WebKit `emit_intrinsic_getByIdDirectPrivate`](https://github.com/oven-sh/WebKit/blob/e1a802a2287edfe7f4046a9dd8307c8b59f5d816/Source/JavaScriptCore/bytecompiler/NodesCodegen.cpp#L1461)
*/
declare function $getByIdDirectPrivate<T = any>(obj: any, key: string): T;
/**
@@ -161,8 +164,26 @@ declare function $toPropertyKey(x: any): PropertyKey;
* `$toObject(this, "Class.prototype.method requires that |this| not be null or undefined");`
*/
declare function $toObject(object: any, errorMessage?: string): object;
+ /**
+  * ## References
+  * - [WebKit - `emit_intrinsic_newArrayWithSize`](https://github.com/oven-sh/WebKit/blob/e1a802a2287edfe7f4046a9dd8307c8b59f5d816/Source/JavaScriptCore/bytecompiler/NodesCodegen.cpp#L2317)
+  */
declare function $newArrayWithSize<T>(size: number): T[];
- declare function $newArrayWithSpecies(): TODO;
+ /**
+  * Optimized path for creating a new array storing objects with the same homogeneous Structure
+  * as {@link array}.
+  *
+  * @param size the initial size of the new array
+  * @param array the array whose shape we want to copy
+  *
+  * @returns a new array
+  *
+  * ## References
+  * - [WebKit - `emit_intrinsic_newArrayWithSpecies`](https://github.com/oven-sh/WebKit/blob/e1a802a2287edfe7f4046a9dd8307c8b59f5d816/Source/JavaScriptCore/bytecompiler/NodesCodegen.cpp#L2328)
+  * - [WebKit - #4909](https://github.com/WebKit/WebKit/pull/4909)
+  * - [WebKit Bugzilla - Related Issue/Ticket](https://bugs.webkit.org/show_bug.cgi?id=245797)
+  */
+ declare function $newArrayWithSpecies<T>(size: number, array: T[]): T[];
declare function $newPromise(): TODO;
declare function $createPromise(): TODO;
declare const $iterationKindKey: TODO;
@@ -452,6 +473,17 @@ declare function $createCommonJSModule(
declare function $overridableRequire(this: CommonJSModuleRecord, id: string): any;
// The following I cannot find any definitions of, but they are functional.
+ /**
+  * Converts a number into an integer suitable for use as the length of an array-like object.
+  *
+  * @param length the number to convert
+  * @returns `length` as an int32
+  * @throws if `length` is not castable into an int32
+  *
+  * ## References
+  * - [ECMA-262 § 7.1.15](https://262.ecma-international.org/6.0/#sec-tolength)
+  * - [WebKit - `toLengthThunkGenerator`](https://github.com/oven-sh/WebKit/blob/e1a802a2287edfe7f4046a9dd8307c8b59f5d816/Source/JavaScriptCore/jit/ThunkGenerators.cpp#L1529)
+  */
declare function $toLength(length: number): number;
declare function $isTypedArrayView(obj: unknown): obj is ArrayBufferView | DataView | Uint8Array;
declare function $setStateToMax(target: any, state: number): void;
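
A quick illustration of how the intrinsics documented in this file fit together inside a builtin. This is a hedged sketch, not code from the PR: the `$`-prefixed intrinsics are only valid inside Bun's builtins codegen (they are rewritten to bytecode, so this will not run as plain TypeScript), and `cloneArrayLike` is a hypothetical helper name.

// Hypothetical builtin: copy an array-like into a fresh dense array.
export function cloneArrayLike(source: ArrayLike<number>) {
  // ToLength per ECMA-262 § 7.1.15, as documented for $toLength above.
  const length = $toLength(source.length);
  // Preallocate to the final size instead of growing via push().
  const result = $newArrayWithSize<number>(length);
  for (let i = 0; i < length; i++) {
    // Direct element store; skips prototype-chain setters.
    $putByValDirect(result, i, source[i]);
  }
  return result;
}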

src/js/builtins/ReadableByteStreamInternals.ts

@@ -1,3 +1,9 @@
+ /// <reference path="../builtins.d.ts" />
+ /// <reference path="./ReadableStreamInternals.ts" />
+ /**
+  * ## References
+  * - [ReadableStream - `ReadableByteStreamController`](https://streams.spec.whatwg.org/#rbs-controller-class)
+  */
/*
* Copyright (C) 2016 Canon Inc. All rights reserved.
*
@@ -31,6 +37,7 @@ export function privateInitializeReadableByteStreamController(this, stream, unde
if ($getByIdDirectPrivate(stream, "readableStreamController") !== null)
throw new TypeError("ReadableStream already has a controller");
+ // https://streams.spec.whatwg.org/#rbs-controller-internal-slots
$putByIdDirectPrivate(this, "controlledReadableStream", stream);
$putByIdDirectPrivate(this, "underlyingByteSource", underlyingByteSource);
$putByIdDirectPrivate(this, "pullAgain", false);
@@ -181,7 +188,11 @@ export function readableByteStreamControllerHandleQueueDrain(controller) {
else $readableByteStreamControllerCallPullIfNeeded(controller);
}
+ /**
+  * ## [`ReadableStream.pull(controller)`](https://streams.spec.whatwg.org/#dom-underlyingsource-pull)
+  */
export function readableByteStreamControllerPull(controller) {
+   console.log(new Error().stack);
const stream = $getByIdDirectPrivate(controller, "controlledReadableStream");
$assert($readableStreamHasDefaultReader(stream));
if ($getByIdDirectPrivate(controller, "queue").content?.isNotEmpty()) {
@@ -204,7 +215,7 @@ export function readableByteStreamControllerPull(controller) {
} catch (error) {
return Promise.$reject(error);
}
-   const pullIntoDescriptor = {
+   const pullIntoDescriptor: PullIntoDescriptor = {
buffer,
byteOffset: 0,
byteLength: $getByIdDirectPrivate(controller, "autoAllocateChunkSize"),
@@ -213,6 +224,7 @@ export function readableByteStreamControllerPull(controller) {
ctor: Uint8Array,
readerType: "default",
};
console.log(".pull() - adding pullIntoDescriptor", pullIntoDescriptor);
$getByIdDirectPrivate(controller, "pendingPullIntos").push(pullIntoDescriptor);
}
@@ -289,6 +301,7 @@ export function readableStreamReaderKind(reader) {
}
export function readableByteStreamControllerEnqueue(controller, chunk) {
console.log("enqueuing chunk", chunk);
const stream = $getByIdDirectPrivate(controller, "controlledReadableStream");
$assert(!$getByIdDirectPrivate(controller, "closeRequested"));
$assert($getByIdDirectPrivate(stream, "state") === $streamReadable);
@@ -298,14 +311,17 @@ export function readableByteStreamControllerEnqueue(controller, chunk) {
) {
/* default reader */
case 1: {
-     if (!$getByIdDirectPrivate($getByIdDirectPrivate(stream, "reader"), "readRequests")?.isNotEmpty())
+     console.log("default reader");
+     if (!$getByIdDirectPrivate($getByIdDirectPrivate(stream, "reader"), "readRequests")?.isNotEmpty()) {
+       console.log("no read requests, enqueueing chunk");
      $readableByteStreamControllerEnqueueChunk(
        controller,
        $transferBufferToCurrentRealm(chunk.buffer),
        chunk.byteOffset,
        chunk.byteLength,
      );
-     else {
+     } else {
+       console.log("read requests are queued, fulfilling read request");
$assert(!$getByIdDirectPrivate(controller, "queue").content.size());
const transferredView =
chunk.constructor === Uint8Array ? chunk : new Uint8Array(chunk.buffer, chunk.byteOffset, chunk.byteLength);
@@ -316,6 +332,7 @@ export function readableByteStreamControllerEnqueue(controller, chunk) {
/* BYOB */
case 2: {
console.log("BYOB reader, enqueuing chunk");
$readableByteStreamControllerEnqueueChunk(
controller,
$transferBufferToCurrentRealm(chunk.buffer),
@@ -354,12 +371,19 @@ export function readableByteStreamControllerEnqueueChunk(controller, buffer, byt
byteLength: byteLength,
});
$getByIdDirectPrivate(controller, "queue").size += byteLength;
+   console.log(
+     "enqueued chunk",
+     "queue.content.size()",
+     $getByIdDirectPrivate(controller, "queue").content.size(),
+     "queue.size",
+     $getByIdDirectPrivate(controller, "queue").size,
+   );
}
export function readableByteStreamControllerRespondWithNewView(controller, view) {
$assert($getByIdDirectPrivate(controller, "pendingPullIntos").isNotEmpty());
-   let firstDescriptor = $getByIdDirectPrivate(controller, "pendingPullIntos").peek();
+   let firstDescriptor: PullIntoDescriptor = $getByIdDirectPrivate(controller, "pendingPullIntos").peek();
if (firstDescriptor.byteOffset + firstDescriptor.bytesFilled !== view.byteOffset)
throw new RangeError("Invalid value for view.byteOffset");
@@ -381,8 +405,9 @@ export function readableByteStreamControllerRespond(controller, bytesWritten) {
$readableByteStreamControllerRespondInternal(controller, bytesWritten);
}
+ // export declare function $readableByteStreamControllerRespondInternal(controller, bytesWritten);
export function readableByteStreamControllerRespondInternal(controller, bytesWritten) {
-   let firstDescriptor = $getByIdDirectPrivate(controller, "pendingPullIntos").peek();
+   let firstDescriptor: PullIntoDescriptor = $getByIdDirectPrivate(controller, "pendingPullIntos").peek();
let stream = $getByIdDirectPrivate(controller, "controlledReadableStream");
if ($getByIdDirectPrivate(stream, "state") === $streamClosed) {
@@ -393,7 +418,12 @@ export function readableByteStreamControllerRespondInternal(controller, bytesWri
}
}
- export function readableByteStreamControllerRespondInReadableState(controller, bytesWritten, pullIntoDescriptor) {
+ // export declare function $readableByteStreamControllerRespondInReadableState(controller, bytesWritten, pullIntoDescriptor: PullIntoDescriptor)
+ export function readableByteStreamControllerRespondInReadableState(
+   controller,
+   bytesWritten,
+   pullIntoDescriptor: PullIntoDescriptor,
+ ) {
if (pullIntoDescriptor.bytesFilled + bytesWritten > pullIntoDescriptor.byteLength)
throw new RangeError("bytesWritten value is too great");
@@ -444,12 +474,28 @@ export function readableByteStreamControllerRespondInClosedState(controller, fir
}
}
// Spec name: readableByteStreamControllerProcessPullIntoDescriptorsUsingQueue (shortened for readability).
+ /**
+  * [readableByteStreamControllerProcessPullIntoDescriptorsUsingQueue](https://streams.spec.whatwg.org/#readable-byte-stream-controller-process-pull-into-descriptors-using-queue)
+  */
+ // export declare function $readableByteStreamControllerProcessPullDescriptors(controller);
export function readableByteStreamControllerProcessPullDescriptors(controller) {
+   console.log("processing pull into descriptors");
  $assert(!$getByIdDirectPrivate(controller, "closeRequested"));
  while ($getByIdDirectPrivate(controller, "pendingPullIntos").isNotEmpty()) {
-     if ($getByIdDirectPrivate(controller, "queue").size === 0) return;
-     let pullIntoDescriptor = $getByIdDirectPrivate(controller, "pendingPullIntos").peek();
+     // if controller.[[queueTotalSize]] is 0, then break
+     if ($getByIdDirectPrivate(controller, "queue").size === 0) {
+       console.log(
+         "queue.content.size()",
+         $getByIdDirectPrivate(controller, "queue").content.size(),
+         "pendingPullIntos.size()",
+         $getByIdDirectPrivate(controller, "pendingPullIntos").size(),
+       );
+       return;
+     } else {
+       console.log("queue is not empty");
+     }
+     // Let pullIntoDescriptor be controller.[[pendingPullIntos]]
+     let pullIntoDescriptor: PullIntoDescriptor = $getByIdDirectPrivate(controller, "pendingPullIntos").peek();
if ($readableByteStreamControllerFillDescriptorFromQueue(controller, pullIntoDescriptor)) {
$readableByteStreamControllerShiftPendingDescriptor(controller);
$readableByteStreamControllerCommitDescriptor(
@@ -461,7 +507,13 @@ export function readableByteStreamControllerProcessPullDescriptors(controller) {
}
// Spec name: readableByteStreamControllerFillPullIntoDescriptorFromQueue (shortened for readability).
- export function readableByteStreamControllerFillDescriptorFromQueue(controller, pullIntoDescriptor) {
+ /**
+  * [`readableByteStreamControllerFillPullIntoDescriptorFromQueue`](https://streams.spec.whatwg.org/#readable-byte-stream-controller-fill-pull-into-descriptor-from-queue)
+  */
+ export function readableByteStreamControllerFillDescriptorFromQueue(
+   controller,
+   pullIntoDescriptor: PullIntoDescriptor,
+ ) {
const currentAlignedBytes =
pullIntoDescriptor.bytesFilled - (pullIntoDescriptor.bytesFilled % pullIntoDescriptor.elementSize);
const maxBytesToCopy =
@@ -493,8 +545,18 @@ export function readableByteStreamControllerFillDescriptorFromQueue(controller,
destStart,
);
-   if (headOfQueue.byteLength === bytesToCopy) $getByIdDirectPrivate(controller, "queue").content.shift();
-   else {
+   if (headOfQueue.byteLength === bytesToCopy) {
+     console.log(
+       "shifting queue",
+       "queue.content.size()",
+       $getByIdDirectPrivate(controller, "queue").content.size(),
+       "queue.content.peek()",
+       $getByIdDirectPrivate(controller, "queue").content.peek(),
+       "queue.size",
+       $getByIdDirectPrivate(controller, "queue").size,
+     );
+     $getByIdDirectPrivate(controller, "queue").content.shift();
+   } else {
headOfQueue.byteOffset += bytesToCopy;
headOfQueue.byteLength -= bytesToCopy;
}
@@ -519,6 +581,7 @@ export function readableByteStreamControllerFillDescriptorFromQueue(controller,
}
// Spec name: readableByteStreamControllerShiftPendingPullInto (renamed for consistency).
+ // export declare function $readableByteStreamControllerShiftPendingDescriptor(controller): PullIntoDescriptor;
export function readableByteStreamControllerShiftPendingDescriptor(controller) {
let descriptor = $getByIdDirectPrivate(controller, "pendingPullIntos").shift();
$readableByteStreamControllerInvalidateBYOBRequest(controller);
@@ -534,6 +597,7 @@ export function readableByteStreamControllerInvalidateBYOBRequest(controller) {
}
// Spec name: readableByteStreamControllerCommitPullIntoDescriptor (shortened for readability).
+ // export declare function $readableByteStreamControllerCommitDescriptor(stream, pullIntoDescriptor: PullIntoDescriptor);
export function readableByteStreamControllerCommitDescriptor(stream, pullIntoDescriptor) {
$assert($getByIdDirectPrivate(stream, "state") !== $streamErrored);
let done = false;
@@ -600,7 +664,7 @@ export function readableByteStreamControllerPullInto(controller, view) {
// name has already been met before.
const ctor = view.constructor;
-   const pullIntoDescriptor = {
+   const pullIntoDescriptor: PullIntoDescriptor = {
buffer: view.buffer,
byteOffset: view.byteOffset,
byteLength: view.byteLength,
@@ -654,3 +718,115 @@ export function readableStreamAddReadIntoRequest(stream) {
return readRequest;
}
+ // buffer
+ // An ArrayBuffer
+ // buffer byte length
+ // A positive integer representing the initial byte length of buffer
+ // byte offset
+ // A nonnegative integer byte offset into the buffer where the underlying byte source will start writing
+ // byte length
+ // A positive integer number of bytes which can be written into the buffer
+ // bytes filled
+ // A nonnegative integer number of bytes that have been written into the buffer so far
+ // minimum fill
+ // A positive integer representing the minimum number of bytes that must be written into the buffer before the associated read() request may be fulfilled. By default, this equals the element size.
+ // element size
+ // A positive integer representing the number of bytes that can be written into the buffer at a time, using views of the type described by the view constructor
+ // view constructor
+ // A typed array constructor or %DataView%, which will be used for constructing a view with which to write into the buffer
+ // reader type
+ // Either "default" or "byob", indicating what type of readable stream reader initiated this request, or "none" if the initiating reader was released
+ /**
+  * ## References
+  * - [Spec](https://streams.spec.whatwg.org/#pull-into-descriptor)
+  */
+ interface PullIntoDescriptor {
+   /**
+    * An {@link ArrayBuffer}
+    */
+   buffer: ArrayBuffer;
+   /**
+    * A positive integer representing the initial byte length of {@link buffer}
+    */
+   bufferByteLength: number;
+   /**
+    * A nonnegative integer byte offset into the {@link buffer} where the
+    * underlying byte source will start writing
+    */
+   byteOffset: number;
+   /**
+    * A positive integer number of bytes which can be written into the
+    * {@link buffer}
+    */
+   byteLength: number;
+   /**
+    * A nonnegative integer number of bytes that have been written into the
+    * {@link buffer} so far
+    */
+   bytesFilled: number;
+   /**
+    * A positive integer representing the minimum number of bytes that must be
+    * written into the {@link buffer} before the associated read() request may be
+    * fulfilled. By default, this equals the element size.
+    */
+   minimumFill: number;
+   /**
+    * A positive integer representing the number of bytes that can be written
+    * into the {@link buffer} at a time, using views of the type described by the
+    * view constructor
+    */
+   elementSize: number;
+   /**
+    * `view constructor`
+    *
+    * A {@link NodeJS.TypedArray typed array constructor} or
+    * {@link NodeJS.DataView `%DataView%`}, which will be used for constructing a
+    * view with which to write into the {@link buffer}
+    *
+    * ## References
+    * - [`TypedArray` Constructors](https://tc39.es/ecma262/#table-49)
+    */
+   ctor: ArrayBufferViewConstructor;
+   /**
+    * Either "default" or "byob", indicating what type of readable stream reader
+    * initiated this request, or "none" if the initiating reader was released
+    */
+   readerType: "default" | "byob" | "none";
+ }
+ // type TypedArray =
+ //   | Uint8Array
+ //   | Uint8ClampedArray
+ //   | Uint16Array
+ //   | Uint32Array
+ //   | Int8Array
+ //   | Int16Array
+ //   | Int32Array
+ //   | BigUint64Array
+ //   | BigInt64Array
+ //   | Float32Array
+ //   | Float64Array;
+ // type ArrayBufferView = TypedArray | DataView;
+ type TypedArrayConstructor =
+   | Uint8ArrayConstructor
+   | Uint8ClampedArrayConstructor
+   | Uint16ArrayConstructor
+   | Uint32ArrayConstructor
+   | Int8ArrayConstructor
+   | Int16ArrayConstructor
+   | Int32ArrayConstructor
+   | BigUint64ArrayConstructor
+   | BigInt64ArrayConstructor
+   | Float32ArrayConstructor
+   | Float64ArrayConstructor;
+ type ArrayBufferViewConstructor = TypedArrayConstructor | DataViewConstructor;
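
Since the PR leans on PullIntoDescriptor bookkeeping, here is a minimal standalone sketch of the fill-from-queue arithmetic that readableByteStreamControllerFillDescriptorFromQueue implements above. It is a simplified model under stated assumptions: the internal Denqueue is replaced by a plain array, the `$` intrinsics are dropped, and the function name is illustrative.

interface QueuedChunk {
  buffer: ArrayBuffer;
  byteOffset: number;
  byteLength: number;
}

function fillDescriptorFromQueue(queue: QueuedChunk[], queueSize: number, desc: PullIntoDescriptor): boolean {
  // Reads may only complete on whole elements, so round bytesFilled down
  // to a multiple of the element size.
  const currentAlignedBytes = desc.bytesFilled - (desc.bytesFilled % desc.elementSize);
  // Copy no more than the queue holds and no more than the view can take.
  const maxBytesToCopy = Math.min(queueSize, desc.byteLength - desc.bytesFilled);
  const maxBytesFilled = desc.bytesFilled + maxBytesToCopy;
  const maxAlignedBytes = maxBytesFilled - (maxBytesFilled % desc.elementSize);
  let totalBytesToCopyRemaining = maxBytesToCopy;
  let ready = false;
  if (maxAlignedBytes > currentAlignedBytes) {
    // At least one more whole element can be filled, so the pending read
    // request can be fulfilled after this pass.
    totalBytesToCopyRemaining = maxAlignedBytes - desc.bytesFilled;
    ready = true;
  }
  while (totalBytesToCopyRemaining > 0) {
    const head = queue[0];
    const bytesToCopy = Math.min(totalBytesToCopyRemaining, head.byteLength);
    const destStart = desc.byteOffset + desc.bytesFilled;
    new Uint8Array(desc.buffer, destStart, bytesToCopy).set(
      new Uint8Array(head.buffer, head.byteOffset, bytesToCopy),
    );
    // Consume the head chunk, either whole (shift) or partially (advance).
    if (head.byteLength === bytesToCopy) queue.shift();
    else {
      head.byteOffset += bytesToCopy;
      head.byteLength -= bytesToCopy;
    }
    desc.bytesFilled += bytesToCopy;
    totalBytesToCopyRemaining -= bytesToCopy;
  }
  return ready;
}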

src/js/internal-for-testing.ts

@@ -151,3 +151,4 @@ export const bindgen = $zig("bindgen_test.zig", "getBindgenTestFunctions") as {
};
export const noOpForTesting = $cpp("NoOpForTesting.cpp", "createNoOpForTesting");
+ export const Denqueue = require("internal/fifo");

src/js/internal/fifo.ts

@@ -1,5 +1,5 @@
var slice = Array.prototype.slice;
- class Denqueue {
+ class Denqueue<T> {
constructor() {
this._head = 0;
this._tail = 0;
@@ -8,10 +8,10 @@ class Denqueue {
this._list = $newArrayWithSize(4);
}
-   _head;
-   _tail;
-   _capacityMask;
-   _list;
+   _head: number;
+   _tail: number;
+   _capacityMask: number;
+   _list: T[];
size() {
if (this._head === this._tail) return 0;
@@ -31,7 +31,8 @@ class Denqueue {
var { _head: head, _tail, _list, _capacityMask } = this;
if (head === _tail) return undefined;
var item = _list[head];
-     $putByValDirect(_list, head, undefined);
+     // $putByValDirect(_list, head, undefined);
+     this._list[head] = undefined;
head = this._head = (head + 1) & _capacityMask;
if (head < 2 && _tail > 10000 && _tail <= _list.length >>> 2) this._shrinkArray();
return item;
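
For context, Denqueue is a power-of-two ring buffer: _capacityMask is always _list.length - 1, so (head + 1) & _capacityMask wraps the index without a modulo, and clearing the vacated slot (the line changed above) is what lets the GC reclaim dequeued chunks. A minimal standalone sketch of the same index math, assuming a hypothetical RingBuffer name and a simplified growth policy rather than Denqueue's exact one:

// Capacity stays a power of two so (i + 1) & mask wraps correctly.
class RingBuffer<T> {
  private head = 0;
  private tail = 0;
  private mask = 3; // capacity 4 - 1
  private list: (T | undefined)[] = new Array(4);

  push(item: T): void {
    this.list[this.tail] = item;
    this.tail = (this.tail + 1) & this.mask;
    if (this.tail === this.head) this.grow(); // buffer is full; re-pack
  }

  shift(): T | undefined {
    if (this.head === this.tail) return undefined; // empty
    const item = this.list[this.head];
    this.list[this.head] = undefined; // drop the reference so it can be collected
    this.head = (this.head + 1) & this.mask;
    return item;
  }

  private grow(): void {
    const items: T[] = [];
    let i = this.head;
    // head === tail here means "full", so walk the whole ring once.
    do {
      items.push(this.list[i] as T);
      i = (i + 1) & this.mask;
    } while (i !== this.tail);
    this.list = new Array(this.list.length * 2);
    this.mask = this.list.length - 1;
    for (let j = 0; j < items.length; j++) this.list[j] = items[j];
    this.head = 0;
    this.tail = items.length;
  }
}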

test/js/bun/http/serve-body-leak.test.ts

@@ -1,17 +1,29 @@
import type { Subprocess } from "bun";
- import { afterEach, beforeEach, expect, it } from "bun:test";
+ import jsc from "bun:jsc";
+ import { describe, afterEach, beforeEach, expect, it, afterAll } from "bun:test";
import { bunEnv, bunExe, isDebug, isFlaky, isLinux } from "harness";
import { join } from "path";
- const payload = Buffer.alloc(512 * 1024, "1").toString("utf-8"); // decent size payload to test memory leak
- const batchSize = 40;
- const totalCount = 10_000;
- const zeroCopyPayload = new Blob([payload]);
- const zeroCopyJSONPayload = new Blob([JSON.stringify({ bun: payload })]);
+ var payload = Buffer.alloc(512 * 1024, "1").toString("utf-8"); // decent size payload to test memory leak
+ var batchSize = 40;
+ var totalCount = 10_000;
+ var zeroCopyPayload = new Blob([payload]);
+ var zeroCopyJSONPayload = new Blob([JSON.stringify({ bun: payload })]);
+ var process: Subprocess<"ignore", "inherit", "inherit">;
+ afterAll(() => {
+   process.kill();
+   payload = undefined as any;
+   batchSize = undefined as any;
+   totalCount = undefined as any;
+   process = undefined as any;
+   Bun.gc(true);
+ });
async function getURL() {
let defer = Promise.withResolvers<string>();
-   const process = Bun.spawn([bunExe(), "--smol", join(import.meta.dirname, "body-leak-test-fixture.ts")], {
+   process = Bun.spawn([bunExe(), "--smol", join(import.meta.dirname, "body-leak-test-fixture.ts")], {
env: bunEnv,
stdout: "inherit",
stderr: "inherit",
@@ -139,28 +151,57 @@ async function calculateMemoryLeak(fn: (url: URL) => Promise<void>, url: URL) {
// Since the payload size is 512 KB
// If it was leaking the body, the memory usage would be at least 512 KB * 10_000 = 5 GB
// If it ends up around 280 MB, it's probably not leaking the body.
- for (const test_info of [
-   ["#10265 should not leak memory when ignoring the body", callIgnore, false, 64],
-   ["should not leak memory when buffering the body", callBuffering, false, 64],
-   ["should not leak memory when buffering a JSON body", callJSONBuffering, false, 64],
-   ["should not leak memory when buffering the body and accessing req.body", callBufferingBodyGetter, false, 64],
-   ["should not leak memory when streaming the body", callStreaming, isFlaky && isLinux, 64],
-   ["should not leak memory when streaming the body incompletely", callIncompleteStreaming, false, 64],
-   ["should not leak memory when streaming the body and echoing it back", callStreamingEcho, false, 64],
- ] as const) {
-   const [testName, fn, skip, maxMemoryGrowth] = test_info;
-   it.todoIf(skip)(
-     testName,
-     async () => {
-       const { url, process } = await getURL();
-       await using processHandle = process;
-       const report = await calculateMemoryLeak(fn, url);
-       // peak memory is too high
-       expect(report.peak_memory).not.toBeGreaterThan(report.start_memory * 2.5);
-       // acceptable memory leak
-       expect(report.leak).toBeLessThanOrEqual(maxMemoryGrowth);
-       expect(report.end_memory).toBeLessThanOrEqual(512 * 1024 * 1024);
-     },
-     isDebug ? 60_000 : 40_000,
-   );
- }
+ type TestCase = [name: string, fn: (url: URL) => Promise<void>, skip: boolean, maxMemoryGrowth: number];
+ const tests: readonly TestCase[] = [
+   // ["#10265 should not leak memory when ignoring the body", callIgnore, false, 64],
+   // ["should not leak memory when buffering the body", callBuffering, false, 64],
+   // ["should not leak memory when buffering a JSON body", callJSONBuffering, false, 64],
+   // ["should not leak memory when buffering the body and accessing req.body", callBufferingBodyGetter, false, 64],
+   // ["should not leak memory when streaming the body", callStreaming, isFlaky && isLinux, 64],
+   // ["should not leak memory when streaming the body incompletely", callIncompleteStreaming, false, 64],
+   // ["should not leak memory when streaming the body and echoing it back", callStreamingEcho, false, 64],
+   // ["ignoring the body (#10265)", callIgnore, false, 64],
+   // ["buffering the body", callBuffering, false, 64],
+   // ["buffering a JSON body", callJSONBuffering, false, 64],
+   ["buffering the body and accessing req.body", callBufferingBodyGetter, false, 64],
+   // ["streaming the body", callStreaming, isFlaky && isLinux, 64],
+   // ["streaming the body incompletely", callIncompleteStreaming, false, 64],
+   // ["streaming the body and echoing it back", callStreamingEcho, false, 64],
+ ];
+ describe.each(tests)("When %s", (name, fn, skip, maxMemoryGrowth) => {
+   it.skipIf(skip)("should not leak memory", async () => {
+     const { url, process } = await getURL();
+     await using processHandle = process;
+     const report = await calculateMemoryLeak(fn, url);
+     // peak memory is too high
+     expect(report.peak_memory).not.toBeGreaterThan(report.start_memory * 2.5);
+     // acceptable memory leak
+     expect(report.leak).toBeLessThanOrEqual(maxMemoryGrowth);
+     expect(report.end_memory).toBeLessThanOrEqual(512 * 1024 * 1024);
+   });
+ });
+ // for (const test_info of [
+ //   ["#10265 should not leak memory when ignoring the body", callIgnore, false, 64],
+ //   ["should not leak memory when buffering the body", callBuffering, false, 64],
+ //   ["should not leak memory when buffering a JSON body", callJSONBuffering, false, 64],
+ //   ["should not leak memory when buffering the body and accessing req.body", callBufferingBodyGetter, false, 64],
+ //   ["should not leak memory when streaming the body", callStreaming, isFlaky && isLinux, 64],
+ //   ["should not leak memory when streaming the body incompletely", callIncompleteStreaming, false, 64],
+ //   ["should not leak memory when streaming the body and echoing it back", callStreamingEcho, false, 64],
+ // ] as const) {
+ //   const [testName, fn, skip, maxMemoryGrowth] = test_info;
+ //   it.todoIf(skip)(
+ //     testName,
+ //     async () => {
+ //       const { url, process } = await getURL();
+ //       await using processHandle = process;
+ //       const report = await calculateMemoryLeak(fn, url);
+ //       // peak memory is too high
+ //       expect(report.peak_memory).not.toBeGreaterThan(report.start_memory * 2.5);
+ //       // acceptable memory leak
+ //       expect(report.leak).toBeLessThanOrEqual(maxMemoryGrowth);
+ //       expect(report.end_memory).toBeLessThanOrEqual(512 * 1024 * 1024);
+ //       },
+ //       isDebug ? 60_000 : 40_000,
+ //   );
+ // }
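
The harness above detects leaks from process-level memory growth rather than per-allocation tracking: it fires requests in batches, then compares memory after a forced GC against the baseline. Here is a hedged sketch of that measurement pattern; the /report endpoint, its { rss } payload, and the MB conversion are assumptions for illustration, not the actual API of body-leak-test-fixture.ts or calculateMemoryLeak.

// Illustrative only: assumes the fixture exposes GET /report -> { rss: number }
// after running its own GC in the server process.
async function measureLeak(run: (url: URL) => Promise<void>, url: URL) {
  const rss = async (): Promise<number> => (await (await fetch(new URL("/report", url))).json()).rss;
  const start_memory = await rss();
  let peak_memory = start_memory;
  for (let i = 0; i < totalCount; i += batchSize) {
    // Fire a batch of concurrent requests, like callStreaming/callBuffering above.
    await Promise.all(Array.from({ length: batchSize }, () => run(url)));
    peak_memory = Math.max(peak_memory, await rss());
  }
  Bun.gc(true); // synchronous GC in this process; the fixture GCs on its side
  const end_memory = await rss();
  // Growth that survives GC across 10k half-megabyte bodies points at a real leak.
  const leak = Math.max(0, (end_memory - start_memory) / 1024 / 1024); // MB
  return { start_memory, peak_memory, end_memory, leak };
}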