Compare commits

..

2 Commits

Author SHA1 Message Date
Claude Bot
e0070ae192 fix: forward options in TestContext.test() and adjust test timeout
Address review feedback:
- Forward options (including timeout) to bun:test in all
  TestContext.test() branches (only, todo, skip, default)
- Reduce outer test timeout from 30s to 15s with explanatory comment

Co-Authored-By: Claude <noreply@anthropic.com>
2026-02-25 16:24:55 +00:00
Claude Bot
e1140dd834 fix(test): use no-timeout default for node:test to match Node.js semantics
Node.js's `node:test` defaults to `Infinity` timeout (tests never time
out), but Bun was applying its own 5000ms default. This caused async
tests taking longer than 5s to fail unexpectedly.

Two changes:
- Set timeout=0 (no timeout) as the default for node:test when the user
  hasn't specified one, matching Node.js behavior.
- Replace the done-callback wrapper with an async function to avoid
  bun:test misinterpreting the wrapper's `done` parameter as a
  done-callback style test, which caused misleading error messages.

Closes #27422

Co-Authored-By: Claude <noreply@anthropic.com>
2026-02-25 16:12:51 +00:00
10 changed files with 62 additions and 409 deletions

View File

@@ -33,7 +33,7 @@ const stream = await renderToReadableStream(<Component message="Hello from serve
Combining this with `Bun.serve()`, we get a simple SSR HTTP server:
```tsx server.tsx icon="/icons/typescript.svg"
```tsx server.ts icon="/icons/typescript.svg"
Bun.serve({
async fetch() {
const stream = await renderToReadableStream(<Component message="Hello from server!" />);

View File

@@ -144,15 +144,9 @@ static JSC::EncodedJSValue JSC_HOST_CALL_ATTRIBUTES functionFuzzilli(JSC::JSGlob
WTF::String output = arg1.toWTFString(globalObject);
RETURN_IF_EXCEPTION(scope, JSC::JSValue::encode(JSC::jsUndefined()));
// Use a static FILE* to avoid repeatedly calling fdopen (which
// duplicates the descriptor and leaks) and to gracefully handle
// the case where REPRL_DWFD is not open (i.e. running outside
// the fuzzer harness).
static FILE* f = fdopen(REPRL_DWFD, "w");
if (f) {
fprintf(f, "%s\n", output.utf8().data());
fflush(f);
}
FILE* f = fdopen(REPRL_DWFD, "w");
fprintf(f, "%s\n", output.utf8().data());
fflush(f);
}
}

View File

@@ -2179,67 +2179,15 @@ pub const BundleV2 = struct {
output_file.is_executable = true;
}
// Write external sourcemap files next to the compiled executable and
// keep them in the output array. Destroy all other non-entry-point files.
// With --splitting, there can be multiple sourcemap files (one per chunk).
var kept: usize = 0;
for (output_files.items, 0..) |*current, i| {
if (i == entry_point_index) {
output_files.items[kept] = current.*;
kept += 1;
} else if (result == .success and current.output_kind == .sourcemap and current.value == .buffer) {
const sourcemap_bytes = current.value.buffer.bytes;
if (sourcemap_bytes.len > 0) {
// Derive the .map filename from the sourcemap's own dest_path,
// placed in the same directory as the compiled executable.
const map_basename = if (current.dest_path.len > 0)
bun.path.basename(current.dest_path)
else
bun.path.basename(bun.handleOom(std.fmt.allocPrint(bun.default_allocator, "{s}.map", .{full_outfile_path})));
const sourcemap_full_path = if (dirname.len == 0 or strings.eqlComptime(dirname, "."))
bun.handleOom(bun.default_allocator.dupe(u8, map_basename))
else
bun.handleOom(std.fmt.allocPrint(bun.default_allocator, "{s}{c}{s}", .{ dirname, std.fs.path.sep, map_basename }));
// Write the sourcemap file to disk next to the executable
var pathbuf: bun.PathBuffer = undefined;
const write_path = if (Environment.isWindows) sourcemap_full_path else map_basename;
switch (bun.jsc.Node.fs.NodeFS.writeFileWithPathBuffer(
&pathbuf,
.{
.data = .{ .buffer = .{
.buffer = .{
.ptr = @constCast(sourcemap_bytes.ptr),
.len = @as(u32, @truncate(sourcemap_bytes.len)),
.byte_len = @as(u32, @truncate(sourcemap_bytes.len)),
},
} },
.encoding = .buffer,
.dirfd = .fromStdDir(root_dir),
.file = .{ .path = .{
.string = bun.PathString.init(write_path),
} },
},
)) {
.err => |err| {
bun.Output.err(err, "failed to write sourcemap file '{s}'", .{write_path});
current.deinit();
},
.result => {
current.dest_path = sourcemap_full_path;
output_files.items[kept] = current.*;
kept += 1;
},
}
} else {
current.deinit();
}
} else {
if (i != entry_point_index) {
current.deinit();
}
}
output_files.items.len = kept;
const entry_point_output_file = output_files.swapRemove(entry_point_index);
output_files.items.len = 1;
output_files.items[0] = entry_point_output_file;
return result;
}

View File

@@ -546,57 +546,6 @@ pub const BuildCommand = struct {
Global.exit(1);
}
// Write external sourcemap files next to the compiled executable.
// With --splitting, there can be multiple .map files (one per chunk).
if (this_transpiler.options.source_map == .external) {
for (output_files) |f| {
if (f.output_kind == .sourcemap and f.value == .buffer) {
const sourcemap_bytes = f.value.buffer.bytes;
if (sourcemap_bytes.len == 0) continue;
// Use the sourcemap's own dest_path basename if available,
// otherwise fall back to {outfile}.map
const map_basename = if (f.dest_path.len > 0)
bun.path.basename(f.dest_path)
else brk: {
const exe_base = bun.path.basename(outfile);
break :brk if (compile_target.os == .windows and !strings.hasSuffixComptime(exe_base, ".exe"))
try std.fmt.allocPrint(allocator, "{s}.exe.map", .{exe_base})
else
try std.fmt.allocPrint(allocator, "{s}.map", .{exe_base});
};
// root_dir already points to the outfile's parent directory,
// so use map_basename (not a path with directory components)
// to avoid writing to a doubled directory path.
var pathbuf: bun.PathBuffer = undefined;
switch (bun.jsc.Node.fs.NodeFS.writeFileWithPathBuffer(
&pathbuf,
.{
.data = .{ .buffer = .{
.buffer = .{
.ptr = @constCast(sourcemap_bytes.ptr),
.len = @as(u32, @truncate(sourcemap_bytes.len)),
.byte_len = @as(u32, @truncate(sourcemap_bytes.len)),
},
} },
.encoding = .buffer,
.dirfd = .fromStdDir(root_dir),
.file = .{ .path = .{
.string = bun.PathString.init(map_basename),
} },
},
)) {
.err => |err| {
Output.err(err, "failed to write sourcemap file '{s}'", .{map_basename});
had_err = true;
},
.result => {},
}
}
}
}
const compiled_elapsed = @divTrunc(@as(i64, @truncate(std.time.nanoTimestamp() - bundled_end)), @as(i64, std.time.ns_per_ms));
const compiled_elapsed_digit_count: isize = switch (compiled_elapsed) {
0...9 => 3,

View File

@@ -149,13 +149,13 @@ class TestContext {
const { test } = bunTest();
if (options.only) {
test.only(name, fn);
test.only(name, fn, options);
} else if (options.todo) {
test.todo(name, fn);
test.todo(name, fn, options);
} else if (options.skip) {
test.skip(name, fn);
test.skip(name, fn, options);
} else {
test(name, fn);
test(name, fn, options);
}
}
@@ -304,32 +304,25 @@ function createTest(arg0: unknown, arg1: unknown, arg2: unknown) {
checkNotInsideTest(ctx, "test");
const context = new TestContext(true, name, Bun.main, ctx);
const runTest = (done: (error?: unknown) => void) => {
// Return an async function instead of a done-callback style function.
// Using (done) => {} would cause bun:test to interpret the function as
// a done-callback test (because callback.length >= 1), leading to
// misleading "done callback" error messages on timeout.
const runTest = async () => {
const originalContext = ctx;
ctx = context;
const endTest = (error?: unknown) => {
try {
done(error);
} finally {
ctx = originalContext;
}
};
let result: unknown;
try {
result = fn(context);
} catch (error) {
endTest(error);
return;
}
if (result instanceof Promise) {
(result as Promise<unknown>).then(() => endTest()).catch(error => endTest(error));
} else {
endTest();
await fn(context);
} finally {
ctx = originalContext;
}
};
return { name, options, fn: runTest };
// Node.js node:test defaults to Infinity timeout (no timeout).
// In bun:test, timeout=0 means "no timeout", so use that as default.
const testOptions = { ...options, timeout: options.timeout ?? 0 };
return { name, options: testOptions, fn: runTest };
}
function createDescribe(arg0: unknown, arg1: unknown, arg2: unknown) {

View File

@@ -976,9 +976,7 @@ pub const FormData = struct {
}
pub const Field = struct {
/// Raw slice into the input buffer. Not using `bun.Semver.String` because
/// file bodies can exceed 4 GB and Semver.String truncates to u32.
value: []const u8 = "",
value: bun.Semver.String = .{},
filename: bun.Semver.String = .{},
content_type: bun.Semver.String = .{},
is_file: bool = false,
@@ -1090,7 +1088,7 @@ pub const FormData = struct {
form: *jsc.DOMFormData,
pub fn onEntry(wrap: *@This(), name: bun.Semver.String, field: Field, buf: []const u8) void {
const value_str = field.value;
const value_str = field.value.slice(buf);
var key = jsc.ZigString.initUTF8(name.slice(buf));
if (field.is_file) {
@@ -1280,7 +1278,7 @@ pub const FormData = struct {
if (strings.endsWithComptime(body, "\r\n")) {
body = body[0 .. body.len - 2];
}
field.value = body;
field.value = subslicer.sub(body).value();
field.filename = filename orelse .{};
field.is_file = is_file;

View File

@@ -1,5 +1,5 @@
import { describe, expect, test } from "bun:test";
import { bunEnv, bunExe, tempDir } from "harness";
import { bunEnv, tempDir } from "harness";
import { join } from "path";
describe("Bun.build compile with sourcemap", () => {
@@ -26,9 +26,9 @@ main();`,
});
expect(result.success).toBe(true);
expect(result.outputs.length).toBe(1);
const executableOutput = result.outputs.find((o: any) => o.kind === "entry-point")!;
const executablePath = executableOutput.path;
const executablePath = result.outputs[0].path;
expect(await Bun.file(executablePath).exists()).toBe(true);
// Run the compiled executable and capture the error
@@ -94,167 +94,6 @@ main();`,
expect(exitCode).not.toBe(0);
});
test("compile with sourcemap: external writes .map file to disk", async () => {
using dir = tempDir("build-compile-sourcemap-external-file", helperFiles);
const result = await Bun.build({
entrypoints: [join(String(dir), "app.js")],
compile: true,
sourcemap: "external",
});
expect(result.success).toBe(true);
const executableOutput = result.outputs.find((o: any) => o.kind === "entry-point")!;
const executablePath = executableOutput.path;
expect(await Bun.file(executablePath).exists()).toBe(true);
// The sourcemap output should appear in build result outputs
const sourcemapOutputs = result.outputs.filter((o: any) => o.kind === "sourcemap");
expect(sourcemapOutputs.length).toBe(1);
// The .map file should exist next to the executable
const mapPath = sourcemapOutputs[0].path;
expect(mapPath).toEndWith(".map");
expect(await Bun.file(mapPath).exists()).toBe(true);
// Validate the sourcemap is valid JSON with expected fields
const mapContent = JSON.parse(await Bun.file(mapPath).text());
expect(mapContent.version).toBe(3);
expect(mapContent.sources).toBeArray();
expect(mapContent.sources.length).toBeGreaterThan(0);
expect(mapContent.mappings).toBeString();
});
test("compile without sourcemap does not write .map file", async () => {
using dir = tempDir("build-compile-no-sourcemap-file", {
"nosourcemap_entry.js": helperFiles["app.js"],
"helper.js": helperFiles["helper.js"],
});
const result = await Bun.build({
entrypoints: [join(String(dir), "nosourcemap_entry.js")],
compile: true,
});
expect(result.success).toBe(true);
const executableOutput = result.outputs.find((o: any) => o.kind === "entry-point")!;
const executablePath = executableOutput.path;
// No .map file should exist next to the executable
expect(await Bun.file(`${executablePath}.map`).exists()).toBe(false);
// No sourcemap outputs should be in the result
const sourcemapOutputs = result.outputs.filter((o: any) => o.kind === "sourcemap");
expect(sourcemapOutputs.length).toBe(0);
});
test("compile with splitting and external sourcemap writes multiple .map files", async () => {
using dir = tempDir("build-compile-sourcemap-splitting", {
"entry.js": `
const mod = await import("./lazy.js");
mod.greet();
`,
"lazy.js": `
export function greet() {
console.log("hello from lazy module");
}
`,
});
const result = await Bun.build({
entrypoints: [join(String(dir), "entry.js")],
compile: true,
splitting: true,
sourcemap: "external",
});
expect(result.success).toBe(true);
const executableOutput = result.outputs.find((o: any) => o.kind === "entry-point")!;
const executablePath = executableOutput.path;
expect(await Bun.file(executablePath).exists()).toBe(true);
// With splitting and a dynamic import, there should be at least 2 sourcemaps
// (one for the entry chunk, one for the lazy-loaded chunk)
const sourcemapOutputs = result.outputs.filter((o: any) => o.kind === "sourcemap");
expect(sourcemapOutputs.length).toBeGreaterThanOrEqual(2);
// Each sourcemap should be a valid .map file on disk
const mapPaths = new Set<string>();
for (const sm of sourcemapOutputs) {
expect(sm.path).toEndWith(".map");
expect(await Bun.file(sm.path).exists()).toBe(true);
// Each map file should have a unique path (no overwrites)
expect(mapPaths.has(sm.path)).toBe(false);
mapPaths.add(sm.path);
// Validate the sourcemap is valid JSON
const mapContent = JSON.parse(await Bun.file(sm.path).text());
expect(mapContent.version).toBe(3);
expect(mapContent.mappings).toBeString();
}
// Run the compiled executable to ensure it works
await using proc = Bun.spawn({
cmd: [executablePath],
env: bunEnv,
cwd: String(dir),
stdout: "pipe",
stderr: "pipe",
});
const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
expect(stdout).toContain("hello from lazy module");
expect(exitCode).toBe(0);
});
test("compile with --outfile subdir/myapp writes .map next to executable", async () => {
using dir = tempDir("build-compile-sourcemap-outfile-subdir", helperFiles);
const subdirPath = join(String(dir), "subdir");
const exeSuffix = process.platform === "win32" ? ".exe" : "";
// Use CLI: bun build --compile --outfile subdir/myapp --sourcemap=external
await using proc = Bun.spawn({
cmd: [
bunExe(),
"build",
"--compile",
join(String(dir), "app.js"),
"--outfile",
join(subdirPath, "myapp"),
"--sourcemap=external",
],
env: bunEnv,
cwd: String(dir),
stdout: "pipe",
stderr: "pipe",
});
const [_stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
expect(stderr).toBe("");
expect(exitCode).toBe(0);
// The executable should be at subdir/myapp (with .exe on Windows)
expect(await Bun.file(join(subdirPath, `myapp${exeSuffix}`)).exists()).toBe(true);
// The .map file should be in subdir/ (next to the executable)
const glob = new Bun.Glob("*.map");
const mapFiles = Array.from(glob.scanSync({ cwd: subdirPath }));
expect(mapFiles.length).toBe(1);
// Validate the sourcemap is valid JSON
const mapContent = JSON.parse(await Bun.file(join(subdirPath, mapFiles[0])).text());
expect(mapContent.version).toBe(3);
expect(mapContent.mappings).toBeString();
// Verify no .map was written into the doubled path subdir/subdir/
expect(await Bun.file(join(String(dir), "subdir", "subdir", "myapp.map")).exists()).toBe(false);
});
test("compile with multiple source files", async () => {
using dir = tempDir("build-compile-sourcemap-multiple-files", {
"utils.js": `export function utilError() {

View File

@@ -0,0 +1,7 @@
// Regression fixture for issue #27422 (see commit e1140dd834): under Node.js
// semantics, node:test applies no timeout by default, so this must pass even
// though it runs longer than bun:test's own 5000ms default.
import { it } from "node:test";
// Promise-based delay helper.
const sleep = ms => new Promise(resolve => setTimeout(resolve, ms));
// Sleeps for 7s — deliberately longer than bun:test's 5000ms default timeout.
it("async test exceeding default bun timeout", async () => {
await sleep(7000);
});

View File

@@ -0,0 +1,23 @@
import { expect, test } from "bun:test";
import { bunEnv, bunExe } from "harness";
// Regression test for #27422: spawns `bun test` on a node:test fixture that
// sleeps for 7s, with an explicit 5000ms bun:test default timeout. The fixture
// must still pass because node:test defaults to no timeout.
test("node:test async tests should not time out by default", async () => {
await using proc = Bun.spawn({
cmd: [bunExe(), "test", "--timeout", "5000", import.meta.dir + "/27422-fixture.test.mjs"],
env: bunEnv,
stdout: "pipe",
stderr: "pipe",
});
// Collect both streams and the exit code in parallel.
const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
const output = stdout + stderr;
// The test should pass because node:test defaults to no timeout,
// even though bun:test's default is 5000ms.
expect(output).toContain("1 pass");
expect(output).toContain("0 fail");
// Should not contain the misleading "done callback" error message
// (previously emitted because the wrapper's `done` parameter made
// bun:test treat it as a done-callback style test).
expect(output).not.toContain("done callback");
expect(exitCode).toBe(0);
// The spawned test sleeps for 7s, so this outer bun:test needs a longer timeout.
}, 15_000);

View File

@@ -1,98 +0,0 @@
import { expect, test } from "bun:test";
// Regression test for https://github.com/oven-sh/bun/issues/27441
// req.formData() used bun.Semver.String (u32 length) for file body values,
// which silently truncated files >= 4GB. The fix stores file body values as
// native slices instead.
//
// We can't allocate 4GB+ in CI, but we verify the code path with a meaningful
// payload size to ensure formData parsing preserves the full file body.
// NOTE(review): this compare view DELETES this file (the companion FormData
// change at Field.value is also reverted here) — confirm the revert is intended.
test("formData() preserves file size for large uploads", async () => {
const FILE_SIZE = 10 * 1024 * 1024; // 10 MB
const payload = Buffer.alloc(FILE_SIZE, 0x42);
using server = Bun.serve({
port: 0,
maxRequestBodySize: FILE_SIZE * 4,
async fetch(req) {
const formData = await req.formData();
const file = formData.get("file") as Blob;
// Echo back only the parsed size; the client asserts it matches.
return Response.json({
receivedSize: file?.size ?? 0,
});
},
});
const form = new FormData();
form.append("file", new Blob([payload]), "test.bin");
const res = await fetch(server.url, {
method: "POST",
body: form,
});
const { receivedSize } = (await res.json()) as { receivedSize: number };
expect(receivedSize).toBe(FILE_SIZE);
});
test("formData() file content is not corrupted", async () => {
// Verify content integrity, not just size
const content = "Hello, World! This is a test file for issue #27441.";
using server = Bun.serve({
port: 0,
async fetch(req) {
const formData = await req.formData();
const file = formData.get("file") as Blob;
const text = await file.text();
return Response.json({
receivedSize: file?.size ?? 0,
content: text,
});
},
});
const form = new FormData();
form.append("file", new Blob([content]), "test.txt");
const res = await fetch(server.url, {
method: "POST",
body: form,
});
const json = (await res.json()) as { receivedSize: number; content: string };
expect(json.receivedSize).toBe(content.length);
expect(json.content).toBe(content);
});
test("formData() handles multiple files with correct sizes", async () => {
const sizes = [1024, 1024 * 100, 1024 * 1024]; // 1KB, 100KB, 1MB
using server = Bun.serve({
port: 0,
maxRequestBodySize: 1024 * 1024 * 10,
async fetch(req) {
const formData = await req.formData();
const results: number[] = [];
// Look up each uploaded part by its size-keyed field name.
for (const size of sizes) {
const file = formData.get(`file_${size}`) as Blob;
results.push(file?.size ?? 0);
}
return Response.json({ receivedSizes: results });
},
});
const form = new FormData();
for (const size of sizes) {
form.append(`file_${size}`, new Blob([Buffer.alloc(size, 0x41)]), `test_${size}.bin`);
}
const res = await fetch(server.url, {
method: "POST",
body: form,
});
const { receivedSizes } = (await res.json()) as { receivedSizes: number[] };
expect(receivedSizes).toEqual(sizes);
});