mirror of
https://github.com/oven-sh/bun
synced 2026-02-26 11:37:26 +01:00
Compare commits
15 Commits
claude/fix
...
claude/fix
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
9d281c5919 | ||
|
|
f88f60af5a | ||
|
|
232e0df956 | ||
|
|
e13a0687e5 | ||
|
|
02daea84e0 | ||
|
|
cde91147e0 | ||
|
|
b9bd652a6b | ||
|
|
9f0e78fc42 | ||
|
|
043fafeefa | ||
|
|
ce173b1112 | ||
|
|
0c3b5e501b | ||
|
|
5dc72bc1d8 | ||
|
|
dfc36a8255 | ||
|
|
01602c9223 | ||
|
|
c6d4fb512e |
@@ -1081,6 +1081,28 @@ pub const JSBundler = struct {
|
||||
return globalThis.throwInvalidArguments("Expected a config object to be passed to Bun.build", .{});
|
||||
}
|
||||
|
||||
const vm = globalThis.bunVM();
|
||||
|
||||
// Detect and prevent calling Bun.build from within a macro during bundling.
|
||||
// This would cause a deadlock because:
|
||||
// 1. The bundler thread (singleton) is processing the outer Bun.build
|
||||
// 2. During parsing, it encounters a macro and evaluates it
|
||||
// 3. The macro calls Bun.build, which tries to enqueue to the same singleton thread
|
||||
// 4. The singleton thread is blocked waiting for the macro to complete -> deadlock
|
||||
if (vm.macro_mode) {
|
||||
return globalThis.throw(
|
||||
\\Bun.build cannot be called from within a macro during bundling.
|
||||
\\
|
||||
\\This would cause a deadlock because the bundler is waiting for the macro to complete,
|
||||
\\but the macro's Bun.build call is waiting for the bundler.
|
||||
\\
|
||||
\\To bundle code at compile time in a macro, use Bun.spawnSync to invoke the CLI:
|
||||
\\ const result = Bun.spawnSync(["bun", "build", entrypoint, "--format=esm"]);
|
||||
,
|
||||
.{},
|
||||
);
|
||||
}
|
||||
|
||||
var plugins: ?*Plugin = null;
|
||||
const config = try Config.fromJS(globalThis, arguments[0], &plugins, bun.default_allocator);
|
||||
|
||||
@@ -1088,7 +1110,7 @@ pub const JSBundler = struct {
|
||||
config,
|
||||
plugins,
|
||||
globalThis,
|
||||
globalThis.bunVM().eventLoop(),
|
||||
vm.eventLoop(),
|
||||
bun.default_allocator,
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1489,7 +1489,7 @@ pub fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool,
|
||||
const path = blob.store.?.data.s3.path();
|
||||
const env = globalThis.bunVM().transpiler.env;
|
||||
|
||||
S3.stat(credentials, path, @ptrCast(&onS3SizeResolved), this, if (env.getHttpProxy(true, null)) |proxy| proxy.href else null, blob.store.?.data.s3.request_payer) catch {}; // TODO: properly propagate exception upwards
|
||||
S3.stat(credentials, path, @ptrCast(&onS3SizeResolved), this, if (env.getHttpProxy(true, null, null)) |proxy| proxy.href else null, blob.store.?.data.s3.request_payer) catch {}; // TODO: properly propagate exception upwards
|
||||
return;
|
||||
}
|
||||
this.renderMetadata();
|
||||
@@ -1871,6 +1871,9 @@ pub fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool,
|
||||
switch (stream.ptr) {
|
||||
.Invalid => {
|
||||
this.response_body_readable_stream_ref.deinit();
|
||||
// Stream is invalid, render empty body
|
||||
this.doRenderBlob();
|
||||
return;
|
||||
},
|
||||
// toBlobIfPossible will typically convert .Blob streams, or .File streams into a Blob object, but cannot always.
|
||||
.Blob,
|
||||
|
||||
@@ -2806,24 +2806,6 @@ void GlobalObject::addBuiltinGlobals(JSC::VM& vm)
|
||||
consoleObject->putDirectCustomAccessor(vm, Identifier::fromString(vm, "Console"_s), CustomGetterSetter::create(vm, getConsoleConstructor, nullptr), PropertyAttribute::CustomValue | 0);
|
||||
consoleObject->putDirectCustomAccessor(vm, Identifier::fromString(vm, "_stdout"_s), CustomGetterSetter::create(vm, getConsoleStdout, nullptr), PropertyAttribute::DontEnum | PropertyAttribute::CustomValue | 0);
|
||||
consoleObject->putDirectCustomAccessor(vm, Identifier::fromString(vm, "_stderr"_s), CustomGetterSetter::create(vm, getConsoleStderr, nullptr), PropertyAttribute::DontEnum | PropertyAttribute::CustomValue | 0);
|
||||
|
||||
// Wrap console methods to handle zero-argument calls (fixes #26151).
|
||||
// JSC's ConsoleClient skips calling messageWithTypeAndLevel when there are no arguments,
|
||||
// but Node.js prints an empty line in that case. These wrappers ensure we pass an empty
|
||||
// string when called with no arguments, so our messageWithTypeAndLevel is invoked.
|
||||
auto wrapConsoleMethod = [&](const Identifier& publicName, const Identifier& privateName, JSC::FunctionExecutable* (*codeGenerator)(JSC::VM&)) {
|
||||
JSValue nativeMethod = consoleObject->get(this, publicName);
|
||||
if (nativeMethod.isCallable()) {
|
||||
consoleObject->putDirect(vm, privateName, nativeMethod, PropertyAttribute::DontEnum | PropertyAttribute::DontDelete | PropertyAttribute::ReadOnly);
|
||||
consoleObject->putDirectBuiltinFunction(vm, this, publicName, codeGenerator(vm), PropertyAttribute::Builtin | 0);
|
||||
}
|
||||
};
|
||||
|
||||
wrapConsoleMethod(clientData->builtinNames().logPublicName(), clientData->builtinNames().logPrivateName(), consoleObjectLogCodeGenerator);
|
||||
wrapConsoleMethod(clientData->builtinNames().warnPublicName(), clientData->builtinNames().warnPrivateName(), consoleObjectWarnCodeGenerator);
|
||||
wrapConsoleMethod(clientData->builtinNames().errorPublicName(), clientData->builtinNames().errorPrivateName(), consoleObjectErrorCodeGenerator);
|
||||
wrapConsoleMethod(clientData->builtinNames().infoPublicName(), clientData->builtinNames().infoPrivateName(), consoleObjectInfoCodeGenerator);
|
||||
wrapConsoleMethod(clientData->builtinNames().debugPublicName(), clientData->builtinNames().debugPrivateName(), consoleObjectDebugCodeGenerator);
|
||||
}
|
||||
|
||||
// ===================== start conditional builtin globals =====================
|
||||
|
||||
@@ -960,7 +960,7 @@ fn writeFileWithEmptySourceToDestination(ctx: *jsc.JSGlobalObject, destination_b
|
||||
|
||||
const promise = jsc.JSPromise.Strong.init(ctx);
|
||||
const promise_value = promise.value();
|
||||
const proxy = ctx.bunVM().transpiler.env.getHttpProxy(true, null);
|
||||
const proxy = ctx.bunVM().transpiler.env.getHttpProxy(true, null, null);
|
||||
const proxy_url = if (proxy) |p| p.href else null;
|
||||
destination_store.ref();
|
||||
try S3.upload(
|
||||
@@ -1102,7 +1102,7 @@ pub fn writeFileWithSourceDestination(ctx: *jsc.JSGlobalObject, source_blob: *Bl
|
||||
return jsc.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(ctx, ctx.takeException(err));
|
||||
};
|
||||
defer aws_options.deinit();
|
||||
const proxy = ctx.bunVM().transpiler.env.getHttpProxy(true, null);
|
||||
const proxy = ctx.bunVM().transpiler.env.getHttpProxy(true, null, null);
|
||||
const proxy_url = if (proxy) |p| p.href else null;
|
||||
switch (source_store.data) {
|
||||
.bytes => |bytes| {
|
||||
@@ -1390,7 +1390,7 @@ pub fn writeFileInternal(globalThis: *jsc.JSGlobalObject, path_or_blob_: *PathOr
|
||||
destination_blob.detach();
|
||||
return globalThis.throwInvalidArguments("ReadableStream has already been used", .{});
|
||||
}
|
||||
const proxy = globalThis.bunVM().transpiler.env.getHttpProxy(true, null);
|
||||
const proxy = globalThis.bunVM().transpiler.env.getHttpProxy(true, null, null);
|
||||
const proxy_url = if (proxy) |p| p.href else null;
|
||||
|
||||
return S3.uploadStream(
|
||||
@@ -1454,7 +1454,7 @@ pub fn writeFileInternal(globalThis: *jsc.JSGlobalObject, path_or_blob_: *PathOr
|
||||
destination_blob.detach();
|
||||
return globalThis.throwInvalidArguments("ReadableStream has already been used", .{});
|
||||
}
|
||||
const proxy = globalThis.bunVM().transpiler.env.getHttpProxy(true, null);
|
||||
const proxy = globalThis.bunVM().transpiler.env.getHttpProxy(true, null, null);
|
||||
const proxy_url = if (proxy) |p| p.href else null;
|
||||
return S3.uploadStream(
|
||||
(if (options.extra_options != null) aws_options.credentials.dupe() else s3.getCredentials()),
|
||||
@@ -2266,13 +2266,13 @@ const S3BlobDownloadTask = struct {
|
||||
if (blob.offset > 0) {
|
||||
const len: ?usize = if (blob.size != Blob.max_size) @intCast(blob.size) else null;
|
||||
const offset: usize = @intCast(blob.offset);
|
||||
try S3.downloadSlice(credentials, path, offset, len, @ptrCast(&S3BlobDownloadTask.onS3DownloadResolved), this, if (env.getHttpProxy(true, null)) |proxy| proxy.href else null, s3_store.request_payer);
|
||||
try S3.downloadSlice(credentials, path, offset, len, @ptrCast(&S3BlobDownloadTask.onS3DownloadResolved), this, if (env.getHttpProxy(true, null, null)) |proxy| proxy.href else null, s3_store.request_payer);
|
||||
} else if (blob.size == Blob.max_size) {
|
||||
try S3.download(credentials, path, @ptrCast(&S3BlobDownloadTask.onS3DownloadResolved), this, if (env.getHttpProxy(true, null)) |proxy| proxy.href else null, s3_store.request_payer);
|
||||
try S3.download(credentials, path, @ptrCast(&S3BlobDownloadTask.onS3DownloadResolved), this, if (env.getHttpProxy(true, null, null)) |proxy| proxy.href else null, s3_store.request_payer);
|
||||
} else {
|
||||
const len: usize = @intCast(blob.size);
|
||||
const offset: usize = @intCast(blob.offset);
|
||||
try S3.downloadSlice(credentials, path, offset, len, @ptrCast(&S3BlobDownloadTask.onS3DownloadResolved), this, if (env.getHttpProxy(true, null)) |proxy| proxy.href else null, s3_store.request_payer);
|
||||
try S3.downloadSlice(credentials, path, offset, len, @ptrCast(&S3BlobDownloadTask.onS3DownloadResolved), this, if (env.getHttpProxy(true, null, null)) |proxy| proxy.href else null, s3_store.request_payer);
|
||||
}
|
||||
return promise;
|
||||
}
|
||||
@@ -2432,7 +2432,7 @@ pub fn pipeReadableStreamToBlob(this: *Blob, globalThis: *jsc.JSGlobalObject, re
|
||||
defer aws_options.deinit();
|
||||
|
||||
const path = s3.path();
|
||||
const proxy = globalThis.bunVM().transpiler.env.getHttpProxy(true, null);
|
||||
const proxy = globalThis.bunVM().transpiler.env.getHttpProxy(true, null, null);
|
||||
const proxy_url = if (proxy) |p| p.href else null;
|
||||
|
||||
return S3.uploadStream(
|
||||
@@ -2646,7 +2646,7 @@ pub fn getWriter(
|
||||
if (this.isS3()) {
|
||||
const s3 = &this.store.?.data.s3;
|
||||
const path = s3.path();
|
||||
const proxy = globalThis.bunVM().transpiler.env.getHttpProxy(true, null);
|
||||
const proxy = globalThis.bunVM().transpiler.env.getHttpProxy(true, null, null);
|
||||
const proxy_url = if (proxy) |p| p.href else null;
|
||||
if (arguments.len > 0) {
|
||||
const options = arguments.ptr[0];
|
||||
|
||||
@@ -332,7 +332,7 @@ pub fn fromBlobCopyRef(globalThis: *JSGlobalObject, blob: *const Blob, recommend
|
||||
.s3 => |*s3| {
|
||||
const credentials = s3.getCredentials();
|
||||
const path = s3.path();
|
||||
const proxy = globalThis.bunVM().transpiler.env.getHttpProxy(true, null);
|
||||
const proxy = globalThis.bunVM().transpiler.env.getHttpProxy(true, null, null);
|
||||
const proxy_url = if (proxy) |p| p.href else null;
|
||||
|
||||
return bun.S3.readableStream(credentials, path, blob.offset, if (blob.size != Blob.max_size) blob.size else null, proxy_url, s3.request_payer, globalThis);
|
||||
|
||||
@@ -228,8 +228,11 @@ pub inline fn detachReadableStream(this: *Request, globalObject: *jsc.JSGlobalOb
|
||||
|
||||
pub fn toJS(this: *Request, globalObject: *JSGlobalObject) JSValue {
|
||||
this.calculateEstimatedByteSize();
|
||||
const js_value = js.toJSUnchecked(globalObject, this);
|
||||
this.#js_ref = .initWeak(js_value);
|
||||
|
||||
this.checkBodyStreamRef(globalObject);
|
||||
return js.toJSUnchecked(globalObject, this);
|
||||
return js_value;
|
||||
}
|
||||
|
||||
extern "C" fn Bun__JSRequest__createForBake(globalObject: *jsc.JSGlobalObject, requestPtr: *Request) callconv(jsc.conv) jsc.JSValue;
|
||||
|
||||
@@ -421,7 +421,7 @@ pub const S3BlobStatTask = struct {
|
||||
const path = s3_store.path();
|
||||
const env = globalThis.bunVM().transpiler.env;
|
||||
|
||||
try S3.stat(credentials, path, @ptrCast(&S3BlobStatTask.onS3ExistsResolved), this, if (env.getHttpProxy(true, null)) |proxy| proxy.href else null, s3_store.request_payer);
|
||||
try S3.stat(credentials, path, @ptrCast(&S3BlobStatTask.onS3ExistsResolved), this, if (env.getHttpProxy(true, null, null)) |proxy| proxy.href else null, s3_store.request_payer);
|
||||
return promise;
|
||||
}
|
||||
pub fn stat(globalThis: *jsc.JSGlobalObject, blob: *Blob) bun.JSTerminated!JSValue {
|
||||
@@ -437,7 +437,7 @@ pub const S3BlobStatTask = struct {
|
||||
const path = s3_store.path();
|
||||
const env = globalThis.bunVM().transpiler.env;
|
||||
|
||||
try S3.stat(credentials, path, @ptrCast(&S3BlobStatTask.onS3StatResolved), this, if (env.getHttpProxy(true, null)) |proxy| proxy.href else null, s3_store.request_payer);
|
||||
try S3.stat(credentials, path, @ptrCast(&S3BlobStatTask.onS3StatResolved), this, if (env.getHttpProxy(true, null, null)) |proxy| proxy.href else null, s3_store.request_payer);
|
||||
return promise;
|
||||
}
|
||||
pub fn size(globalThis: *jsc.JSGlobalObject, blob: *Blob) bun.JSTerminated!JSValue {
|
||||
@@ -453,7 +453,7 @@ pub const S3BlobStatTask = struct {
|
||||
const path = s3_store.path();
|
||||
const env = globalThis.bunVM().transpiler.env;
|
||||
|
||||
try S3.stat(credentials, path, @ptrCast(&S3BlobStatTask.onS3SizeResolved), this, if (env.getHttpProxy(true, null)) |proxy| proxy.href else null, s3_store.request_payer);
|
||||
try S3.stat(credentials, path, @ptrCast(&S3BlobStatTask.onS3SizeResolved), this, if (env.getHttpProxy(true, null, null)) |proxy| proxy.href else null, s3_store.request_payer);
|
||||
return promise;
|
||||
}
|
||||
|
||||
|
||||
@@ -356,7 +356,7 @@ pub const S3 = struct {
|
||||
};
|
||||
const promise = jsc.JSPromise.Strong.init(globalThis);
|
||||
const value = promise.value();
|
||||
const proxy_url = globalThis.bunVM().transpiler.env.getHttpProxy(true, null);
|
||||
const proxy_url = globalThis.bunVM().transpiler.env.getHttpProxy(true, null, null);
|
||||
const proxy = if (proxy_url) |url| url.href else null;
|
||||
var aws_options = try this.getCredentialsWithOptions(extra_options, globalThis);
|
||||
defer aws_options.deinit();
|
||||
@@ -414,7 +414,7 @@ pub const S3 = struct {
|
||||
|
||||
const promise = jsc.JSPromise.Strong.init(globalThis);
|
||||
const value = promise.value();
|
||||
const proxy_url = globalThis.bunVM().transpiler.env.getHttpProxy(true, null);
|
||||
const proxy_url = globalThis.bunVM().transpiler.env.getHttpProxy(true, null, null);
|
||||
const proxy = if (proxy_url) |url| url.href else null;
|
||||
var aws_options = try this.getCredentialsWithOptions(extra_options, globalThis);
|
||||
defer aws_options.deinit();
|
||||
|
||||
@@ -156,14 +156,17 @@ pub const Loader = struct {
|
||||
}
|
||||
|
||||
pub fn getHttpProxyFor(this: *Loader, url: URL) ?URL {
|
||||
return this.getHttpProxy(url.isHTTP(), url.hostname);
|
||||
return this.getHttpProxy(url.isHTTP(), url.hostname, url.host);
|
||||
}
|
||||
|
||||
pub fn hasHTTPProxy(this: *const Loader) bool {
|
||||
return this.has("http_proxy") or this.has("HTTP_PROXY") or this.has("https_proxy") or this.has("HTTPS_PROXY");
|
||||
}
|
||||
|
||||
pub fn getHttpProxy(this: *Loader, is_http: bool, hostname: ?[]const u8) ?URL {
|
||||
/// Get proxy URL for HTTP/HTTPS requests, respecting NO_PROXY.
|
||||
/// `hostname` is the host without port (e.g., "localhost")
|
||||
/// `host` is the host with port if present (e.g., "localhost:3000")
|
||||
pub fn getHttpProxy(this: *Loader, is_http: bool, hostname: ?[]const u8, host: ?[]const u8) ?URL {
|
||||
// TODO: When Web Worker support is added, make sure to intern these strings
|
||||
var http_proxy: ?URL = null;
|
||||
|
||||
@@ -191,23 +194,54 @@ pub const Loader = struct {
|
||||
|
||||
var no_proxy_iter = std.mem.splitScalar(u8, no_proxy_text, ',');
|
||||
while (no_proxy_iter.next()) |no_proxy_item| {
|
||||
var host = strings.trim(no_proxy_item, &strings.whitespace_chars);
|
||||
if (host.len == 0) {
|
||||
var no_proxy_entry = strings.trim(no_proxy_item, &strings.whitespace_chars);
|
||||
if (no_proxy_entry.len == 0) {
|
||||
continue;
|
||||
}
|
||||
if (strings.eql(host, "*")) {
|
||||
if (strings.eql(no_proxy_entry, "*")) {
|
||||
return null;
|
||||
}
|
||||
//strips .
|
||||
if (strings.startsWithChar(host, '.')) {
|
||||
host = host[1..];
|
||||
if (host.len == 0) {
|
||||
if (strings.startsWithChar(no_proxy_entry, '.')) {
|
||||
no_proxy_entry = no_proxy_entry[1..];
|
||||
if (no_proxy_entry.len == 0) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
//hostname ends with suffix
|
||||
if (strings.endsWith(hostname.?, host)) {
|
||||
return null;
|
||||
|
||||
// Determine if entry contains a port or is an IPv6 address
|
||||
// IPv6 addresses contain multiple colons (e.g., "::1", "2001:db8::1")
|
||||
// Bracketed IPv6 with port: "[::1]:8080"
|
||||
// Host with port: "localhost:8080" (single colon)
|
||||
const colon_count = std.mem.count(u8, no_proxy_entry, ":");
|
||||
const is_bracketed_ipv6 = strings.startsWithChar(no_proxy_entry, '[');
|
||||
const has_port = blk: {
|
||||
if (is_bracketed_ipv6) {
|
||||
// Bracketed IPv6: check for "]:port" pattern
|
||||
if (std.mem.indexOf(u8, no_proxy_entry, "]:")) |_| {
|
||||
break :blk true;
|
||||
}
|
||||
break :blk false;
|
||||
} else if (colon_count == 1) {
|
||||
// Single colon means host:port (not IPv6)
|
||||
break :blk true;
|
||||
}
|
||||
// Multiple colons without brackets = bare IPv6 literal (no port)
|
||||
break :blk false;
|
||||
};
|
||||
|
||||
if (has_port) {
|
||||
// Entry has a port, do exact match against host:port
|
||||
if (host) |h| {
|
||||
if (strings.eqlCaseInsensitiveASCII(h, no_proxy_entry, true)) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Entry is hostname/IPv6 only, match against hostname (suffix match)
|
||||
if (strings.endsWith(hostname.?, no_proxy_entry)) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -621,6 +621,40 @@ pub fn enqueueDependencyWithMainAndSuccessFn(
|
||||
}
|
||||
return;
|
||||
},
|
||||
error.MissingPackageJSON => {
|
||||
if (dependency.behavior.isRequired()) {
|
||||
if (failFn) |fail| {
|
||||
fail(
|
||||
this,
|
||||
dependency,
|
||||
id,
|
||||
err,
|
||||
);
|
||||
} else if (version.tag == .folder) {
|
||||
this.log.addErrorFmt(
|
||||
null,
|
||||
logger.Loc.Empty,
|
||||
this.allocator,
|
||||
"Could not find package.json for \"file:{s}\" dependency \"{s}\"",
|
||||
.{
|
||||
this.lockfile.str(&version.value.folder),
|
||||
this.lockfile.str(&name),
|
||||
},
|
||||
) catch unreachable;
|
||||
} else {
|
||||
this.log.addErrorFmt(
|
||||
null,
|
||||
logger.Loc.Empty,
|
||||
this.allocator,
|
||||
"Could not find package.json for dependency \"{s}\"",
|
||||
.{
|
||||
this.lockfile.str(&name),
|
||||
},
|
||||
) catch unreachable;
|
||||
}
|
||||
}
|
||||
return;
|
||||
},
|
||||
else => {
|
||||
if (failFn) |fail| {
|
||||
fail(
|
||||
|
||||
@@ -91,7 +91,6 @@ using namespace JSC;
|
||||
macro(cwd) \
|
||||
macro(data) \
|
||||
macro(dataView) \
|
||||
macro(debug) \
|
||||
macro(decode) \
|
||||
macro(delimiter) \
|
||||
macro(dest) \
|
||||
@@ -106,7 +105,6 @@ using namespace JSC;
|
||||
macro(encoding) \
|
||||
macro(end) \
|
||||
macro(errno) \
|
||||
macro(error) \
|
||||
macro(errorSteps) \
|
||||
macro(evaluateCommonJSModule) \
|
||||
macro(evaluated) \
|
||||
@@ -141,7 +139,6 @@ using namespace JSC;
|
||||
macro(httpOnly) \
|
||||
macro(ignoreBOM) \
|
||||
macro(importer) \
|
||||
macro(info) \
|
||||
macro(inFlightCloseRequest) \
|
||||
macro(inFlightWriteRequest) \
|
||||
macro(inherits) \
|
||||
@@ -162,7 +159,6 @@ using namespace JSC;
|
||||
macro(lineText) \
|
||||
macro(loadEsmIntoCjs) \
|
||||
macro(localStreams) \
|
||||
macro(log) \
|
||||
macro(main) \
|
||||
macro(makeAbortError) \
|
||||
macro(makeDOMException) \
|
||||
@@ -277,7 +273,6 @@ using namespace JSC;
|
||||
macro(version) \
|
||||
macro(versions) \
|
||||
macro(view) \
|
||||
macro(warn) \
|
||||
macro(warning) \
|
||||
macro(writable) \
|
||||
macro(write) \
|
||||
|
||||
@@ -1,48 +1,3 @@
|
||||
// Wrappers for console methods to handle zero-argument calls.
|
||||
// JSC's ConsoleClient skips calling messageWithTypeAndLevel when there are no arguments,
|
||||
// but Node.js prints an empty line in that case. These wrappers ensure we pass an empty
|
||||
// string when called with no arguments, so our messageWithTypeAndLevel is invoked.
|
||||
|
||||
export function log(this: Console) {
|
||||
const nativeLog = $getByIdDirectPrivate(this, "log");
|
||||
if ($argumentCount() === 0) {
|
||||
return nativeLog.$call(this, "");
|
||||
}
|
||||
return nativeLog.$apply(this, arguments);
|
||||
}
|
||||
|
||||
export function warn(this: Console) {
|
||||
const nativeWarn = $getByIdDirectPrivate(this, "warn");
|
||||
if ($argumentCount() === 0) {
|
||||
return nativeWarn.$call(this, "");
|
||||
}
|
||||
return nativeWarn.$apply(this, arguments);
|
||||
}
|
||||
|
||||
export function error(this: Console) {
|
||||
const nativeError = $getByIdDirectPrivate(this, "error");
|
||||
if ($argumentCount() === 0) {
|
||||
return nativeError.$call(this, "");
|
||||
}
|
||||
return nativeError.$apply(this, arguments);
|
||||
}
|
||||
|
||||
export function info(this: Console) {
|
||||
const nativeInfo = $getByIdDirectPrivate(this, "info");
|
||||
if ($argumentCount() === 0) {
|
||||
return nativeInfo.$call(this, "");
|
||||
}
|
||||
return nativeInfo.$apply(this, arguments);
|
||||
}
|
||||
|
||||
export function debug(this: Console) {
|
||||
const nativeDebug = $getByIdDirectPrivate(this, "debug");
|
||||
if ($argumentCount() === 0) {
|
||||
return nativeDebug.$call(this, "");
|
||||
}
|
||||
return nativeDebug.$apply(this, arguments);
|
||||
}
|
||||
|
||||
$overriddenName = "[Symbol.asyncIterator]";
|
||||
export function asyncIterator(this: Console) {
|
||||
var stream = Bun.stdin.stream();
|
||||
|
||||
@@ -111,8 +111,8 @@ it("should reject missing package", async () => {
|
||||
env,
|
||||
});
|
||||
const err = await stderr.text();
|
||||
expect(err).toContain("error: MissingPackageJSON");
|
||||
expect(err).toContain(`note: error occurred while resolving file:${add_path}`);
|
||||
expect(err).toContain(`error: Could not find package.json for "file:${add_path}" dependency`);
|
||||
expect(err).toContain("failed to resolve");
|
||||
|
||||
const out = await stdout.text();
|
||||
expect(out).toEqual(expect.stringContaining("bun add v1."));
|
||||
|
||||
@@ -15,10 +15,17 @@ beforeAll(() => {
|
||||
|
||||
// simple http proxy
|
||||
if (request.url.startsWith("http://")) {
|
||||
return await fetch(request.url, {
|
||||
const response = await fetch(request.url, {
|
||||
method: request.method,
|
||||
body: await request.text(),
|
||||
});
|
||||
// Add marker header to indicate request went through proxy
|
||||
const headers = new Headers(response.headers);
|
||||
headers.set("x-proxy-used", "1");
|
||||
return new Response(response.body, {
|
||||
status: response.status,
|
||||
headers,
|
||||
});
|
||||
}
|
||||
|
||||
// no TLS support here
|
||||
@@ -257,4 +264,129 @@ describe.concurrent(() => {
|
||||
}
|
||||
expect(exitCode).toBe(0);
|
||||
});
|
||||
|
||||
// Test that NO_PROXY respects port numbers like Node.js and curl do
|
||||
describe("NO_PROXY port handling", () => {
|
||||
it("should bypass proxy when NO_PROXY matches host:port exactly", async () => {
|
||||
// NO_PROXY includes the exact host:port, should bypass proxy
|
||||
const {
|
||||
exited,
|
||||
stdout,
|
||||
stderr: stderrStream,
|
||||
} = Bun.spawn({
|
||||
cmd: [
|
||||
bunExe(),
|
||||
"-e",
|
||||
`const resp = await fetch("http://localhost:${server.port}/test"); console.log(resp.headers.get("x-proxy-used") || "no-proxy");`,
|
||||
],
|
||||
env: {
|
||||
...bunEnv,
|
||||
http_proxy: `http://localhost:${proxy.port}`,
|
||||
NO_PROXY: `localhost:${server.port}`,
|
||||
},
|
||||
stdout: "pipe",
|
||||
stderr: "pipe",
|
||||
});
|
||||
|
||||
const [exitCode, out, stderr] = await Promise.all([exited, stdout.text(), stderrStream.text()]);
|
||||
if (exitCode !== 0) {
|
||||
console.error("stderr:", stderr);
|
||||
}
|
||||
// Should connect directly, not through proxy (no x-proxy-used header)
|
||||
expect(out.trim()).toBe("no-proxy");
|
||||
expect(exitCode).toBe(0);
|
||||
});
|
||||
|
||||
it("should use proxy when NO_PROXY has different port", async () => {
|
||||
const differentPort = server.port + 1000;
|
||||
// NO_PROXY includes a different port, should NOT bypass proxy
|
||||
const {
|
||||
exited,
|
||||
stdout,
|
||||
stderr: stderrStream,
|
||||
} = Bun.spawn({
|
||||
cmd: [
|
||||
bunExe(),
|
||||
"-e",
|
||||
`const resp = await fetch("http://localhost:${server.port}/test"); console.log(resp.headers.get("x-proxy-used") || "no-proxy");`,
|
||||
],
|
||||
env: {
|
||||
...bunEnv,
|
||||
http_proxy: `http://localhost:${proxy.port}`,
|
||||
NO_PROXY: `localhost:${differentPort}`,
|
||||
},
|
||||
stdout: "pipe",
|
||||
stderr: "pipe",
|
||||
});
|
||||
|
||||
const [exitCode, out, stderr] = await Promise.all([exited, stdout.text(), stderrStream.text()]);
|
||||
if (exitCode !== 0) {
|
||||
console.error("stderr:", stderr);
|
||||
}
|
||||
// The proxy adds x-proxy-used header, verify it was used
|
||||
expect(out.trim()).toBe("1");
|
||||
expect(exitCode).toBe(0);
|
||||
});
|
||||
|
||||
it("should bypass proxy when NO_PROXY has host only (no port)", async () => {
|
||||
// NO_PROXY includes just the host (no port), should bypass proxy for all ports
|
||||
const {
|
||||
exited,
|
||||
stdout,
|
||||
stderr: stderrStream,
|
||||
} = Bun.spawn({
|
||||
cmd: [
|
||||
bunExe(),
|
||||
"-e",
|
||||
`const resp = await fetch("http://localhost:${server.port}/test"); console.log(resp.headers.get("x-proxy-used") || "no-proxy");`,
|
||||
],
|
||||
env: {
|
||||
...bunEnv,
|
||||
http_proxy: `http://localhost:${proxy.port}`,
|
||||
NO_PROXY: `localhost`,
|
||||
},
|
||||
stdout: "pipe",
|
||||
stderr: "pipe",
|
||||
});
|
||||
|
||||
const [exitCode, out, stderr] = await Promise.all([exited, stdout.text(), stderrStream.text()]);
|
||||
if (exitCode !== 0) {
|
||||
console.error("stderr:", stderr);
|
||||
}
|
||||
// Should connect directly, not through proxy (no x-proxy-used header)
|
||||
expect(out.trim()).toBe("no-proxy");
|
||||
expect(exitCode).toBe(0);
|
||||
});
|
||||
|
||||
it("should handle NO_PROXY with multiple entries including port", async () => {
|
||||
const differentPort = server.port + 1000;
|
||||
// NO_PROXY includes multiple entries, one of which matches exactly
|
||||
const {
|
||||
exited,
|
||||
stdout,
|
||||
stderr: stderrStream,
|
||||
} = Bun.spawn({
|
||||
cmd: [
|
||||
bunExe(),
|
||||
"-e",
|
||||
`const resp = await fetch("http://localhost:${server.port}/test"); console.log(resp.headers.get("x-proxy-used") || "no-proxy");`,
|
||||
],
|
||||
env: {
|
||||
...bunEnv,
|
||||
http_proxy: `http://localhost:${proxy.port}`,
|
||||
NO_PROXY: `example.com, localhost:${differentPort}, localhost:${server.port}`,
|
||||
},
|
||||
stdout: "pipe",
|
||||
stderr: "pipe",
|
||||
});
|
||||
|
||||
const [exitCode, out, stderr] = await Promise.all([exited, stdout.text(), stderrStream.text()]);
|
||||
if (exitCode !== 0) {
|
||||
console.error("stderr:", stderr);
|
||||
}
|
||||
// Should connect directly, not through proxy (no x-proxy-used header)
|
||||
expect(out.trim()).toBe("no-proxy");
|
||||
expect(exitCode).toBe(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,100 +0,0 @@
|
||||
import { describe, expect, test } from "bun:test";
|
||||
import { bunEnv, bunExe } from "harness";
|
||||
|
||||
// Test for https://github.com/oven-sh/bun/issues/26151
|
||||
// console.log() with zero arguments should print an empty line, matching Node.js behavior
|
||||
|
||||
describe("console methods with zero arguments print empty line", () => {
|
||||
test("console.log()", async () => {
|
||||
await using proc = Bun.spawn({
|
||||
cmd: [bunExe(), "-e", `console.log("foo"); console.log(); console.log("bar");`],
|
||||
env: bunEnv,
|
||||
stdout: "pipe",
|
||||
stderr: "pipe",
|
||||
});
|
||||
|
||||
const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
|
||||
|
||||
expect(stdout).toBe("foo\n\nbar\n");
|
||||
expect(stderr).toBe("");
|
||||
expect(exitCode).toBe(0);
|
||||
});
|
||||
|
||||
test("console.info()", async () => {
|
||||
await using proc = Bun.spawn({
|
||||
cmd: [bunExe(), "-e", `console.info("foo"); console.info(); console.info("bar");`],
|
||||
env: bunEnv,
|
||||
stdout: "pipe",
|
||||
stderr: "pipe",
|
||||
});
|
||||
|
||||
const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
|
||||
|
||||
expect(stdout).toBe("foo\n\nbar\n");
|
||||
expect(stderr).toBe("");
|
||||
expect(exitCode).toBe(0);
|
||||
});
|
||||
|
||||
test("console.debug()", async () => {
|
||||
await using proc = Bun.spawn({
|
||||
cmd: [bunExe(), "-e", `console.debug("foo"); console.debug(); console.debug("bar");`],
|
||||
env: bunEnv,
|
||||
stdout: "pipe",
|
||||
stderr: "pipe",
|
||||
});
|
||||
|
||||
const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
|
||||
|
||||
expect(stdout).toBe("foo\n\nbar\n");
|
||||
expect(stderr).toBe("");
|
||||
expect(exitCode).toBe(0);
|
||||
});
|
||||
|
||||
test("console.warn()", async () => {
|
||||
await using proc = Bun.spawn({
|
||||
cmd: [bunExe(), "-e", `console.warn("foo"); console.warn(); console.warn("bar");`],
|
||||
env: bunEnv,
|
||||
stdout: "pipe",
|
||||
stderr: "pipe",
|
||||
});
|
||||
|
||||
const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
|
||||
|
||||
expect(stdout).toBe("");
|
||||
expect(stderr).toBe("foo\n\nbar\n");
|
||||
expect(exitCode).toBe(0);
|
||||
});
|
||||
|
||||
test("console.error()", async () => {
|
||||
await using proc = Bun.spawn({
|
||||
cmd: [bunExe(), "-e", `console.error("foo"); console.error(); console.error("bar");`],
|
||||
env: bunEnv,
|
||||
stdout: "pipe",
|
||||
stderr: "pipe",
|
||||
});
|
||||
|
||||
const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
|
||||
|
||||
expect(stdout).toBe("");
|
||||
expect(stderr).toBe("foo\n\nbar\n");
|
||||
expect(exitCode).toBe(0);
|
||||
});
|
||||
|
||||
test("console methods with arguments still work normally", async () => {
|
||||
await using proc = Bun.spawn({
|
||||
cmd: [bunExe(), "-e", `console.log("hello", "world"); console.log(123); console.log({ foo: "bar" });`],
|
||||
env: bunEnv,
|
||||
stdout: "pipe",
|
||||
stderr: "pipe",
|
||||
});
|
||||
|
||||
const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
|
||||
|
||||
expect(stdout).toContain("hello world");
|
||||
expect(stdout).toContain("123");
|
||||
expect(stdout).toContain("foo");
|
||||
expect(stdout).toContain("bar");
|
||||
expect(stderr).toBe("");
|
||||
expect(exitCode).toBe(0);
|
||||
});
|
||||
});
|
||||
@@ -67,10 +67,11 @@ describe("issue #26337 - missing file: dependency error should show dependency n
|
||||
failProc.exited,
|
||||
]);
|
||||
|
||||
// The error output should mention the dependency name
|
||||
// The error output should mention the dependency name and path
|
||||
const output = stdout + stderr;
|
||||
expect(output).toContain("@scope/dep");
|
||||
expect(output).toContain("error occurred while resolving");
|
||||
expect(output).toContain("file:./nonexistent/path");
|
||||
expect(output).toContain("failed to resolve");
|
||||
|
||||
// The install should fail
|
||||
expect(exitCode).toBe(1);
|
||||
|
||||
57
test/regression/issue/26338.test.ts
Normal file
57
test/regression/issue/26338.test.ts
Normal file
@@ -0,0 +1,57 @@
|
||||
// https://github.com/oven-sh/bun/issues/26338
|
||||
// Test that when a lockfile references a stale file: dependency path,
|
||||
// the error message correctly identifies the missing dependency path
|
||||
// instead of showing "Bun could not find a package.json file to install from"
|
||||
|
||||
import { expect, test } from "bun:test";
|
||||
import { bunEnv, bunExe, tempDir } from "harness";
|
||||
|
||||
test("shows informative error for missing file: dependency path", async () => {
|
||||
// Create a directory structure where:
|
||||
// - package.json references a path that doesn't exist
|
||||
// - bun.lock references a different (also non-existent) path
|
||||
// This simulates a stale lockfile scenario
|
||||
using dir = tempDir("missing-file-dep", {
|
||||
"package.json": JSON.stringify({
|
||||
name: "app",
|
||||
version: "1.0.0",
|
||||
dependencies: {
|
||||
dep: "file:../packages/@scope/dep",
|
||||
},
|
||||
}),
|
||||
"bun.lock": JSON.stringify({
|
||||
lockfileVersion: 1,
|
||||
configVersion: 1,
|
||||
workspaces: {
|
||||
"": {
|
||||
name: "app",
|
||||
dependencies: {
|
||||
dep: "file:../dep",
|
||||
},
|
||||
},
|
||||
},
|
||||
packages: {
|
||||
dep: ["dep@file:../dep", {}],
|
||||
},
|
||||
}),
|
||||
});
|
||||
|
||||
await using proc = Bun.spawn({
|
||||
cmd: [bunExe(), "install"],
|
||||
cwd: String(dir),
|
||||
env: bunEnv,
|
||||
stdout: "pipe",
|
||||
stderr: "pipe",
|
||||
});
|
||||
|
||||
const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
|
||||
|
||||
// The error should mention the file: path, not a generic "package.json not found" message
|
||||
expect(stderr).toContain("file:../packages/@scope/dep");
|
||||
// Check that the dependency name "dep" appears as a quoted token (not just as part of "dependency")
|
||||
expect(stderr).toMatch(/"dep"/);
|
||||
expect(stderr).not.toContain("Bun could not find a package.json file to install from");
|
||||
expect(stderr).not.toContain('Run "bun init" to initialize a project');
|
||||
|
||||
expect(exitCode).not.toBe(0);
|
||||
});
|
||||
New file (139 lines): test/regression/issue/26360.test.ts
|
||||
import { expect, test } from "bun:test";
|
||||
import { bunEnv, bunExe, tempDir } from "harness";
|
||||
|
||||
// https://github.com/oven-sh/bun/issues/26360
|
||||
// Bug: Bun.build API hangs indefinitely when called from within a macro that is
|
||||
// evaluated during another Bun.build call. The CLI `bun build` works correctly.
|
||||
//
|
||||
// Root cause: The bundler uses a singleton thread for processing Bun.build calls.
|
||||
// When a macro is evaluated during bundling and that macro calls Bun.build:
|
||||
// 1. The singleton bundler thread is processing the outer Bun.build
|
||||
// 2. The macro runs on the bundler thread and calls Bun.build
|
||||
// 3. The inner Bun.build tries to enqueue to the same singleton thread
|
||||
// 4. The singleton thread is blocked waiting for the macro to complete -> deadlock
|
||||
//
|
||||
// Fix: Detect when Bun.build is called from within macro mode and throw a clear error.
|
||||
|
||||
test("Bun.build from macro during bundling throws instead of hanging", async () => {
|
||||
using dir = tempDir("issue-26360", {
|
||||
// A simple file that will be bundled by the macro
|
||||
"browser.ts": `console.log("browser code");
|
||||
export default "";
|
||||
`,
|
||||
|
||||
// A macro that calls Bun.build and catches the error
|
||||
// The error should indicate that Bun.build cannot be called from macro context
|
||||
"macro.ts": `import browserCode from "./browser" with { type: "file" };
|
||||
|
||||
let errorMessage = "no error";
|
||||
try {
|
||||
const built = await Bun.build({
|
||||
entrypoints: [browserCode],
|
||||
format: "esm",
|
||||
});
|
||||
} catch (e) {
|
||||
errorMessage = "CAUGHT: " + e.message;
|
||||
}
|
||||
export const getErrorMessage = (): string => errorMessage;
|
||||
`,
|
||||
|
||||
// File that imports from the macro
|
||||
"index.ts": `import { getErrorMessage } from "./macro" with { type: "macro" };
|
||||
console.log("ERROR_MSG:", getErrorMessage());
|
||||
`,
|
||||
|
||||
// Build script that uses Bun.build API (this would hang before the fix)
|
||||
"build_script.ts": `const result = await Bun.build({
|
||||
entrypoints: ["./index.ts"],
|
||||
});
|
||||
|
||||
if (!result.success) {
|
||||
console.log("BUILD_ERROR");
|
||||
for (const log of result.logs) {
|
||||
console.log(log.message);
|
||||
}
|
||||
} else {
|
||||
console.log("BUILD_SUCCESS");
|
||||
// Print the output to verify the macro caught the error
|
||||
const text = await result.outputs[0].text();
|
||||
console.log(text);
|
||||
}
|
||||
`,
|
||||
});
|
||||
|
||||
// Run the build script - should complete (not hang) and the macro should have caught the error
|
||||
await using proc = Bun.spawn({
|
||||
cmd: [bunExe(), "build_script.ts"],
|
||||
cwd: String(dir),
|
||||
env: bunEnv,
|
||||
stdout: "pipe",
|
||||
stderr: "pipe",
|
||||
});
|
||||
|
||||
const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
|
||||
|
||||
// The build should succeed (the macro catches the error)
|
||||
expect(stdout).toContain("BUILD_SUCCESS");
|
||||
// The macro should have received the error message about Bun.build not being allowed
|
||||
expect(stdout).toContain("Bun.build cannot be called from within a macro");
|
||||
});
|
||||
|
||||
test("CLI bun build with macro that calls Bun.build also throws", async () => {
|
||||
using dir = tempDir("issue-26360-cli", {
|
||||
"browser.ts": `console.log("browser code");
|
||||
export default "";
|
||||
`,
|
||||
|
||||
// A macro that calls Bun.build and catches the error
|
||||
"macro.ts": `import browserCode from "./browser" with { type: "file" };
|
||||
|
||||
let errorMessage = "";
|
||||
try {
|
||||
const built = await Bun.build({
|
||||
entrypoints: [browserCode],
|
||||
format: "esm",
|
||||
});
|
||||
} catch (e) {
|
||||
errorMessage = e.message;
|
||||
}
|
||||
export const getErrorMessage = (): string => errorMessage;
|
||||
`,
|
||||
|
||||
"index.ts": `import { getErrorMessage } from "./macro" with { type: "macro" };
|
||||
console.log("ERROR_MSG:", getErrorMessage());
|
||||
`,
|
||||
});
|
||||
|
||||
// Run via CLI
|
||||
await using proc = Bun.spawn({
|
||||
cmd: [bunExe(), "build", "index.ts", "--target=node"],
|
||||
cwd: String(dir),
|
||||
env: bunEnv,
|
||||
stdout: "pipe",
|
||||
stderr: "pipe",
|
||||
});
|
||||
|
||||
const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
|
||||
|
||||
// The CLI build should also work and show the error message was caught
|
||||
expect(stdout).toContain("Bun.build cannot be called from within a macro");
|
||||
});
|
||||
|
||||
test("regular Bun.build (not in macro) still works", async () => {
|
||||
using dir = tempDir("issue-26360-normal", {
|
||||
"entry.ts": `
|
||||
console.log("hello world");
|
||||
export default "";
|
||||
`,
|
||||
});
|
||||
|
||||
const result = await Bun.build({
|
||||
entrypoints: [`${dir}/entry.ts`],
|
||||
format: "esm",
|
||||
});
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.outputs.length).toBeGreaterThan(0);
|
||||
const text = await result.outputs[0].text();
|
||||
expect(text).toContain("hello world");
|
||||
});
|
||||
New file (46 lines): test/regression/issue/26387.test.ts
|
||||
import { expect, test } from "bun:test";
|
||||
|
||||
// https://github.com/oven-sh/bun/issues/26387
|
||||
// Request.text() fails with "TypeError: undefined is not a function" after ~4500 requests
|
||||
test("Request.text() should work after many requests", async () => {
|
||||
// Create a server that reads the request body using req.text()
|
||||
using server = Bun.serve({
|
||||
port: 0,
|
||||
async fetch(req) {
|
||||
try {
|
||||
const body = await req.text();
|
||||
return new Response("ok:" + body.length);
|
||||
} catch (e) {
|
||||
return new Response(`error: ${e}`, { status: 500 });
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
const url = `http://localhost:${server.port}`;
|
||||
|
||||
// Send many requests to trigger the GC conditions that caused the bug
|
||||
// The original bug occurred around 4500 requests, but we use a higher number
|
||||
// to ensure we trigger any GC-related issues
|
||||
const requestCount = 6000;
|
||||
|
||||
for (let i = 0; i < requestCount; i++) {
|
||||
const body = Buffer.alloc(100, "x").toString() + `-request-${i}`;
|
||||
const response = await fetch(url, {
|
||||
method: "POST",
|
||||
body: body,
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
const text = await response.text();
|
||||
throw new Error(`Request ${i} failed: ${text}`);
|
||||
}
|
||||
|
||||
const responseText = await response.text();
|
||||
expect(responseText).toBe(`ok:${body.length}`);
|
||||
|
||||
// Periodically run GC to increase likelihood of triggering the bug
|
||||
if (i % 500 === 0) {
|
||||
Bun.gc(true);
|
||||
}
|
||||
}
|
||||
}, 60000);
|
||||
New file (86 lines): test/regression/issue/26394.test.ts
|
||||
import { expect, test } from "bun:test";
|
||||
|
||||
// https://github.com/oven-sh/bun/issues/26394
|
||||
// Race condition in Bun.serve() where requests can arrive before routes are fully registered,
|
||||
// causing the default "Welcome to Bun!" response instead of the configured handler's response.
|
||||
|
||||
test("concurrent Bun.serve instances should not return Welcome to Bun", async () => {
|
||||
const serverCount = 60;
|
||||
const servers: ReturnType<typeof Bun.serve>[] = [];
|
||||
|
||||
try {
|
||||
// Create many servers concurrently
|
||||
for (let i = 0; i < serverCount; i++) {
|
||||
servers.push(
|
||||
Bun.serve({
|
||||
port: 0,
|
||||
fetch: () => new Response("OK"),
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
// Make concurrent requests to all servers
|
||||
const responses = await Promise.all(
|
||||
servers.map(async server => {
|
||||
const res = await fetch(`http://127.0.0.1:${server.port}/`);
|
||||
return res.text();
|
||||
}),
|
||||
);
|
||||
|
||||
// Verify no "Welcome to Bun!" responses - check for both debug mode message and production mode
|
||||
for (let i = 0; i < responses.length; i++) {
|
||||
expect(responses[i]).not.toContain("Welcome to Bun");
|
||||
expect(responses[i]).not.toBe(""); // Production mode returns empty for renderMissing
|
||||
expect(responses[i]).toBe("OK");
|
||||
}
|
||||
} finally {
|
||||
// Clean up - guaranteed to run even if assertions fail
|
||||
for (const server of servers) {
|
||||
server.stop();
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
test("Bun.serve should be ready to handle requests immediately after returning", async () => {
|
||||
// Test a single server with immediate fetch - this tests if the server is ready synchronously
|
||||
using server = Bun.serve({
|
||||
port: 0,
|
||||
fetch: () => new Response("handler response"),
|
||||
});
|
||||
|
||||
// Immediately fetch - if there's a race condition, this might return "Welcome to Bun!"
|
||||
const response = await fetch(`http://127.0.0.1:${server.port}/`);
|
||||
const text = await response.text();
|
||||
|
||||
expect(text).toBe("handler response");
|
||||
});
|
||||
|
||||
test("multiple sequential Bun.serve instances with immediate requests", async () => {
|
||||
// Create servers sequentially and immediately request from each
|
||||
const results: string[] = [];
|
||||
const servers: ReturnType<typeof Bun.serve>[] = [];
|
||||
|
||||
try {
|
||||
for (let i = 0; i < 20; i++) {
|
||||
const server = Bun.serve({
|
||||
port: 0,
|
||||
fetch: () => new Response(`server-${i}`),
|
||||
});
|
||||
servers.push(server);
|
||||
|
||||
// Immediately fetch from the server
|
||||
const response = await fetch(`http://127.0.0.1:${server.port}/`);
|
||||
results.push(await response.text());
|
||||
}
|
||||
|
||||
// Verify all responses match expected
|
||||
for (let i = 0; i < results.length; i++) {
|
||||
expect(results[i]).toBe(`server-${i}`);
|
||||
}
|
||||
} finally {
|
||||
// Clean up - guaranteed to run even if assertions fail
|
||||
for (const server of servers) {
|
||||
server.stop();
|
||||
}
|
||||
}
|
||||
});
|
||||
Reference in New Issue
Block a user