Merge branch 'main' into claude/fix-require-print-exception-25653

Authored by Jarred Sumner on 2026-01-14 16:10:22 -08:00, committed via GitHub.
31 changed files with 2555 additions and 141 deletions

View File

@@ -1159,7 +1159,9 @@ pub const StandaloneModuleGraph = struct {
return .success;
}
pub fn fromExecutable(allocator: std.mem.Allocator) !?StandaloneModuleGraph {
/// Loads the standalone module graph from the executable, allocates it on the heap,
/// sets it globally, and returns the pointer.
pub fn fromExecutable(allocator: std.mem.Allocator) !?*StandaloneModuleGraph {
if (comptime Environment.isMac) {
const macho_bytes = Macho.getData() orelse return null;
if (macho_bytes.len < @sizeOf(Offsets) + trailer.len) {
@@ -1173,7 +1175,7 @@ pub const StandaloneModuleGraph = struct {
return null;
}
const offsets = std.mem.bytesAsValue(Offsets, macho_bytes_slice).*;
return try StandaloneModuleGraph.fromBytes(allocator, @constCast(macho_bytes), offsets);
return try fromBytesAlloc(allocator, @constCast(macho_bytes), offsets);
}
if (comptime Environment.isWindows) {
@@ -1189,7 +1191,7 @@ pub const StandaloneModuleGraph = struct {
return null;
}
const offsets = std.mem.bytesAsValue(Offsets, pe_bytes_slice).*;
return try StandaloneModuleGraph.fromBytes(allocator, @constCast(pe_bytes), offsets);
return try fromBytesAlloc(allocator, @constCast(pe_bytes), offsets);
}
// Do not invoke libuv here.
@@ -1284,7 +1286,15 @@ pub const StandaloneModuleGraph = struct {
}
}
return try StandaloneModuleGraph.fromBytes(allocator, to_read, offsets);
return try fromBytesAlloc(allocator, to_read, offsets);
}
/// Allocates a StandaloneModuleGraph on the heap, populates it from bytes, sets it globally, and returns the pointer.
fn fromBytesAlloc(allocator: std.mem.Allocator, raw_bytes: []u8, offsets: Offsets) !*StandaloneModuleGraph {
const graph_ptr = try allocator.create(StandaloneModuleGraph);
graph_ptr.* = try StandaloneModuleGraph.fromBytes(allocator, raw_bytes, offsets);
graph_ptr.set();
return graph_ptr;
}
/// heuristic: `bun build --compile` won't be supported if the name is "bun", "bunx", or "node".

View File

@@ -13,21 +13,18 @@ pub const Run = struct {
var run: Run = undefined;
pub fn bootStandalone(ctx: Command.Context, entry_path: string, graph: bun.StandaloneModuleGraph) !void {
pub fn bootStandalone(ctx: Command.Context, entry_path: string, graph_ptr: *bun.StandaloneModuleGraph) !void {
jsc.markBinding(@src());
bun.jsc.initialize(false);
bun.analytics.Features.standalone_executable += 1;
const graph_ptr = try bun.default_allocator.create(bun.StandaloneModuleGraph);
graph_ptr.* = graph;
graph_ptr.set();
js_ast.Expr.Data.Store.create();
js_ast.Stmt.Data.Store.create();
const arena = Arena.init();
// Load bunfig.toml unless disabled by compile flags
if (!ctx.debug.loaded_bunfig and !graph.flags.disable_autoload_bunfig) {
// Note: config loading with execArgv is handled earlier in cli.zig via loadConfig
if (!ctx.debug.loaded_bunfig and !graph_ptr.flags.disable_autoload_bunfig) {
try bun.cli.Arguments.loadConfigPath(ctx.allocator, true, "bunfig.toml", ctx, .RunCommand);
}
@@ -87,7 +84,7 @@ pub const Run = struct {
// If .env loading is disabled, only load process env vars
// Otherwise, load all .env files
if (graph.flags.disable_default_env_files) {
if (graph_ptr.flags.disable_default_env_files) {
b.options.env.behavior = .disable;
} else {
b.options.env.behavior = .load_all_without_inlining;
@@ -95,8 +92,8 @@ pub const Run = struct {
// Control loading of tsconfig.json and package.json at runtime
// By default, these are disabled for standalone executables
b.resolver.opts.load_tsconfig_json = !graph.flags.disable_autoload_tsconfig;
b.resolver.opts.load_package_json = !graph.flags.disable_autoload_package_json;
b.resolver.opts.load_tsconfig_json = !graph_ptr.flags.disable_autoload_tsconfig;
b.resolver.opts.load_package_json = !graph_ptr.flags.disable_autoload_package_json;
b.configureDefines() catch {
failWithBuildError(vm);

View File

@@ -316,6 +316,7 @@ pub const TestReporterAgent = struct {
pub const Handle = opaque {
extern "c" fn Bun__TestReporterAgentReportTestFound(agent: *Handle, callFrame: *jsc.CallFrame, testId: c_int, name: *bun.String, item_type: TestType, parentId: c_int) void;
extern "c" fn Bun__TestReporterAgentReportTestFoundWithLocation(agent: *Handle, testId: c_int, name: *bun.String, item_type: TestType, parentId: c_int, sourceURL: *bun.String, line: c_int) void;
extern "c" fn Bun__TestReporterAgentReportTestStart(agent: *Handle, testId: c_int) void;
extern "c" fn Bun__TestReporterAgentReportTestEnd(agent: *Handle, testId: c_int, bunTestStatus: TestStatus, elapsed: f64) void;
@@ -323,6 +324,10 @@ pub const TestReporterAgent = struct {
Bun__TestReporterAgentReportTestFound(this, callFrame, testId, name, item_type, parentId);
}
pub fn reportTestFoundWithLocation(this: *Handle, testId: i32, name: *bun.String, item_type: TestType, parentId: i32, sourceURL: *bun.String, line: i32) void {
Bun__TestReporterAgentReportTestFoundWithLocation(this, testId, name, item_type, parentId, sourceURL, line);
}
pub fn reportTestStart(this: *Handle, testId: c_int) void {
Bun__TestReporterAgentReportTestStart(this, testId);
}
@@ -335,8 +340,88 @@ pub const TestReporterAgent = struct {
if (VirtualMachine.get().debugger) |*debugger| {
debug("enable", .{});
debugger.test_reporter_agent.handle = agent;
// Retroactively report any tests that were already discovered before the debugger connected
retroactivelyReportDiscoveredTests(agent);
}
}
/// When TestReporter.enable is called after test collection has started/finished,
/// we need to retroactively assign test IDs and report discovered tests.
fn retroactivelyReportDiscoveredTests(agent: *Handle) void {
const Jest = jsc.Jest.Jest;
const runner = Jest.runner orelse return;
const active_file = runner.bun_test_root.active_file.get() orelse return;
// Only report if we're in collection or execution phase (tests have been discovered)
switch (active_file.phase) {
.collection, .execution => {},
.done => return,
}
// Get the file path for source location info
const file_path = runner.files.get(active_file.file_id).source.path.text;
var source_url = bun.String.init(file_path);
// Track the maximum ID we assign
var max_id: i32 = 0;
// Recursively report all discovered tests starting from root scope
const root_scope = active_file.collection.root_scope;
retroactivelyReportScope(agent, root_scope, -1, &max_id, &source_url);
debug("retroactively reported {} tests", .{max_id});
}
fn retroactivelyReportScope(agent: *Handle, scope: *bun_test.DescribeScope, parent_id: i32, max_id: *i32, source_url: *bun.String) void {
for (scope.entries.items) |*entry| {
switch (entry.*) {
.describe => |describe| {
// Only report and assign ID if not already assigned
if (describe.base.test_id_for_debugger == 0) {
max_id.* += 1;
const test_id = max_id.*;
// Assign the ID so start/end events will fire during execution
describe.base.test_id_for_debugger = test_id;
var name = bun.String.init(describe.base.name orelse "(unnamed)");
agent.reportTestFoundWithLocation(
test_id,
&name,
.describe,
parent_id,
source_url,
@intCast(describe.base.line_no),
);
// Recursively report children with this describe as parent
retroactivelyReportScope(agent, describe, test_id, max_id, source_url);
} else {
// Already has ID, just recurse with existing ID as parent
retroactivelyReportScope(agent, describe, describe.base.test_id_for_debugger, max_id, source_url);
}
},
.test_callback => |test_entry| {
// Only report and assign ID if not already assigned
if (test_entry.base.test_id_for_debugger == 0) {
max_id.* += 1;
const test_id = max_id.*;
// Assign the ID so start/end events will fire during execution
test_entry.base.test_id_for_debugger = test_id;
var name = bun.String.init(test_entry.base.name orelse "(unnamed)");
agent.reportTestFoundWithLocation(
test_id,
&name,
.@"test",
parent_id,
source_url,
@intCast(test_entry.base.line_no),
);
}
},
}
}
}
const bun_test = jsc.Jest.bun_test;
pub export fn Bun__TestReporterAgentDisable(_: *Handle) void {
if (VirtualMachine.get().debugger) |*debugger| {
debug("disable", .{});

View File

@@ -50,6 +50,26 @@ void Bun__TestReporterAgentReportTestFound(Inspector::InspectorTestReporterAgent
agent->reportTestFound(callFrame, testId, str, type, parentId);
}
void Bun__TestReporterAgentReportTestFoundWithLocation(Inspector::InspectorTestReporterAgent* agent, int testId, BunString* name, BunTestType item_type, int parentId, BunString* sourceURL, int line)
{
auto str = name->toWTFString(BunString::ZeroCopy);
auto sourceURLStr = sourceURL->toWTFString(BunString::ZeroCopy);
Protocol::TestReporter::TestType type;
switch (item_type) {
case BunTestType::Test:
type = Protocol::TestReporter::TestType::Test;
break;
case BunTestType::Describe:
type = Protocol::TestReporter::TestType::Describe;
break;
default:
ASSERT_NOT_REACHED();
}
agent->reportTestFoundWithLocation(testId, str, type, parentId, sourceURLStr, line);
}
void Bun__TestReporterAgentReportTestStart(Inspector::InspectorTestReporterAgent* agent, int testId)
{
agent->reportTestStart(testId);
@@ -211,6 +231,21 @@ void InspectorTestReporterAgent::reportTestFound(JSC::CallFrame* callFrame, int
parentId > 0 ? parentId : std::optional<int>());
}
void InspectorTestReporterAgent::reportTestFoundWithLocation(int testId, const String& name, Protocol::TestReporter::TestType type, int parentId, const String& sourceURL, int line)
{
if (!m_enabled)
return;
m_frontendDispatcher->found(
testId,
String(), // sourceID - not available for retroactively reported tests
sourceURL,
line,
name,
type,
parentId > 0 ? parentId : std::optional<int>());
}
void InspectorTestReporterAgent::reportTestStart(int testId)
{
if (!m_enabled || !m_frontendDispatcher)

View File

@@ -34,6 +34,7 @@ public:
// Public API for reporting test events
void reportTestFound(JSC::CallFrame*, int testId, const String& name, Protocol::TestReporter::TestType type = Protocol::TestReporter::TestType::Test, int parentId = -1);
void reportTestFoundWithLocation(int testId, const String& name, Protocol::TestReporter::TestType type, int parentId, const String& sourceURL, int line);
void reportTestStart(int testId);
void reportTestEnd(int testId, Protocol::TestReporter::TestStatus status, double elapsed);

View File

@@ -166,8 +166,14 @@ pub fn write(index: u32, graph: *const Graph, linker_graph: *const LinkerGraph,
defer already_visited_output_file.deinit(bun.default_allocator);
// Write all chunks that have files associated with this entry point.
// Also include browser chunks from server builds (lazy-loaded chunks from dynamic imports).
// When there's only one HTML import, all browser chunks belong to that manifest.
// When there are multiple HTML imports, only include chunks that intersect with this entry's bits.
const has_single_html_import = graph.html_imports.html_source_indices.len == 1;
for (chunks) |*ch| {
if (ch.entryBits().hasIntersection(&entry_point_bits)) {
if (ch.entryBits().hasIntersection(&entry_point_bits) or
(has_single_html_import and ch.flags.is_browser_chunk_from_server_build))
{
if (!first) try writer.writeAll(",");
first = false;

View File

@@ -229,6 +229,16 @@ pub noinline fn computeChunks(
.output_source_map = SourceMap.SourceMapPieces.init(this.allocator()),
.flags = .{ .is_browser_chunk_from_server_build = is_browser_chunk_from_server_build },
};
} else if (could_be_browser_target_from_server_build and
!js_chunk_entry.value_ptr.entry_point.is_entry_point and
!js_chunk_entry.value_ptr.flags.is_browser_chunk_from_server_build and
ast_targets[source_index.get()] == .browser)
{
// If any file in the chunk has a browser target, mark the whole chunk as a browser chunk.
// This handles the case where a lazy-loaded chunk (code splitting chunk, not entry point)
// contains browser-targeted files but was first created by a non-browser file.
// We only apply this to non-entry-point chunks to preserve the correct side for server entry points.
js_chunk_entry.value_ptr.flags.is_browser_chunk_from_server_build = true;
}
const entry = js_chunk_entry.value_ptr.files_with_parts_in_chunk.getOrPut(this.allocator(), @as(u32, @truncate(source_index.get()))) catch unreachable;

View File

@@ -690,13 +690,26 @@ pub const Command = struct {
const original_argv_len = bun.argv.len;
var argv_list = std.array_list.Managed([:0]const u8).fromOwnedSlice(bun.default_allocator, bun.argv);
try bun.appendOptionsEnv(graph.compile_exec_argv, &argv_list, bun.default_allocator);
bun.argv = argv_list.items;
// Store the full argv including user arguments
const full_argv = argv_list.items;
const num_exec_argv_options = full_argv.len -| original_argv_len;
// Calculate offset: skip executable name + all exec argv options
offset_for_passthrough = if (bun.argv.len > 1) 1 + (bun.argv.len -| original_argv_len) else 0;
offset_for_passthrough = if (full_argv.len > 1) 1 + num_exec_argv_options else 0;
// Temporarily set bun.argv to only include executable name + exec_argv options.
// This prevents user arguments like --version/--help from being intercepted
// by Bun's argument parser (they should be passed through to user code).
bun.argv = full_argv[0..@min(1 + num_exec_argv_options, full_argv.len)];
// Handle actual options to parse.
break :brk try Command.init(allocator, log, .AutoCommand);
const result = try Command.init(allocator, log, .AutoCommand);
// Restore full argv so passthrough calculation works correctly
bun.argv = full_argv;
break :brk result;
}
context_data = .{
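The argv juggling above is what lets user flags such as --version or --help reach the compiled program instead of Bun's own parser. A minimal TypeScript sketch of the idea (illustrative only, not Bun internals; it assumes the exec-argv options sit immediately after the executable name, which is what the slice in the diff relies on):

// Illustrative sketch -- hypothetical helper, not part of Bun.
// execArgvCount is the number of options baked in via --compile-exec-argv.
function splitArgv(fullArgv: string[], execArgvCount: number) {
  // The runtime's own parser should only see [executable, ...execArgvOptions].
  const parsedByRuntime = fullArgv.slice(0, 1 + execArgvCount);
  // Everything else is passed through untouched, so --version/--help reach user code.
  const passthrough = fullArgv.slice(1 + execArgvCount);
  return { parsedByRuntime, passthrough };
}

// Example: an app compiled with execArgv ["--smol"], invoked as `./app --version`
const { parsedByRuntime, passthrough } = splitArgv(["./app", "--smol", "--version"], 1);
// parsedByRuntime -> ["./app", "--smol"], passthrough -> ["--version"]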

View File

@@ -297,6 +297,16 @@ fn getHomeConfigPath(buf: *bun.PathBuffer) ?[:0]const u8 {
return null;
}
pub fn loadConfig(allocator: std.mem.Allocator, user_config_path_: ?string, ctx: Command.Context, comptime cmd: Command.Tag) OOM!void {
// If running as a standalone executable with autoloadBunfig disabled, skip config loading
// unless an explicit config path was provided via --config
if (user_config_path_ == null) {
if (bun.StandaloneModuleGraph.get()) |graph| {
if (graph.flags.disable_autoload_bunfig) {
return;
}
}
}
var config_buf: bun.PathBuffer = undefined;
if (comptime cmd.readGlobalConfig()) {
if (!ctx.has_loaded_global_config) {

View File

@@ -220,7 +220,7 @@ pub const BorderRadiusHandler = struct {
if (logical_supported) {
bun.handleOom(d.append(ctx.allocator, v));
} else {
const prefix = ctx.targets.prefixes(css.VendorPrefix{}, css.prefixes.Feature.border_radius);
const prefix = ctx.targets.prefixes(css.VendorPrefix{ .none = true }, css.prefixes.Feature.border_radius);
switch (v) {
.@"border-start-start-radius",
.@"border-start-end-radius",

View File

@@ -1,8 +1,14 @@
const { isIP, isIPv6 } = require("internal/net/isIP");
const { checkIsHttpToken, validateFunction, validateInteger, validateBoolean } = require("internal/validators");
const {
checkIsHttpToken,
validateFunction,
validateInteger,
validateBoolean,
validateString,
} = require("internal/validators");
const { urlToHttpOptions } = require("internal/url");
const { isValidTLSArray } = require("internal/tls");
const { throwOnInvalidTLSArray } = require("internal/tls");
const { validateHeaderName } = require("node:_http_common");
const { getTimerDuration } = require("internal/timers");
const { ConnResetException } = require("internal/shared");
@@ -728,53 +734,48 @@ function ClientRequest(input, options, cb) {
throw new Error("pfx is not supported");
}
if (options.rejectUnauthorized !== undefined) this._ensureTls().rejectUnauthorized = options.rejectUnauthorized;
else {
let agentRejectUnauthorized = agent?.options?.rejectUnauthorized;
if (agentRejectUnauthorized !== undefined) this._ensureTls().rejectUnauthorized = agentRejectUnauthorized;
else {
// popular https-proxy-agent uses connectOpts
agentRejectUnauthorized = agent?.connectOpts?.rejectUnauthorized;
if (agentRejectUnauthorized !== undefined) this._ensureTls().rejectUnauthorized = agentRejectUnauthorized;
}
}
if (options.ca) {
if (!isValidTLSArray(options.ca))
throw new TypeError(
"ca argument must be an string, Buffer, TypedArray, BunFile or an array containing string, Buffer, TypedArray or BunFile",
);
this._ensureTls().ca = options.ca;
}
if (options.cert) {
if (!isValidTLSArray(options.cert))
throw new TypeError(
"cert argument must be an string, Buffer, TypedArray, BunFile or an array containing string, Buffer, TypedArray or BunFile",
);
this._ensureTls().cert = options.cert;
}
if (options.key) {
if (!isValidTLSArray(options.key))
throw new TypeError(
"key argument must be an string, Buffer, TypedArray, BunFile or an array containing string, Buffer, TypedArray or BunFile",
);
this._ensureTls().key = options.key;
}
if (options.passphrase) {
if (typeof options.passphrase !== "string") throw new TypeError("passphrase argument must be a string");
this._ensureTls().passphrase = options.passphrase;
}
if (options.ciphers) {
if (typeof options.ciphers !== "string") throw new TypeError("ciphers argument must be a string");
this._ensureTls().ciphers = options.ciphers;
}
if (options.servername) {
if (typeof options.servername !== "string") throw new TypeError("servername argument must be a string");
this._ensureTls().servername = options.servername;
}
// Merge TLS options using spread operator, matching Node.js behavior in createSocket:
// options = { __proto__: null, ...options, ...this.options };
// https://github.com/nodejs/node/blob/v23.6.0/lib/_http_agent.js#L242
// With spread, the last one wins, so agent.options overwrites request options.
//
// agent.options: Stored by Node.js Agent constructor
// https://github.com/nodejs/node/blob/v23.6.0/lib/_http_agent.js#L96
//
// agent.connectOpts: Used by https-proxy-agent for TLS connection options (lowest priority)
// https://github.com/TooTallNate/proxy-agents/blob/main/packages/https-proxy-agent/src/index.ts#L110-L117
const mergedTlsOptions = { __proto__: null, ...agent?.connectOpts, ...options, ...agent?.options };
if (options.secureOptions) {
if (typeof options.secureOptions !== "number") throw new TypeError("secureOptions argument must be a string");
this._ensureTls().secureOptions = options.secureOptions;
if (mergedTlsOptions.rejectUnauthorized !== undefined) {
this._ensureTls().rejectUnauthorized = mergedTlsOptions.rejectUnauthorized;
}
if (mergedTlsOptions.ca) {
throwOnInvalidTLSArray("options.ca", mergedTlsOptions.ca);
this._ensureTls().ca = mergedTlsOptions.ca;
}
if (mergedTlsOptions.cert) {
throwOnInvalidTLSArray("options.cert", mergedTlsOptions.cert);
this._ensureTls().cert = mergedTlsOptions.cert;
}
if (mergedTlsOptions.key) {
throwOnInvalidTLSArray("options.key", mergedTlsOptions.key);
this._ensureTls().key = mergedTlsOptions.key;
}
if (mergedTlsOptions.passphrase) {
validateString(mergedTlsOptions.passphrase, "options.passphrase");
this._ensureTls().passphrase = mergedTlsOptions.passphrase;
}
if (mergedTlsOptions.ciphers) {
validateString(mergedTlsOptions.ciphers, "options.ciphers");
this._ensureTls().ciphers = mergedTlsOptions.ciphers;
}
if (mergedTlsOptions.servername) {
validateString(mergedTlsOptions.servername, "options.servername");
this._ensureTls().servername = mergedTlsOptions.servername;
}
if (mergedTlsOptions.secureOptions) {
validateInteger(mergedTlsOptions.secureOptions, "options.secureOptions");
this._ensureTls().secureOptions = mergedTlsOptions.secureOptions;
}
this[kPath] = options.path || "/";
if (cb) {
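The precedence described in the merge comment above comes straight from how object spread works: the last spread wins. A small TypeScript illustration with made-up values:

// Later spreads overwrite earlier ones, so agent.options has the final say,
// then the request options, then agent.connectOpts (lowest priority).
const connectOpts = { rejectUnauthorized: true };     // e.g. set by https-proxy-agent
const requestOptions = { rejectUnauthorized: false }; // passed to https.request()
const agentOptions: Record<string, unknown> = {};     // new https.Agent({ ... })

const merged = { ...connectOpts, ...requestOptions, ...agentOptions };
// merged.rejectUnauthorized === false: the request option wins here because
// agentOptions does not set it; if it did, the agent's value would win instead.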

View File

@@ -597,12 +597,6 @@ pub const NumberRenamer = struct {
r.number_scope_pool.put(s);
};
// Ignore function argument scopes
if (scope.kind == .function_args and scope.children.len == 1) {
scope = scope.children.ptr[0];
bun.assert(scope.kind == .function_body);
}
while (true) {
if (scope.members.count() > 0 or scope.generated.len > 0) {
const new_child_scope = r.number_scope_pool.get();
@@ -617,10 +611,6 @@ pub const NumberRenamer = struct {
if (scope.children.len == 1) {
scope = scope.children.ptr[0];
if (scope.kind == .function_args and scope.children.len == 1) {
scope = scope.children.ptr[0];
bun.assert(scope.kind == .function_body);
}
} else {
break;
}

View File

@@ -225,6 +225,9 @@ pub const ShellLsTask = struct {
is_absolute: bool = false,
err: ?Syscall.Error = null,
result_kind: enum { file, dir, idk } = .idk,
/// Current time (seconds since epoch) used when formatting timestamps,
/// cached once per task to avoid repeated syscalls.
#now_secs: u64 = 0,
event_loop: jsc.EventLoopHandle,
concurrent_task: jsc.EventLoopTask,
@@ -293,6 +296,11 @@ pub const ShellLsTask = struct {
}
pub fn run(this: *@This()) void {
// Cache current time once per task for timestamp formatting
if (this.opts.long_listing) {
this.#now_secs = @intCast(std.time.timestamp());
}
const fd = switch (ShellSyscall.openat(this.cwd, this.path, bun.O.RDONLY | bun.O.DIRECTORY, 0)) {
.err => |e| {
switch (e.getErrno()) {
@@ -301,7 +309,7 @@ pub const ShellLsTask = struct {
},
.NOTDIR => {
this.result_kind = .file;
this.addEntry(this.path);
this.addEntry(this.path, this.cwd);
},
else => {
this.err = this.errorWithPath(e, this.path);
@@ -329,7 +337,7 @@ pub const ShellLsTask = struct {
// If `-a` is used, "." and ".." should show up as results. However,
// our `DirIterator` abstraction skips them, so let's just add them
// now.
this.addDotEntriesIfNeeded();
this.addDotEntriesIfNeeded(fd);
while (switch (entry) {
.err => |e| {
@@ -338,7 +346,7 @@ pub const ShellLsTask = struct {
},
.result => |ent| ent,
}) |current| : (entry = iterator.next()) {
this.addEntry(current.name.sliceAssumeZ());
this.addEntry(current.name.sliceAssumeZ(), fd);
if (current.kind == .directory and this.opts.recursive) {
this.enqueue(current.name.sliceAssumeZ());
}
@@ -367,20 +375,167 @@ pub const ShellLsTask = struct {
}
// TODO more complex output like multi-column
fn addEntry(this: *@This(), name: [:0]const u8) void {
fn addEntry(this: *@This(), name: [:0]const u8, dir_fd: bun.FileDescriptor) void {
const skip = this.shouldSkipEntry(name);
debug("Entry: (skip={}) {s} :: {s}", .{ skip, this.path, name });
if (skip) return;
bun.handleOom(this.output.ensureUnusedCapacity(name.len + 1));
bun.handleOom(this.output.appendSlice(name));
bun.handleOom(this.output.append('\n'));
if (this.opts.long_listing) {
this.addEntryLong(name, dir_fd);
} else {
bun.handleOom(this.output.ensureUnusedCapacity(name.len + 1));
bun.handleOom(this.output.appendSlice(name));
bun.handleOom(this.output.append('\n'));
}
}
fn addDotEntriesIfNeeded(this: *@This()) void {
fn addEntryLong(this: *@This(), name: [:0]const u8, dir_fd: bun.FileDescriptor) void {
// Use lstatat so symlinks are not followed (and show up with type 'l')
const stat_result = Syscall.lstatat(dir_fd, name);
const stat = switch (stat_result) {
.err => {
// If stat fails, just output the name with placeholders
const writer = this.output.writer();
bun.handleOom(writer.print("?????????? ? ? ? ? ? {s}\n", .{name}));
return;
},
.result => |s| s,
};
const writer = this.output.writer();
// File type and permissions
const mode: u32 = @intCast(stat.mode);
const file_type = getFileTypeChar(mode);
const perms = formatPermissions(mode);
// Number of hard links
const nlink: u64 = @intCast(stat.nlink);
// Owner and group (numeric)
const uid: u64 = @intCast(stat.uid);
const gid: u64 = @intCast(stat.gid);
// File size
const size: i64 = @intCast(stat.size);
// Modification time
const mtime = stat.mtime();
const time_str = formatTime(@intCast(mtime.sec), this.#now_secs);
bun.handleOom(writer.print("{c}{s} {d: >3} {d: >5} {d: >5} {d: >8} {s} {s}\n", .{
file_type,
&perms,
nlink,
uid,
gid,
size,
&time_str,
name,
}));
}
fn getFileTypeChar(mode: u32) u8 {
const file_type = mode & bun.S.IFMT;
return switch (file_type) {
bun.S.IFDIR => 'd',
bun.S.IFLNK => 'l',
bun.S.IFBLK => 'b',
bun.S.IFCHR => 'c',
bun.S.IFIFO => 'p',
bun.S.IFSOCK => 's',
else => '-', // IFREG or unknown
};
}
fn formatPermissions(mode: u32) [9]u8 {
var perms: [9]u8 = undefined;
// Owner permissions
perms[0] = if (mode & bun.S.IRUSR != 0) 'r' else '-';
perms[1] = if (mode & bun.S.IWUSR != 0) 'w' else '-';
// Owner execute with setuid handling
const owner_exec = mode & bun.S.IXUSR != 0;
const setuid = mode & bun.S.ISUID != 0;
perms[2] = if (setuid)
(if (owner_exec) 's' else 'S')
else
(if (owner_exec) 'x' else '-');
// Group permissions
perms[3] = if (mode & bun.S.IRGRP != 0) 'r' else '-';
perms[4] = if (mode & bun.S.IWGRP != 0) 'w' else '-';
// Group execute with setgid handling
const group_exec = mode & bun.S.IXGRP != 0;
const setgid = mode & bun.S.ISGID != 0;
perms[5] = if (setgid)
(if (group_exec) 's' else 'S')
else
(if (group_exec) 'x' else '-');
// Other permissions
perms[6] = if (mode & bun.S.IROTH != 0) 'r' else '-';
perms[7] = if (mode & bun.S.IWOTH != 0) 'w' else '-';
// Other execute with sticky bit handling
const other_exec = mode & bun.S.IXOTH != 0;
const sticky = mode & bun.S.ISVTX != 0;
perms[8] = if (sticky)
(if (other_exec) 't' else 'T')
else
(if (other_exec) 'x' else '-');
return perms;
}
fn formatTime(timestamp: i64, now_secs: u64) [12]u8 {
var buf: [12]u8 = undefined;
// Format as "Mon DD HH:MM" for recent files (within 6 months)
// or "Mon DD YYYY" for older files
const epoch_secs: u64 = if (timestamp < 0) 0 else @intCast(timestamp);
const epoch = std.time.epoch.EpochSeconds{ .secs = epoch_secs };
const day_seconds = epoch.getDaySeconds();
const year_day = epoch.getEpochDay().calculateYearDay();
const month_names = [_][]const u8{ "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec" };
const month_day = year_day.calculateMonthDay();
const month_name = month_names[month_day.month.numeric() - 1];
// Check if file is older than 6 months (approximately 180 days)
const six_months_secs: u64 = 180 * 24 * 60 * 60;
const is_recent = epoch_secs > now_secs -| six_months_secs and epoch_secs <= now_secs + six_months_secs;
if (is_recent) {
const hours = day_seconds.getHoursIntoDay();
const minutes = day_seconds.getMinutesIntoHour();
_ = std.fmt.bufPrint(&buf, "{s} {d:0>2} {d:0>2}:{d:0>2}", .{
month_name,
month_day.day_index + 1,
hours,
minutes,
}) catch {
@memcpy(&buf, "??? ?? ??:??");
};
} else {
// Show year for old files
const year = year_day.year;
_ = std.fmt.bufPrint(&buf, "{s} {d:0>2} {d:4}", .{
month_name,
month_day.day_index + 1,
year,
}) catch {
@memcpy(&buf, "??? ?? ????");
};
}
return buf;
}
fn addDotEntriesIfNeeded(this: *@This(), dir_fd: bun.FileDescriptor) void {
// `.addEntry()` already checks whether we can add "." and ".." to
// the result
this.addEntry(".");
this.addEntry("..");
this.addEntry(".", dir_fd);
this.addEntry("..", dir_fd);
}
fn errorWithPath(this: *@This(), err: Syscall.Error, path: [:0]const u8) Syscall.Error {
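The permission formatting above is the standard POSIX mode-bit to `rwxr-xr-x` mapping, with `s`/`S` and `t`/`T` standing in for the execute slot when setuid, setgid, or the sticky bit is set. A compact TypeScript sketch of the same mapping (plain octal bit values, not Bun's `bun.S` helpers):

// Standard POSIX permission bits: owner/group/other, read/write/execute.
const bits = [0o400, 0o200, 0o100, 0o040, 0o020, 0o010, 0o004, 0o002, 0o001];
const chars = "rwxrwxrwx";

function formatPermissions(mode: number): string {
  const out = bits.map((bit, i) => ((mode & bit) !== 0 ? chars[i] : "-"));
  // Special bits replace the corresponding execute slot; lowercase means
  // execute is also set, uppercase means it is not.
  if (mode & 0o4000) out[2] = out[2] === "x" ? "s" : "S"; // setuid
  if (mode & 0o2000) out[5] = out[5] === "x" ? "s" : "S"; // setgid
  if (mode & 0o1000) out[8] = out[8] === "x" ? "t" : "T"; // sticky
  return out.join("");
}

// formatPermissions(0o755)  -> "rwxr-xr-x"
// formatPermissions(0o4755) -> "rwsr-xr-x"
// formatPermissions(0o1777) -> "rwxrwxrwt"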

View File

@@ -646,7 +646,7 @@ pub fn handleCommand(this: *MySQLConnection, comptime Context: type, reader: New
.failed => {
const connection = this.getJSConnection();
defer {
this.queue.advance(connection);
this.flushQueue() catch {};
}
this.#flags.is_ready_for_query = true;
this.queue.markAsReadyForQuery();
@@ -933,7 +933,11 @@ fn handleResultSetOK(this: *MySQLConnection, request: *JSMySQLQuery, statement:
const connection = this.getJSConnection();
debug("handleResultSetOK: {d} {}", .{ status_flags.toInt(), is_last_result });
defer {
this.queue.advance(connection);
// Use flushQueue instead of just advance to ensure any data written
// by queries added during onQueryResult is actually sent.
// This fixes a race condition where the auto flusher may not be
// registered if the queue's current item is completed (not pending).
this.flushQueue() catch {};
}
this.#flags.is_ready_for_query = is_last_result;
if (is_last_result) {
@@ -977,7 +981,7 @@ fn handleResultSet(this: *MySQLConnection, comptime Context: type, reader: NewRe
try err.decode(reader);
defer err.deinit();
defer {
this.queue.advance(connection);
this.flushQueue() catch {};
}
if (request.getStatement()) |statement| {
statement.reset();

View File

@@ -1,4 +1,9 @@
pub fn decodeBinaryValue(globalObject: *jsc.JSGlobalObject, field_type: types.FieldType, column_length: u32, raw: bool, bigint: bool, unsigned: bool, binary: bool, comptime Context: type, reader: NewReader(Context)) !SQLDataCell {
/// MySQL's "binary" pseudo-charset ID. Columns with this character_set value
/// are true binary types (BINARY, VARBINARY, BLOB), as opposed to string columns
/// with binary collations (e.g., utf8mb4_bin) which have different character_set values.
pub const binary_charset: u16 = 63;
pub fn decodeBinaryValue(globalObject: *jsc.JSGlobalObject, field_type: types.FieldType, column_length: u32, raw: bool, bigint: bool, unsigned: bool, binary: bool, character_set: u16, comptime Context: type, reader: NewReader(Context)) !SQLDataCell {
debug("decodeBinaryValue: {s}", .{@tagName(field_type)});
return switch (field_type) {
.MYSQL_TYPE_TINY => {
@@ -151,7 +156,11 @@ pub fn decodeBinaryValue(globalObject: *jsc.JSGlobalObject, field_type: types.Fi
}
var string_data = try reader.encodeLenString();
defer string_data.deinit();
if (binary) {
// Only treat as binary if character_set indicates the binary pseudo-charset.
// The BINARY flag alone is insufficient because VARCHAR/CHAR columns
// with _bin collations (e.g., utf8mb4_bin) also have the BINARY flag set,
// but should return strings, not buffers.
if (binary and character_set == binary_charset) {
return SQLDataCell.raw(&string_data);
}
const slice = string_data.slice();
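To make the charset check above concrete, here is a small TypeScript sketch of the decoding decision (charset id 63 is MySQL's binary pseudo-charset, as noted in the diff; the helper is illustrative, not the actual decoder):

const BINARY_CHARSET = 63; // MySQL's "binary" pseudo-charset id

// BINARY/VARBINARY/BLOB columns report charset 63 and should decode to bytes.
// VARCHAR/CHAR columns with a _bin collation (e.g. utf8mb4_bin) also carry the
// BINARY flag but have a text charset, so they should still decode to strings.
function decodeStringColumn(bytes: Uint8Array, binaryFlag: boolean, characterSet: number): Uint8Array | string {
  if (binaryFlag && characterSet === BINARY_CHARSET) {
    return bytes; // true binary column -> raw buffer
  }
  return new TextDecoder().decode(bytes); // text column -> JavaScript string
}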

View File

@@ -140,7 +140,11 @@ pub const Row = struct {
}
},
else => {
if (column.flags.BINARY) {
// Only treat as binary if character_set indicates the binary pseudo-charset.
// The BINARY flag alone is insufficient because VARCHAR/CHAR columns
// with _bin collations (e.g., utf8mb4_bin) also have the BINARY flag set,
// but should return strings, not buffers.
if (column.flags.BINARY and column.character_set == DecodeBinaryValue.binary_charset) {
cell.* = SQLDataCell.raw(value);
} else {
const slice = value.slice();
@@ -230,7 +234,7 @@ pub const Row = struct {
}
const column = this.columns[i];
value.* = try decodeBinaryValue(this.globalObject, column.column_type, column.column_length, this.raw, this.bigint, column.flags.UNSIGNED, column.flags.BINARY, Context, reader);
value.* = try decodeBinaryValue(this.globalObject, column.column_type, column.column_length, this.raw, this.bigint, column.flags.UNSIGNED, column.flags.BINARY, column.character_set, Context, reader);
value.index = switch (column.name_or_index) {
// The indexed columns can be out of order.
.index => |idx| idx,
@@ -260,9 +264,11 @@ const std = @import("std");
const Data = @import("../../shared/Data.zig").Data;
const SQLDataCell = @import("../../shared/SQLDataCell.zig").SQLDataCell;
const SQLQueryResultMode = @import("../../shared/SQLQueryResultMode.zig").SQLQueryResultMode;
const decodeBinaryValue = @import("./DecodeBinaryValue.zig").decodeBinaryValue;
const decodeLengthInt = @import("./EncodeInt.zig").decodeLengthInt;
const DecodeBinaryValue = @import("./DecodeBinaryValue.zig");
const decodeBinaryValue = DecodeBinaryValue.decodeBinaryValue;
const NewReader = @import("./NewReader.zig").NewReader;
const decoderWrap = @import("./NewReader.zig").decoderWrap;

View File

@@ -744,6 +744,28 @@ pub fn fstatat(fd: bun.FileDescriptor, path: [:0]const u8) Maybe(bun.Stat) {
return Maybe(bun.Stat){ .result = stat_buf };
}
/// Like fstatat but does not follow symlinks (uses AT_SYMLINK_NOFOLLOW)
pub fn lstatat(fd: bun.FileDescriptor, path: [:0]const u8) Maybe(bun.Stat) {
if (Environment.isWindows) {
// On Windows, use O.NOFOLLOW to get lstat behavior (prevents following symlinks)
return switch (openatWindowsA(fd, path, O.NOFOLLOW, 0)) {
.result => |file| {
defer file.close();
return fstat(file);
},
.err => |err| Maybe(bun.Stat){ .err = err },
};
}
var stat_buf = mem.zeroes(bun.Stat);
const fd_valid = if (fd == bun.invalid_fd) std.posix.AT.FDCWD else fd.native();
if (Maybe(bun.Stat).errnoSysFP(syscall.fstatat(fd_valid, path, &stat_buf, std.posix.AT.SYMLINK_NOFOLLOW), .fstatat, fd, path)) |err| {
log("lstatat({f}, {s}) = {s}", .{ fd, path, @tagName(err.getErrno()) });
return err;
}
log("lstatat({f}, {s}) = 0", .{ fd, path });
return Maybe(bun.Stat){ .result = stat_buf };
}
pub fn mkdir(file_path: [:0]const u8, flags: mode_t) Maybe(void) {
return switch (Environment.os) {
.mac => Maybe(void).errnoSysP(syscall.mkdir(file_path, flags), .mkdir, file_path) orelse .success,
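The stat/lstat distinction that lstatat implements is the same one Node's fs API exposes; a quick illustration using node:fs (not Bun's Syscall layer):

import { lstatSync, statSync, symlinkSync, writeFileSync } from "node:fs";

// Set up a file and a symlink pointing at it (assumes "link.txt" does not exist yet).
writeFileSync("target.txt", "hi");
symlinkSync("target.txt", "link.txt");

statSync("link.txt").isSymbolicLink();  // false -- stat follows the link to the target
lstatSync("link.txt").isSymbolicLink(); // true  -- lstat reports the link itself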

View File

@@ -453,6 +453,84 @@ console.log("PRELOAD");
},
});
// Test that autoloadBunfig: false works with execArgv (regression test for #25640)
// When execArgv is present, bunfig should still be disabled if autoloadBunfig: false
itBundled("compile/AutoloadBunfigDisabledWithExecArgv", {
compile: {
autoloadBunfig: false,
execArgv: ["--smol"],
},
files: {
"/entry.ts": /* js */ `
console.log("ENTRY");
`,
},
runtimeFiles: {
"/bunfig.toml": `
preload = ["./preload.ts"]
`,
"/preload.ts": `
console.log("PRELOAD");
`,
},
run: {
// When bunfig is disabled, preload should NOT execute even with execArgv
stdout: "ENTRY",
setCwd: true,
},
});
// Test CLI backend for autoloadBunfig: false with execArgv (regression test for #25640)
itBundled("compile/AutoloadBunfigDisabledWithExecArgvCLI", {
compile: {
autoloadBunfig: false,
execArgv: ["--smol"],
},
backend: "cli",
files: {
"/entry.ts": /* js */ `
console.log("ENTRY");
`,
},
runtimeFiles: {
"/bunfig.toml": `
preload = ["./preload.ts"]
`,
"/preload.ts": `
console.log("PRELOAD");
`,
},
run: {
stdout: "ENTRY",
setCwd: true,
},
});
// Test that autoloadBunfig: true with execArgv still loads bunfig
itBundled("compile/AutoloadBunfigEnabledWithExecArgv", {
compile: {
autoloadBunfig: true,
execArgv: ["--smol"],
},
files: {
"/entry.ts": /* js */ `
console.log("ENTRY");
`,
},
runtimeFiles: {
"/bunfig.toml": `
preload = ["./preload.ts"]
`,
"/preload.ts": `
console.log("PRELOAD");
`,
},
run: {
stdout: "PRELOAD\nENTRY",
setCwd: true,
},
});
// Test that both tsconfig and package.json can be enabled together
itBundled("compile/AutoloadBothTsconfigAndPackageJson", {
compile: {

View File

@@ -175,4 +175,105 @@ describe("bundler", () => {
stdout: /SUCCESS: user arguments properly passed with exec argv present/,
},
});
// Test that --version and --help flags are passed through to user code (issue #26082)
// When compile-exec-argv is used, user flags like --version should NOT be intercepted by Bun
itBundled("compile/CompileExecArgvVersionHelpPassthrough", {
compile: {
execArgv: ["--smol"],
},
backend: "cli",
files: {
"/entry.ts": /* js */ `
// Test that --version and --help are passed through to user code, not intercepted by Bun
const args = process.argv.slice(2);
console.log("User args:", JSON.stringify(args));
if (args.includes("--version")) {
console.log("APP_VERSION:1.0.0");
} else if (args.includes("-v")) {
console.log("APP_VERSION:1.0.0");
} else if (args.includes("--help")) {
console.log("APP_HELP:This is my app help");
} else if (args.includes("-h")) {
console.log("APP_HELP:This is my app help");
} else {
console.log("NO_FLAG_MATCHED");
}
`,
},
run: {
args: ["--version"],
stdout: /APP_VERSION:1\.0\.0/,
},
});
// Test with -v short flag
itBundled("compile/CompileExecArgvShortVersionPassthrough", {
compile: {
execArgv: ["--smol"],
},
backend: "cli",
files: {
"/entry.ts": /* js */ `
const args = process.argv.slice(2);
if (args.includes("-v")) {
console.log("APP_VERSION:1.0.0");
} else {
console.log("FAIL: -v not found in args:", args);
process.exit(1);
}
`,
},
run: {
args: ["-v"],
stdout: /APP_VERSION:1\.0\.0/,
},
});
// Test with --help flag
itBundled("compile/CompileExecArgvHelpPassthrough", {
compile: {
execArgv: ["--smol"],
},
backend: "cli",
files: {
"/entry.ts": /* js */ `
const args = process.argv.slice(2);
if (args.includes("--help")) {
console.log("APP_HELP:my custom help");
} else {
console.log("FAIL: --help not found in args:", args);
process.exit(1);
}
`,
},
run: {
args: ["--help"],
stdout: /APP_HELP:my custom help/,
},
});
// Test with -h short flag
itBundled("compile/CompileExecArgvShortHelpPassthrough", {
compile: {
execArgv: ["--smol"],
},
backend: "cli",
files: {
"/entry.ts": /* js */ `
const args = process.argv.slice(2);
if (args.includes("-h")) {
console.log("APP_HELP:my custom help");
} else {
console.log("FAIL: -h not found in args:", args);
process.exit(1);
}
`,
},
run: {
args: ["-h"],
stdout: /APP_HELP:my custom help/,
},
});
});

View File

@@ -0,0 +1,262 @@
import { Subprocess, spawn } from "bun";
import { afterEach, describe, expect, test } from "bun:test";
import { bunEnv, bunExe, isPosix, tempDir } from "harness";
import { join } from "node:path";
import { InspectorSession, connect } from "./junit-reporter";
import { SocketFramer } from "./socket-framer";
/**
* Extended InspectorSession with helper methods for TestReporter testing
*/
class TestReporterSession extends InspectorSession {
private foundTests: Map<number, any> = new Map();
private startedTests: Set<number> = new Set();
private endedTests: Map<number, any> = new Map();
constructor() {
super();
this.setupTestEventListeners();
}
private setupTestEventListeners() {
this.addEventListener("TestReporter.found", (params: any) => {
this.foundTests.set(params.id, params);
});
this.addEventListener("TestReporter.start", (params: any) => {
this.startedTests.add(params.id);
});
this.addEventListener("TestReporter.end", (params: any) => {
this.endedTests.set(params.id, params);
});
}
enableInspector() {
this.send("Inspector.enable");
}
enableTestReporter() {
this.send("TestReporter.enable");
}
enableAll() {
this.send("Inspector.enable");
this.send("TestReporter.enable");
this.send("LifecycleReporter.enable");
this.send("Console.enable");
this.send("Runtime.enable");
}
initialize() {
this.send("Inspector.initialized");
}
unref() {
this.socket?.unref();
}
ref() {
this.socket?.ref();
}
getFoundTests() {
return this.foundTests;
}
getStartedTests() {
return this.startedTests;
}
getEndedTests() {
return this.endedTests;
}
clearFoundTests() {
this.foundTests.clear();
}
waitForEvent(eventName: string, timeout = 10000): Promise<any> {
this.ref();
return new Promise((resolve, reject) => {
const timer = setTimeout(() => {
reject(new Error(`Timeout waiting for event: ${eventName}`));
}, timeout);
const listener = (params: any) => {
clearTimeout(timer);
resolve(params);
};
this.addEventListener(eventName, listener);
});
}
/**
* Wait for a specific number of TestReporter.found events
*/
waitForFoundTests(count: number, timeout = 10000): Promise<Map<number, any>> {
this.ref();
return new Promise((resolve, reject) => {
const timer = setTimeout(() => {
reject(
new Error(
`Timeout waiting for ${count} found tests, got ${this.foundTests.size}: ${JSON.stringify([...this.foundTests.values()])}`,
),
);
}, timeout);
const check = () => {
if (this.foundTests.size >= count) {
clearTimeout(timer);
resolve(this.foundTests);
}
};
// Check immediately in case we already have enough
check();
// Also listen for new events
this.addEventListener("TestReporter.found", check);
});
}
/**
* Wait for a specific number of TestReporter.end events
*/
waitForEndedTests(count: number, timeout = 10000): Promise<Map<number, any>> {
this.ref();
return new Promise((resolve, reject) => {
const timer = setTimeout(() => {
reject(new Error(`Timeout waiting for ${count} ended tests, got ${this.endedTests.size}`));
}, timeout);
const check = () => {
if (this.endedTests.size >= count) {
clearTimeout(timer);
resolve(this.endedTests);
}
};
check();
this.addEventListener("TestReporter.end", check);
});
}
}
describe.if(isPosix)("TestReporter inspector protocol", () => {
let proc: Subprocess | undefined;
let socket: ReturnType<typeof connect> extends Promise<infer T> ? T : never;
afterEach(() => {
proc?.kill();
proc = undefined;
// @ts-ignore - close the socket if it exists
socket?.end?.();
socket = undefined as any;
});
test("retroactively reports tests when TestReporter.enable is called after tests are discovered", async () => {
// This test specifically verifies that when TestReporter.enable is called AFTER
// test collection has started, the already-discovered tests are retroactively reported.
//
// The flow is:
// 1. Connect to inspector and enable only Inspector domain (NOT TestReporter)
// 2. Send Inspector.initialized to allow test collection and execution to proceed
// 3. Wait briefly for test collection to complete
// 4. THEN send TestReporter.enable - this should trigger retroactive reporting
// of tests that were discovered but not yet reported
using dir = tempDir("test-reporter-delayed-enable", {
"delayed.test.ts": `
import { describe, test, expect } from "bun:test";
describe("suite A", () => {
test("test A1", async () => {
// Add delay to ensure we have time to enable TestReporter during execution
await Bun.sleep(500);
expect(1).toBe(1);
});
test("test A2", () => {
expect(2).toBe(2);
});
});
describe("suite B", () => {
test("test B1", () => {
expect(3).toBe(3);
});
});
`,
});
const socketPath = join(String(dir), `inspector-${Math.random().toString(36).substring(2)}.sock`);
const session = new TestReporterSession();
const framer = new SocketFramer((message: string) => {
session.onMessage(message);
});
const socketPromise = connect(`unix://${socketPath}`).then(s => {
socket = s;
session.socket = s;
session.framer = framer;
s.data = {
onData: framer.onData.bind(framer),
};
return s;
});
proc = spawn({
cmd: [bunExe(), `--inspect-wait=unix:${socketPath}`, "test", "delayed.test.ts"],
env: bunEnv,
cwd: String(dir),
stdout: "pipe",
stderr: "pipe",
});
await socketPromise;
// Enable Inspector only (NOT TestReporter)
session.enableInspector();
// Signal ready - this allows test collection and execution to proceed
session.initialize();
// Wait for test collection and first test to start running
// The first test has a 500ms sleep, so waiting 200ms ensures we're in execution phase
await Bun.sleep(200);
// Now enable TestReporter - this should trigger retroactive reporting
// of all tests that were discovered while TestReporter was disabled
session.enableTestReporter();
// We should receive found events for all tests retroactively
// Structure: 2 describes + 3 tests = 5 items
const foundTests = await session.waitForFoundTests(5, 15000);
expect(foundTests.size).toBe(5);
const testsArray = [...foundTests.values()];
const describes = testsArray.filter(t => t.type === "describe");
const tests = testsArray.filter(t => t.type === "test");
expect(describes.length).toBe(2);
expect(tests.length).toBe(3);
// Verify the test names
const testNames = tests.map(t => t.name).sort();
expect(testNames).toEqual(["test A1", "test A2", "test B1"]);
// Verify describe names
const describeNames = describes.map(d => d.name).sort();
expect(describeNames).toEqual(["suite A", "suite B"]);
// Wait for tests to complete
const endedTests = await session.waitForEndedTests(3, 15000);
expect(endedTests.size).toBe(3);
const exitCode = await proc.exited;
expect(exitCode).toBe(0);
});
});

View File

@@ -0,0 +1,30 @@
-----BEGIN ENCRYPTED PRIVATE KEY-----
MIIFNTBfBgkqhkiG9w0BBQ0wUjAxBgkqhkiG9w0BBQwwJAQQieLggVjbubz09mX5
GdRQAwICCAAwDAYIKoZIhvcNAgkFADAdBglghkgBZQMEASoEEJ++f2E23qU4mbP4
m3RnPasEggTQoS6zcBDvWURYyctw9Qma8L/ZnPg4SBclVzYbiZcvBPNRvCNLnYxQ
ysimU/8PTCP9m944dcsMolRqPjj0gOQCnBpqbZmnc7elwDFZIhePRfMKC2bPHZeo
ABonNOs2VstJ9gT3RA5x8Dj99dsoPdnV9rL6vkW0Gk86BPGgQq5i1ipJvYrpOtay
Bq5JgpptVX86azXZVriB8FUNfJuFOPQfxfXIY7ogHpQWZ7rIVa5ug7LlJ7sLjakj
ph/4corzRnRr88/eFfhYbV5rob/Lvoq8+I2Hgf25ypJ2XdOoWAgDOvl6+k01v/Ci
VAYAE1v9RgmiAXFIE9uYbSIyhiVibmLU6QK7Vcydv0ZaZLdP/9HwfZ6Q5u1a23rj
ltzRFOu5H7ipVXSoZU1ffw2EXi1RZJU2n5M3tU11qZsNpaDulEdcYZm74sUaqdjA
zkYSO+RBehptEUfgjXBrW8HJ42fCfd6IvQ7NtT3e3zJup105cHIEfO8IiSSt/oW3
SOupzjTpARHhAbPKSEmUVC1IXjGUvUuZs+NlN+byNkI4IhSTHp4vn5k87l22jccl
4NwW5ZIouqawvV5gyOGgBcwgSfvd4H8mcSeFfZhVmEtRDKtubREr8mqqcUWq5V/W
fEGR2LTQKRofhGGw56Jzw8FgNJNI0m6WBYIPQVtmwqqljPNPDuCQZ/icrhM6s0MR
7IyDiCUHzsz2JZxRJJO9pzItSABym/I57DTtRg1XQTEuSU+dTwhVzwkytWVldHx3
Rvbb6DUWrLtthoAs/LSDevjhrLYAdkLj4iaexqfYPcrRA22hj3KxxRpzV8zqMNvM
hI703HrjIPzlVhrqf6gMiKs7iZu2XQ4RRsQyKzWlro9bOprUvIg/abFtaJDXKqN0
sTJQ9rSpTJgUzG4sJEFiUeM0Wm2cLUO1w4N4/si89vOCcVJJUIjZgwsyFu8DpUIE
7E9rgAzuWByIBOJQ0f1hfF7zGUxAJ75qRdHm0q2aDkDPLiJk1alR1MpMs1tIcaBO
CAxnlZtORvq6QMQnERkpzuvX2PS5mtZ8w/qizPgb8GL3kU+Ex0lJHT8PBwspSXWV
Gc9AvCZ1z+YLnflUsRch/dI/suGhpIcLOX4M3pfW9qfo/i92uR52JWzIAkRKFTOi
fSiADLpar2WT2Kcz9aGfTB2swjhsL7Q6Tf8BWUCVYtfbf5FK07uPTCb9tyy+LxtU
qvtHe3XyZTO3guRBBDZotEOqNKzJw+ZUKIO7vX5JGtpMudBHL2J1KH80Qy4+uR/H
b9YyW0UFOyuOejmrMwHMP/iXkYyTsBiShETU0Uga33xvSuS10FhiCt87cXCI/WeZ
Jw1fk29QA3nx5vw9zDcVFiJRwOu9l6/JxXFpGm0ZjhYudS98yJkam3sbwJThJ+1C
fFzzCM69iUdPw/8JEPnD+Wd2okFiwjpEzHrZ+n1P5YGDF7UTyEB3gLpn3sgmBR9H
2z4yiL+ST/WI7n3ykXxzxjzcEgkDEwLfzHlguqh7jhYWuIhsDmcch7EgH8+gsyke
9lgUWJdoHXVfNZmWh4rMMkEUGi605WulXV8N9qQJJOJltN3lGdKZi+CBK6dTlPtJ
iAj5mvrk++pP/b0SplcQtq3pspGnWmjw+jw0aOVzSpn8qrco1/FZWdw=
-----END ENCRYPTED PRIVATE KEY-----

View File

@@ -1,28 +1,28 @@
-----BEGIN PRIVATE KEY-----
MIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQCIzOJskt6VkEJY
XKSJv/Gdil3XYkjk3NVc/+m+kzqnkTRbPtT9w+IGWgmJhuf9DJPLCwHFAEFarVwV
x16Q0PbU4ajXaLRHEYGhrH10oTMjQnJ24xVm26mxRXPQa5vaLpWJqNyIdNLIQLe+
UXUOzSGGsFTRMAjvYrkzjBe4ZUnaZV+aFY/ug0jfzeA1dJjzKZs6+yTJRbsuWUEb
8MsDmT4v+kBZDKdaDn7AFDWRVqx/38BnqsRzkM0CxpnyT2kRzw5zQajIE13gdTJo
1EHvYSUkkxrY5m30Rl9BuBBZBjhMzOHq0fYVVooHO+sf4XHPgvFTTxJum85u7J1J
oEUjrLKtAgMBAAECggEACInVNhaiqu4infZGVMy0rXMV8VwSlapM7O2SLtFsr0nK
XUmaLK6dvGzBPKK9dxdiYCFzPlMKQTkhzsAvYFWSmm3tRmikG+11TFyCRhXLpc8/
ark4vD9Io6ZkmKUmyKLwtXNjNGcqQtJ7RXc7Ga3nAkueN6JKZHqieZusXVeBGQ70
YH1LKyVNBeJggbj+g9rqaksPyNJQ8EWiNTJkTRQPazZ0o1VX/fzDFyr/a5npFtHl
4BHfafv9o1Xyr70Kie8CYYRJNViOCN+ylFs7Gd3XRaAkSkgMT/7DzrHdEM2zrrHK
yNg2gyDVX9UeEJG2X5UtU0o9BVW7WBshz/2hqIUHoQKBgQC8zsRFvC7u/rGr5vRR
mhZZG+Wvg03/xBSuIgOrzm+Qie6mAzOdVmfSL/pNV9EFitXt1yd2ROo31AbS7Evy
Bm/QVKr2mBlmLgov3B7O/e6ABteooOL7769qV/v+yo8VdEg0biHmsfGIIXDe3Lwl
OT0XwF9r/SeZLbw1zfkSsUVG/QKBgQC5fANM3Dc9LEek+6PHv5+eC1cKkyioEjUl
/y1VUD00aABI1TUcdLF3BtFN2t/S6HW0hrP3KwbcUfqC25k+GDLh1nM6ZK/gI3Yn
IGtCHxtE3S6jKhE9QcK/H+PzGVKWge9SezeYRP0GHJYDrTVTA8Kt9HgoZPPeReJl
+Ss9c8ThcQKBgECX6HQHFnNzNSufXtSQB7dCoQizvjqTRZPxVRoxDOABIGExVTYt
umUhPtu5AGyJ+/hblEeU+iBRbGg6qRzK8PPwE3E7xey8MYYAI5YjL7YjISKysBUL
AhM6uJ6Jg/wOBSnSx8xZ8kzlS+0izUda1rjKeprCSArSp8IsjlrDxPStAoGAEcPr
+P+altRX5Fhpvmb/Hb8OTif8G+TqjEIdkG9H/W38oP0ywg/3M2RGxcMx7txu8aR5
NjI7zPxZFxF7YvQkY3cLwEsGgVxEI8k6HLIoBXd90Qjlb82NnoqqZY1GWL4HMwo0
L/Rjm6M/Rwje852Hluu0WoIYzXA6F/Q+jPs6nzECgYAxx4IbDiGXuenkwSF1SUyj
NwJXhx4HDh7U6EO/FiPZE5BHE3BoTrFu3o1lzverNk7G3m+j+m1IguEAalHlukYl
rip9iUISlKYqbYZdLBoLwHAfHhszdrjqn8/v6oqbB5yR3HXjPFUWJo0WJ2pqJp56
ZshgmQQ/5Khoj6x0/dMPSg==
MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDlYzosgRgXHL6v
Mh1V0ERFhsvlZrtRojSw6tafr3SQBphU793/rGiYZlL/lJ9HIlLkx9JMbuTjNm5U
2eRwHiTQIeWD4aCIESwPlkdaVYtC+IOj55bJN8xNa7h5GyJwF7PnPetAsKyE8DMB
n1gKMhaIis7HHOUtk4/K3Y4peU44d04z0yPt6JtY5Sbvi1E7pGX6T/2c9sHsdIDe
DctWnewpXXs8zkAla0KNWQfpDnpS53wxAfStTA4lSrA9daxC7hZopQlLxFIbJk+0
BLbEsXtrJ54T5iguHk+2MDVAy4MOqP9XbKV7eGHk73l6+CSwmHyHBxh4ChxRQeT5
BP0MUTn1AgMBAAECggEABtPvC5uVGr0DjQX2GxONsK8cOxoVec7U+C4pUMwBcXcM
yjxwlHdujpi/IDXtjsm+A2rSPu2vGPdKDfMFanPvPxW/Ne99noc6U0VzHsR8lnP8
wSB328nyJhzOeyZcXk9KTtgIPF7156gZsJLsZTNL+ej90i3xQWvKxCxXmrLuad5O
z/TrgZkC6wC3fgj1d3e8bMljQ7tLxbshJMYVI5o6RFTxy84DLI+rlvPkf7XbiMPf
2lsm4jcJKvfx+164HZJ9QVlx8ncqOHAnGvxb2xHHfqv4JAbz615t7yRvtaw4Paj5
6kQSf0VWnsVzgxNJWvnUZym/i/Qf5nQafjChCyKOEQKBgQD9f4SkvJrp/mFKWLHd
kDvRpSIIltfJsa5KShn1IHsQXFwc0YgyP4SKQb3Ckv+/9UFHK9EzM+WlPxZi7ZOS
hsWhIfkI4c4ORpxUQ+hPi0K2k+HIY7eYyONqDAzw5PGkKBo3mSGMHDXYywSqexhB
CCMHuHdMhwyHdz4PWYOK3C2VMQKBgQDnpsrHK7lM9aVb8wNhTokbK5IlTSzH/5oJ
lAVu6G6H3tM5YQeoDXztbZClvrvKU8DU5UzwaC+8AEWQwaram29QIDpAI3nVQQ0k
dmHHp/pCeADdRG2whaGcl418UJMMv8AUpWTRm+kVLTLqfTHBC0ji4NlCQMHCUCfd
U8TeUi5QBQKBgQDvJNd7mboDOUmLG7VgMetc0Y4T0EnuKsMjrlhimau/OYJkZX84
+BcPXwmnf4nqC3Lzs3B9/12L0MJLvZjUSHQ0mJoZOPxtF0vvasjEEbp0B3qe0wOn
DQ0NRCUJNNKJbJOfE8VEKnDZ/lx+f/XXk9eINwvElDrLqUBQtr+TxjbyYQKBgAxQ
lZ8Y9/TbajsFJDzcC/XhzxckjyjisbGoqNFIkfevJNN8EQgiD24f0Py+swUChtHK
jtiI8WCxMwGLCiYs9THxRKd8O1HW73fswy32BBvcfU9F//7OW9UTSXY+YlLfLrrq
P/3UqAN0L6y/kxGMJAfLpEEdaC+IS1Y8yc531/ZxAoGASYiasDpePtmzXklDxk3h
jEw64QAdXK2p/xTMjSeTtcqJ7fvaEbg+Mfpxq0mdTjfbTdR9U/nzAkwS7OoZZ4Du
ueMVls0IVqcNnBtikG8wgdxN27b5JPXS+GzQ0zDSpWFfRPZiIh37BAXr0D1voluJ
rEHkcals6p7hL98BoxjFIvA=
-----END PRIVATE KEY-----

View File

@@ -1,23 +1,23 @@
-----BEGIN CERTIFICATE-----
MIID5jCCAs6gAwIBAgIUN7coIsdMcLo9amZfkwogu0YkeLEwDQYJKoZIhvcNAQEL
BQAwfjELMAkGA1UEBhMCU0UxDjAMBgNVBAgMBVN0YXRlMREwDwYDVQQHDAhMb2Nh
dGlvbjEaMBgGA1UECgwRT3JnYW5pemF0aW9uIE5hbWUxHDAaBgNVBAsME09yZ2Fu
aXphdGlvbmFsIFVuaXQxEjAQBgNVBAMMCWxvY2FsaG9zdDAeFw0yMzA5MjExNDE2
MjNaFw0yNDA5MjAxNDE2MjNaMH4xCzAJBgNVBAYTAlNFMQ4wDAYDVQQIDAVTdGF0
ZTERMA8GA1UEBwwITG9jYXRpb24xGjAYBgNVBAoMEU9yZ2FuaXphdGlvbiBOYW1l
MRwwGgYDVQQLDBNPcmdhbml6YXRpb25hbCBVbml0MRIwEAYDVQQDDAlsb2NhbGhv
c3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCIzOJskt6VkEJYXKSJ
v/Gdil3XYkjk3NVc/+m+kzqnkTRbPtT9w+IGWgmJhuf9DJPLCwHFAEFarVwVx16Q
0PbU4ajXaLRHEYGhrH10oTMjQnJ24xVm26mxRXPQa5vaLpWJqNyIdNLIQLe+UXUO
zSGGsFTRMAjvYrkzjBe4ZUnaZV+aFY/ug0jfzeA1dJjzKZs6+yTJRbsuWUEb8MsD
mT4v+kBZDKdaDn7AFDWRVqx/38BnqsRzkM0CxpnyT2kRzw5zQajIE13gdTJo1EHv
YSUkkxrY5m30Rl9BuBBZBjhMzOHq0fYVVooHO+sf4XHPgvFTTxJum85u7J1JoEUj
rLKtAgMBAAGjXDBaMA4GA1UdDwEB/wQEAwIDiDATBgNVHSUEDDAKBggrBgEFBQcD
ATAUBgNVHREEDTALgglsb2NhbGhvc3QwHQYDVR0OBBYEFNzx4Rfs9m8XR5ML0WsI
sorKmB4PMA0GCSqGSIb3DQEBCwUAA4IBAQB87iQy8R0fiOky9WTcyzVeMaavS3MX
iTe1BRn1OCyDq+UiwwoNz7zdzZJFEmRtFBwPNFOe4HzLu6E+7yLFR552eYRHlqIi
/fiLb5JiZfPtokUHeqwELWBsoXtU8vKxViPiLZ09jkWOPZWo7b/xXd6QYykBfV91
usUXLzyTD2orMagpqNksLDGS3p3ggHEJBZtRZA8R7kPEw98xZHznOQpr26iv8kYz
ZWdLFoFdwgFBSfxePKax5rfo+FbwdrcTX0MhbORyiu2XsBAghf8s2vKDkHg2UQE8
haonxFYMFaASfaZ/5vWKYDTCJkJ67m/BtkpRafFEO+ad1i1S61OjfxH4
MIID4jCCAsqgAwIBAgIUcaRq6J/YF++Bo01Zc+HeQvCbnWMwDQYJKoZIhvcNAQEL
BQAwaTELMAkGA1UEBhMCVVMxCzAJBgNVBAgMAkNBMRYwFAYDVQQHDA1TYW4gRnJh
bmNpc2NvMQ0wCwYDVQQKDARPdmVuMREwDwYDVQQLDAhUZWFtIEJ1bjETMBEGA1UE
AwwKc2VydmVyLWJ1bjAeFw0yNTA5MDYwMzAwNDlaFw0zNTA5MDQwMzAwNDlaMGkx
CzAJBgNVBAYTAlVTMQswCQYDVQQIDAJDQTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNj
bzENMAsGA1UECgwET3ZlbjERMA8GA1UECwwIVGVhbSBCdW4xEzARBgNVBAMMCnNl
cnZlci1idW4wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDlYzosgRgX
HL6vMh1V0ERFhsvlZrtRojSw6tafr3SQBphU793/rGiYZlL/lJ9HIlLkx9JMbuTj
Nm5U2eRwHiTQIeWD4aCIESwPlkdaVYtC+IOj55bJN8xNa7h5GyJwF7PnPetAsKyE
8DMBn1gKMhaIis7HHOUtk4/K3Y4peU44d04z0yPt6JtY5Sbvi1E7pGX6T/2c9sHs
dIDeDctWnewpXXs8zkAla0KNWQfpDnpS53wxAfStTA4lSrA9daxC7hZopQlLxFIb
Jk+0BLbEsXtrJ54T5iguHk+2MDVAy4MOqP9XbKV7eGHk73l6+CSwmHyHBxh4ChxR
QeT5BP0MUTn1AgMBAAGjgYEwfzAdBgNVHQ4EFgQUw7nEnh4uOdZVZUapQzdAUaVa
An0wHwYDVR0jBBgwFoAUw7nEnh4uOdZVZUapQzdAUaVaAn0wDwYDVR0TAQH/BAUw
AwEB/zAsBgNVHREEJTAjgglsb2NhbGhvc3SHBH8AAAGHEAAAAAAAAAAAAAAAAAAA
AAEwDQYJKoZIhvcNAQELBQADggEBAEA8r1fvDLMSCb8bkAURpFk8chn8pl5MChzT
YUDaLdCCBjPXJkSXNdyuwS+T/ljAGyZbW5xuDccCNKltawO4CbyEXUEZbYr3w9eq
j8uqymJPhFf0O1rKOI2han5GBCgHwG13QwKI+4uu7390nD+TlzLOhxFfvOG7OadH
QNMNLNyldgF4Nb8vWdz0FtQiGUIrO7iq4LFhhd1lCxe0q+FAYSEYcc74WtF/Yo8V
JQauXuXyoP5FqLzNt/yeNQhceyIXJGKCsjr5/bASBmVlCwgRfsD3jpG37L8YCJs1
L4WEikcY4Lzb2NF9e94IyZdQsRqd9DFBF5zP013MSUiuhiow32k=
-----END CERTIFICATE-----

View File

@@ -0,0 +1,710 @@
/**
* All tests in this file run in both Bun and Node.js.
*
* Test that TLS options can be inherited from agent.options and agent.connectOpts.
* This is important for compatibility with libraries like https-proxy-agent.
*
* The HttpsProxyAgent tests verify that TLS options are properly passed through
* the proxy tunnel to the target HTTPS server.
*/
import { once } from "node:events";
import { readFileSync } from "node:fs";
import http from "node:http";
import https from "node:https";
import { createRequire } from "node:module";
import type { AddressInfo } from "node:net";
import net from "node:net";
import { dirname, join } from "node:path";
import { describe, test } from "node:test";
import { fileURLToPath } from "node:url";
// Use createRequire for ESM compatibility
const require = createRequire(import.meta.url);
const { HttpsProxyAgent } = require("https-proxy-agent") as {
HttpsProxyAgent: new (proxyUrl: string, options?: Record<string, unknown>) => http.Agent;
};
const __dirname = dirname(fileURLToPath(import.meta.url));
// Self-signed certificate with SANs for localhost and 127.0.0.1
// This cert is its own CA (self-signed)
const tlsCerts = {
cert: readFileSync(join(__dirname, "fixtures", "cert.pem"), "utf8"),
key: readFileSync(join(__dirname, "fixtures", "cert.key"), "utf8"),
encryptedKey: readFileSync(join(__dirname, "fixtures", "cert.encrypted.key"), "utf8"),
passphrase: "testpassword",
// Self-signed cert, so it's its own CA
get ca() {
return this.cert;
},
};
async function createHttpsServer(
options: https.ServerOptions = {},
): Promise<{ server: https.Server; port: number; hostname: string }> {
const server = https.createServer({ key: tlsCerts.key, cert: tlsCerts.cert, ...options }, (req, res) => {
res.writeHead(200);
res.end("OK");
});
await once(server.listen(0, "127.0.0.1"), "listening");
const { port } = server.address() as AddressInfo;
return { server, port, hostname: "127.0.0.1" };
}
async function createHttpServer(): Promise<{
server: http.Server;
port: number;
hostname: string;
}> {
const server = http.createServer((req, res) => {
res.writeHead(200);
res.end("OK");
});
await once(server.listen(0, "127.0.0.1"), "listening");
const { port } = server.address() as AddressInfo;
return { server, port, hostname: "127.0.0.1" };
}
/**
* Create an HTTP CONNECT proxy server.
* This proxy handles the CONNECT method to establish tunnels for HTTPS connections.
*/
function createConnectProxy(): net.Server {
return net.createServer(clientSocket => {
let buffer: Uint8Array = new Uint8Array(0);
let tunnelEstablished = false;
let targetSocket: net.Socket | null = null;
clientSocket.on("data", (data: Uint8Array) => {
// If tunnel is already established, forward data directly
if (tunnelEstablished && targetSocket) {
targetSocket.write(data);
return;
}
// Concatenate buffers
const newBuffer = new Uint8Array(buffer.length + data.length);
newBuffer.set(buffer);
newBuffer.set(data, buffer.length);
buffer = newBuffer;
const bufferStr = new TextDecoder().decode(buffer);
// Check if we have complete headers
const headerEnd = bufferStr.indexOf("\r\n\r\n");
if (headerEnd === -1) return;
const headerPart = bufferStr.substring(0, headerEnd);
const lines = headerPart.split("\r\n");
const requestLine = lines[0];
// Check for CONNECT method
const match = requestLine.match(/^CONNECT\s+([^:]+):(\d+)\s+HTTP/);
if (!match) {
clientSocket.write("HTTP/1.1 400 Bad Request\r\n\r\n");
clientSocket.end();
return;
}
const [, targetHost, targetPort] = match;
// Get any data after the headers (shouldn't be any for CONNECT)
// headerEnd is a character index into the decoded string, so re-encode to get the byte offset
const headerBytes = new TextEncoder().encode(bufferStr.substring(0, headerEnd + 4)).length;
const remainingData = buffer.subarray(headerBytes);
// Connect to target
targetSocket = net.connect(parseInt(targetPort, 10), targetHost, () => {
clientSocket.write("HTTP/1.1 200 Connection Established\r\n\r\n");
tunnelEstablished = true;
// Forward any remaining data
if (remainingData.length > 0) {
targetSocket!.write(remainingData);
}
// Set up bidirectional piping
targetSocket!.on("data", (chunk: Uint8Array) => {
clientSocket.write(chunk);
});
});
targetSocket.on("error", () => {
if (!tunnelEstablished) {
clientSocket.write("HTTP/1.1 502 Bad Gateway\r\n\r\n");
}
clientSocket.end();
});
targetSocket.on("close", () => clientSocket.destroy());
clientSocket.on("close", () => targetSocket?.destroy());
});
clientSocket.on("error", () => {
targetSocket?.destroy();
});
});
}
/**
* Helper to start a proxy server and get its port.
*/
async function startProxy(server: net.Server): Promise<number> {
return new Promise<number>(resolve => {
server.listen(0, "127.0.0.1", () => {
const addr = server.address() as AddressInfo;
resolve(addr.port);
});
});
}
describe("https.request agent TLS options inheritance", () => {
describe("agent.options", () => {
test("inherits ca from agent.options", async () => {
const { server, port, hostname } = await createHttpsServer();
try {
// Create an agent with ca in options
const agent = new https.Agent({
ca: tlsCerts.ca,
});
const { promise, resolve, reject } = Promise.withResolvers<void>();
const req = https.request(
{
hostname,
port,
path: "/",
method: "GET",
agent,
// NO ca here - should inherit from agent.options
},
res => {
res.on("data", () => {});
res.on("end", resolve);
},
);
req.on("error", reject);
req.end();
await promise;
} finally {
server.close();
}
});
test("inherits rejectUnauthorized from agent.options", async () => {
const { server, port, hostname } = await createHttpsServer();
try {
// Create an agent with rejectUnauthorized: false in options
const agent = new https.Agent({
rejectUnauthorized: false,
});
const { promise, resolve, reject } = Promise.withResolvers<void>();
const req = https.request(
{
hostname,
port,
path: "/",
method: "GET",
agent,
// NO rejectUnauthorized here - should inherit from agent.options
},
res => {
res.on("data", () => {});
res.on("end", resolve);
},
);
req.on("error", reject);
req.end();
await promise;
} finally {
server.close();
}
});
test("inherits cert and key from agent.options", async () => {
// Create a server that uses TLS
const { server, port, hostname } = await createHttpsServer();
try {
// Create an agent with cert/key in options
const agent = new https.Agent({
rejectUnauthorized: false,
cert: tlsCerts.cert,
key: tlsCerts.key,
});
const { promise, resolve, reject } = Promise.withResolvers<void>();
const req = https.request(
{
hostname,
port,
path: "/",
method: "GET",
agent,
// NO cert/key here - should inherit from agent.options
},
res => {
res.on("data", () => {});
res.on("end", resolve);
},
);
req.on("error", reject);
req.end();
await promise;
} finally {
server.close();
}
});
});
// Test HttpsProxyAgent compatibility - these tests use the real HttpsProxyAgent package
// to verify that HTTPS requests work through the proxy tunnel with TLS options
describe("HttpsProxyAgent TLS options", () => {
test("HttpsProxyAgent with rejectUnauthorized: false", async () => {
const { server, port, hostname } = await createHttpsServer();
const proxy = createConnectProxy();
const proxyPort = await startProxy(proxy);
try {
// Create HttpsProxyAgent for the proxy connection
const agent = new HttpsProxyAgent(`http://127.0.0.1:${proxyPort}`, {
rejectUnauthorized: false,
});
const { promise, resolve, reject } = Promise.withResolvers<void>();
const req = https.request(
{
hostname,
port,
path: "/",
method: "GET",
agent,
// TLS options must also be passed here for Node.js compatibility
// https-proxy-agent doesn't propagate these to the target connection in Node.js
// See: https://github.com/TooTallNate/node-https-proxy-agent/issues/35
rejectUnauthorized: false,
},
res => {
res.on("data", () => {});
res.on("end", resolve);
},
);
req.on("error", reject);
req.end();
await promise;
} finally {
server.close();
proxy.close();
}
});
test("HttpsProxyAgent with ca option", async () => {
const { server, port, hostname } = await createHttpsServer();
const proxy = createConnectProxy();
const proxyPort = await startProxy(proxy);
try {
// Create HttpsProxyAgent for the proxy connection
const agent = new HttpsProxyAgent(`http://127.0.0.1:${proxyPort}`, {
ca: tlsCerts.ca,
});
const { promise, resolve, reject } = Promise.withResolvers<void>();
const req = https.request(
{
hostname,
port,
path: "/",
method: "GET",
agent,
// TLS options must also be passed here for Node.js compatibility
ca: tlsCerts.ca,
},
res => {
res.on("data", () => {});
res.on("end", resolve);
},
);
req.on("error", reject);
req.end();
await promise;
} finally {
server.close();
proxy.close();
}
});
test("HttpsProxyAgent with cert and key options", async () => {
const { server, port, hostname } = await createHttpsServer();
const proxy = createConnectProxy();
const proxyPort = await startProxy(proxy);
try {
// Create HttpsProxyAgent for the proxy connection
const agent = new HttpsProxyAgent(`http://127.0.0.1:${proxyPort}`, {
rejectUnauthorized: false,
cert: tlsCerts.cert,
key: tlsCerts.key,
});
const { promise, resolve, reject } = Promise.withResolvers<void>();
const req = https.request(
{
hostname,
port,
path: "/",
method: "GET",
agent,
// TLS options must also be passed here for Node.js compatibility
rejectUnauthorized: false,
cert: tlsCerts.cert,
key: tlsCerts.key,
},
res => {
res.on("data", () => {});
res.on("end", resolve);
},
);
req.on("error", reject);
req.end();
await promise;
} finally {
server.close();
proxy.close();
}
});
});
describe("option precedence (matches Node.js)", () => {
// In Node.js, options are merged via spread in createSocket:
// options = { __proto__: null, ...options, ...this.options };
// https://github.com/nodejs/node/blob/v23.6.0/lib/_http_agent.js#L365
// With spread, the last one wins, so agent.options overwrites request options.
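// A minimal sketch of that merge (hypothetical values, not part of the test):
//   const requestOptions = { ca: "request-ca" };
//   const agentOptions = { ca: "agent-ca" };
//   const merged = { ...requestOptions, ...agentOptions };
//   merged.ca === "agent-ca"; // true - the later spread (agent.options) wins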
test("agent.options takes precedence over direct options", async () => {
const { server, port, hostname } = await createHttpsServer();
try {
// Create an agent with correct CA
const agent = new https.Agent({
ca: tlsCerts.ca, // Correct CA in agent.options - should be used
});
const { promise, resolve, reject } = Promise.withResolvers<void>();
const req = https.request(
{
hostname,
port,
path: "/",
method: "GET",
agent,
ca: "wrong-ca-that-would-fail", // Wrong CA in request - should be ignored
},
res => {
res.on("data", () => {});
res.on("end", resolve);
},
);
req.on("error", reject);
req.end();
await promise;
} finally {
server.close();
}
});
test("direct options used when agent.options not set", async () => {
const { server, port, hostname } = await createHttpsServer();
try {
// Create an agent without ca
const agent = new https.Agent({});
const { promise, resolve, reject } = Promise.withResolvers<void>();
const req = https.request(
{
hostname,
port,
path: "/",
method: "GET",
agent,
ca: tlsCerts.ca, // Direct option should be used since agent.options.ca is not set
},
res => {
res.on("data", () => {});
res.on("end", resolve);
},
);
req.on("error", reject);
req.end();
await promise;
} finally {
server.close();
}
});
});
describe("other TLS options", () => {
test("inherits servername from agent.options", async () => {
const { server, port, hostname } = await createHttpsServer();
try {
const agent = new https.Agent({
rejectUnauthorized: false,
servername: "localhost", // Should be passed to TLS
});
const { promise, resolve, reject } = Promise.withResolvers<void>();
const req = https.request(
{
hostname,
port,
path: "/",
method: "GET",
agent,
},
res => {
res.on("data", () => {});
res.on("end", resolve);
},
);
req.on("error", reject);
req.end();
await promise;
} finally {
server.close();
}
});
test("inherits ciphers from agent.options", async () => {
const { server, port, hostname } = await createHttpsServer();
try {
const agent = new https.Agent({
rejectUnauthorized: false,
ciphers: "HIGH:!aNULL:!MD5", // Custom cipher suite
});
const { promise, resolve, reject } = Promise.withResolvers<void>();
const req = https.request(
{
hostname,
port,
path: "/",
method: "GET",
agent,
},
res => {
res.on("data", () => {});
res.on("end", resolve);
},
);
req.on("error", reject);
req.end();
await promise;
} finally {
server.close();
}
});
test("inherits passphrase from agent.options", async () => {
// Create server that accepts connections with encrypted key
const { server, port, hostname } = await createHttpsServer({
key: tlsCerts.encryptedKey,
passphrase: tlsCerts.passphrase,
});
try {
// Create an agent with encrypted key and passphrase in options
const agent = new https.Agent({
ca: tlsCerts.ca,
cert: tlsCerts.cert,
key: tlsCerts.encryptedKey,
passphrase: tlsCerts.passphrase,
});
const { promise, resolve, reject } = Promise.withResolvers<void>();
const req = https.request(
{
hostname,
port,
path: "/",
method: "GET",
agent,
// NO passphrase here - should inherit from agent.options
},
res => {
res.on("data", () => {});
res.on("end", resolve);
},
);
req.on("error", reject);
req.end();
await promise;
} finally {
server.close();
}
});
test("supports multiple CAs (array)", async () => {
const { server, port, hostname } = await createHttpsServer();
try {
// Create an agent with CA as an array
const agent = new https.Agent({
ca: [tlsCerts.ca], // Array of CAs
});
const { promise, resolve, reject } = Promise.withResolvers<void>();
const req = https.request(
{
hostname,
port,
path: "/",
method: "GET",
agent,
},
res => {
res.on("data", () => {});
res.on("end", resolve);
},
);
req.on("error", reject);
req.end();
await promise;
} finally {
server.close();
}
});
});
describe("TLS error handling", () => {
test("rejects self-signed cert when rejectUnauthorized is true", async () => {
const { server, port, hostname } = await createHttpsServer();
try {
// Create an agent without CA and with rejectUnauthorized: true (default)
const agent = new https.Agent({
rejectUnauthorized: true,
// NO ca - should fail because cert is self-signed
});
const { promise, resolve, reject } = Promise.withResolvers<Error>();
const req = https.request(
{
hostname,
port,
path: "/",
method: "GET",
agent,
},
() => {
reject(new Error("Expected request to fail"));
},
);
req.on("error", resolve);
req.end();
const error = await promise;
// Should get a certificate error (self-signed cert not trusted)
if (
!(
error.message.includes("self-signed") ||
error.message.includes("SELF_SIGNED") ||
error.message.includes("certificate") ||
error.message.includes("unable to verify")
)
) {
throw new Error(`Expected certificate error, got: ${error.message}`);
}
} finally {
server.close();
}
});
});
});
describe("http.request agent options", () => {
test("does not fail when agent has TLS options (they are ignored for HTTP)", async () => {
const { server, port, hostname } = await createHttpServer();
try {
// Create an agent - TLS options passed via constructor should be ignored for HTTP
// Using type assertion since http.Agent doesn't normally accept TLS options
const agent = new (http.Agent as any)({
rejectUnauthorized: false,
ca: "some-ca",
});
const { promise, resolve, reject } = Promise.withResolvers<void>();
const req = http.request(
{
hostname,
port,
path: "/",
method: "GET",
agent,
},
res => {
res.on("data", () => {});
res.on("end", resolve);
},
);
req.on("error", reject);
req.end();
await promise;
} finally {
server.close();
}
});
});
// Only run in Bun to avoid an infinite loop when Node.js runs this file
if (typeof Bun !== "undefined") {
const { bunEnv, nodeExe } = await import("harness");
describe("Node.js compatibility", () => {
test("all tests pass in Node.js", async () => {
const node = nodeExe();
if (!node) {
throw new Error("Node.js not found in PATH");
}
const testFile = fileURLToPath(import.meta.url);
await using proc = Bun.spawn({
cmd: [node, "--test", testFile],
env: bunEnv,
stdout: "pipe",
stderr: "pipe",
});
const [stdout, stderr, exitCode] = await Promise.all([
new Response(proc.stdout).text(),
new Response(proc.stderr).text(),
proc.exited,
]);
if (exitCode !== 0) {
throw new Error(`Node.js tests failed with code ${exitCode}\n${stderr}\n${stdout}`);
}
});
});
}

View File

@@ -0,0 +1,18 @@
# Node.js Compatibility Tests
These are official Node.js tests from the Node.js repository.
## Important Notes
- These tests are **not written by Bun**; they are vendored from the Node.js repository as-is and must not be modified
- A test passes when it exits with code 0
## Running Tests
To run these tests with a debug build:
```bash
bun bd <file-path>
```
Note: `bun bd test <file-path>` does **not** work since these tests are meant to be run directly without the Bun test runner.

View File

@@ -0,0 +1,89 @@
import { expect, test } from "bun:test";
import { bunEnv, bunExe, isWindows, tempDir } from "harness";
// https://github.com/oven-sh/bun/issues/25628
// Bug: Lazy code-splitting chunks are not accessible via frontend.files in fullstack builds
// when using --splitting with --compile. The chunks are physically written to disk and embedded
// in the executable, but they're filtered out when accessing the embedded files array.
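// In short (sketch of the expectation exercised by server.ts below):
//   import frontend from "./client.html";
//   frontend.files.map(f => f.path); // should include the chunk-*.js emitted for the dynamic import()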
test("lazy chunks from code splitting should appear in frontend.files", { timeout: 60000 }, async () => {
using dir = tempDir("issue-25628", {
// Server entry that prints frontend.files and exits
"server.ts": `
import frontend from "./client.html";
// Get all file paths from frontend.files
const filePaths = frontend.files?.map((f: any) => f.path) ?? [];
// Count the number of chunk files (lazy chunks are named chunk-xxx.js)
const chunkCount = filePaths.filter((p: string) =>
p.includes("chunk-")
).length;
// There should be at least 2 chunks:
// 1. The main app entry chunk
// 2. The lazy-loaded chunk from the dynamic import
console.log("CHUNK_COUNT:" + chunkCount);
console.log("FILES:" + filePaths.join(","));
// Exit immediately after printing
process.exit(0);
`,
"client.html": `<!DOCTYPE html>
<html>
<head>
<script type="module" src="./main.js"></script>
</head>
<body></body>
</html>`,
"main.js": `
// Dynamic import creates a lazy chunk
const lazyMod = () => import("./lazy.js");
lazyMod().then(m => m.hello());
`,
"lazy.js": `
export function hello() {
console.log("Hello from lazy module!");
}
`,
});
// Build with splitting and compile
await using buildProc = Bun.spawn({
cmd: [bunExe(), "build", "--compile", "server.ts", "--splitting", "--outfile", "server"],
cwd: String(dir),
env: bunEnv,
stdout: "pipe",
stderr: "pipe",
});
const [buildStdout, buildStderr, buildExitCode] = await Promise.all([
buildProc.stdout.text(),
buildProc.stderr.text(),
buildProc.exited,
]);
expect(buildStderr).not.toContain("error:");
expect(buildExitCode).toBe(0);
// Run the compiled executable
const serverPath = isWindows ? "server.exe" : "./server";
await using runProc = Bun.spawn({
cmd: [serverPath],
cwd: String(dir),
env: bunEnv,
stdout: "pipe",
stderr: "pipe",
});
const [runStdout, runStderr, runExitCode] = await Promise.all([
runProc.stdout.text(),
runProc.stderr.text(),
runProc.exited,
]);
// There should be at least 2 chunk files in frontend.files:
// one for the main entry and one for the lazy-loaded module
expect(runStdout).toMatch(/CHUNK_COUNT:[2-9]/);
expect(runExitCode).toBe(0);
});

View File

@@ -0,0 +1,224 @@
import { expect, test } from "bun:test";
import { bunEnv, bunExe, tempDir } from "harness";
// https://github.com/oven-sh/bun/issues/25648
// Named function expression names should be renamed when they shadow an outer symbol
// that's referenced inside the function body. This prevents infinite recursion.
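// Essence of the fix (sketch, identifiers illustrative): when the inner name shadows an
// outer symbol that the body references, the bundler must rename the expression's name, e.g.
//   $.doSomething(function get() { return $.get(123); })
// must not collapse into
//   doSomething(function get() { return get(123); })   // infinite recursion
// but instead into something like
//   doSomething(function get2() { return get(123); })  // still calls the imported helper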
test("named function expression should be renamed when shadowing outer symbol", async () => {
using dir = tempDir("issue-25648", {
"lib.ts": `
export function get(x: number) {
return x * 2;
}
export function doSomething(fn: () => number) {
return fn();
}
`,
"index.ts": `
import * as $ from './lib';
export function test() {
return $.doSomething(function get() {
return $.get(123); // This should reference the outer get, not the function expression
});
}
console.log(test());
`,
});
// Bundle and run the code
await using buildProc = Bun.spawn({
cmd: [bunExe(), "build", "index.ts", "--bundle", "--outfile=out.js"],
env: bunEnv,
cwd: String(dir),
stdout: "pipe",
stderr: "pipe",
});
const [buildStdout, buildStderr, buildExitCode] = await Promise.all([
buildProc.stdout.text(),
buildProc.stderr.text(),
buildProc.exited,
]);
expect(buildStderr).toBe("");
expect(buildExitCode).toBe(0);
// Run the bundled output
await using runProc = Bun.spawn({
cmd: [bunExe(), "out.js"],
env: bunEnv,
cwd: String(dir),
stdout: "pipe",
stderr: "pipe",
});
const [runStdout, runStderr, runExitCode] = await Promise.all([
runProc.stdout.text(),
runProc.stderr.text(),
runProc.exited,
]);
// Should print 246 (123 * 2), NOT cause infinite recursion
expect(runStdout.trim()).toBe("246");
expect(runStderr).toBe("");
expect(runExitCode).toBe(0);
});
test("named function expression with namespace import should not cause infinite recursion", async () => {
using dir = tempDir("issue-25648-2", {
"svelte-mock.ts": `
export function get<T>(store: { value: T }): T {
return store.value;
}
export function set<T>(store: { value: T }, value: T) {
store.value = value;
}
export function bind_value(
element: HTMLElement,
get_fn: () => string,
set_fn: (value: string) => void
) {
return get_fn();
}
`,
"index.ts": `
import * as $ from './svelte-mock';
const query = { value: "hello" };
// This pattern is generated by the Svelte compiler in dev mode
const result = $.bind_value(
{} as HTMLElement,
function get() {
return $.get(query); // Should call outer $.get, not this function
},
function set($$value: string) {
$.set(query, $$value);
}
);
console.log(result);
`,
});
// Bundle and run the code
await using buildProc = Bun.spawn({
cmd: [bunExe(), "build", "index.ts", "--bundle", "--outfile=out.js"],
env: bunEnv,
cwd: String(dir),
stdout: "pipe",
stderr: "pipe",
});
const [buildStdout, buildStderr, buildExitCode] = await Promise.all([
buildProc.stdout.text(),
buildProc.stderr.text(),
buildProc.exited,
]);
expect(buildStderr).toBe("");
expect(buildExitCode).toBe(0);
// Run the bundled output
await using runProc = Bun.spawn({
cmd: [bunExe(), "out.js"],
env: bunEnv,
cwd: String(dir),
stdout: "pipe",
stderr: "pipe",
});
const [runStdout, runStderr, runExitCode] = await Promise.all([
runProc.stdout.text(),
runProc.stderr.text(),
runProc.exited,
]);
// Should print "hello", NOT cause "Maximum call stack size exceeded"
expect(runStdout.trim()).toBe("hello");
expect(runStderr).toBe("");
expect(runExitCode).toBe(0);
});
test("class expression name should be renamed when shadowing outer symbol", async () => {
using dir = tempDir("issue-25648-3", {
"lib.ts": `
export class Foo {
value = 42;
}
export function makeThing<T>(cls: new () => T): T {
return new cls();
}
`,
"index.ts": `
import * as $ from './lib';
export function test() {
return $.makeThing(class Foo extends $.Foo {
getValue() {
return this.value;
}
// Self-reference: uses the inner class name Foo
static create() {
return new Foo();
}
clone() {
return new Foo();
}
});
}
const instance = test();
console.log(instance.getValue());
// Test self-referencing static method
console.log((instance.constructor as any).create().getValue());
// Test self-referencing instance method
console.log(instance.clone().getValue());
`,
});
// Bundle and run the code
await using buildProc = Bun.spawn({
cmd: [bunExe(), "build", "index.ts", "--bundle", "--outfile=out.js"],
env: bunEnv,
cwd: String(dir),
stdout: "pipe",
stderr: "pipe",
});
const [buildStdout, buildStderr, buildExitCode] = await Promise.all([
buildProc.stdout.text(),
buildProc.stderr.text(),
buildProc.exited,
]);
expect(buildStderr).toBe("");
expect(buildExitCode).toBe(0);
// Run the bundled output
await using runProc = Bun.spawn({
cmd: [bunExe(), "out.js"],
env: bunEnv,
cwd: String(dir),
stdout: "pipe",
stderr: "pipe",
});
const [runStdout, runStderr, runExitCode] = await Promise.all([
runProc.stdout.text(),
runProc.stderr.text(),
runProc.exited,
]);
// Should print 42 three times (getValue, static create().getValue, clone().getValue)
expect(runStdout.trim()).toBe("42\n42\n42");
expect(runStderr).toBe("");
expect(runExitCode).toBe(0);
});

View File

@@ -0,0 +1,106 @@
import { expect, test } from "bun:test";
import { tempDir } from "harness";
// Regression test for https://github.com/oven-sh/bun/issues/25785
// CSS logical border-radius properties were being silently dropped
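// e.g. a rule like `.x { border-start-start-radius: 0.75rem; }` must still yield a
// border-radius declaration in the bundled output (possibly compiled to a physical
// property such as border-top-left-radius) rather than being dropped entirely.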
test("CSS bundler should preserve logical border-radius properties", async () => {
using dir = tempDir("issue-25785", {
"test.css": `
.test1 {
border-start-start-radius: 0.75rem;
}
.test2 {
border-end-start-radius: 0.75rem;
}
.test3 {
border-start-end-radius: 0.75rem;
}
.test4 {
border-end-end-radius: 0.75rem;
}
.test5 {
border-top-left-radius: 0.75rem;
}
`,
});
const result = await Bun.build({
entrypoints: [`${dir}/test.css`],
outdir: `${dir}/dist`,
experimentalCss: true,
minify: false,
});
expect(result.success).toBe(true);
expect(result.outputs.length).toBe(1);
const output = await result.outputs[0].text();
// Logical properties are compiled to physical properties with LTR/RTL rules
// .test1 with border-start-start-radius compiles to border-top-left-radius (LTR) and border-top-right-radius (RTL)
expect(output).toContain(".test1");
expect(output).toContain("border-top-left-radius");
expect(output).toContain("border-top-right-radius");
// .test2 with border-end-start-radius compiles to border-bottom-left-radius (LTR) and border-bottom-right-radius (RTL)
expect(output).toContain(".test2");
expect(output).toContain("border-bottom-left-radius");
expect(output).toContain("border-bottom-right-radius");
// .test3 with border-start-end-radius
expect(output).toContain(".test3");
// .test4 with border-end-end-radius
expect(output).toContain(".test4");
// Physical property should also be preserved
expect(output).toContain(".test5");
});
test("CSS bundler should handle logical border-radius with targets that compile logical properties", async () => {
using dir = tempDir("issue-25785-compiled", {
"test.css": `
.test1 {
border-start-start-radius: 0.75rem;
}
.test2 {
border-end-start-radius: 0.75rem;
}
.test3 {
border-start-end-radius: 0.75rem;
}
.test4 {
border-end-end-radius: 0.75rem;
}
`,
});
const result = await Bun.build({
entrypoints: [`${dir}/test.css`],
outdir: `${dir}/dist`,
experimentalCss: true,
minify: false,
// Browser target, where logical properties may be compiled down to physical fallbacks
target: "browser",
});
expect(result.success).toBe(true);
expect(result.outputs.length).toBe(1);
const output = await result.outputs[0].text();
// When logical properties are compiled down, they should produce physical properties
// with :lang() selectors to handle LTR/RTL
// At minimum, the output should NOT be empty (the bug caused empty output)
expect(output.trim().length).toBeGreaterThan(0);
// Should have some border-radius output (compiled to physical)
expect(output).toMatch(/border-.*-radius/);
// All classes should be present in the output
expect(output).toContain(".test1");
expect(output).toContain(".test2");
expect(output).toContain(".test3");
expect(output).toContain(".test4");
});

View File

@@ -0,0 +1,169 @@
import { expect, test } from "bun:test";
import { bunEnv, bunExe, tempDir } from "harness";
test("ls -l shows long listing format", async () => {
// Create temp directory with test files
using dir = tempDir("ls-long-listing", {
"file.txt": "hello world",
"script.sh": "#!/bin/bash\necho hello",
subdir: {
"nested.txt": "nested content",
},
});
// Run ls -l in the temp directory
await using proc = Bun.spawn({
cmd: [
bunExe(),
"-e",
`
import { $ } from "bun";
$.cwd("${String(dir).replace(/\\/g, "\\\\")}");
const result = await $\`ls -l\`.text();
console.log(result);
`,
],
env: bunEnv,
stdout: "pipe",
stderr: "pipe",
});
const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
// Verify no errors on stderr
expect(stderr).toBe("");
// Should show permission string (starts with - or d, followed by rwx/sStT permissions)
// Format: -rw-r--r-- 1 uid gid size date name
expect(stdout).toMatch(/^[-dlbcps][-rwxsStT]{9}/m); // Permission string pattern
expect(stdout).toContain("file.txt");
expect(stdout).toContain("script.sh");
expect(stdout).toContain("subdir");
// Verify that it's actually showing long format (contains size and date info)
// Long format has at least permissions, link count, uid, gid, size, date, name
const lines = stdout
.trim()
.split("\n")
.filter(line => line.includes("file.txt"));
expect(lines.length).toBeGreaterThan(0);
// Each line should have multiple space-separated fields
const fileLine = lines[0];
const fields = fileLine.trim().split(/\s+/);
expect(fields.length).toBeGreaterThanOrEqual(7); // perms, nlink, uid, gid, size, date fields, name
expect(exitCode).toBe(0);
});
test("ls without -l shows short format", async () => {
using dir = tempDir("ls-short-listing", {
"file1.txt": "content1",
"file2.txt": "content2",
});
await using proc = Bun.spawn({
cmd: [
bunExe(),
"-e",
`
import { $ } from "bun";
$.cwd("${String(dir).replace(/\\/g, "\\\\")}");
const result = await $\`ls\`.text();
console.log(result);
`,
],
env: bunEnv,
stdout: "pipe",
stderr: "pipe",
});
const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
// Verify no errors on stderr
expect(stderr).toBe("");
// Short format should just show filenames, not permission strings
expect(stdout).not.toMatch(/^[-dlbcps][-rwxsStT]{9}/m);
expect(stdout).toContain("file1.txt");
expect(stdout).toContain("file2.txt");
expect(exitCode).toBe(0);
});
test("ls -al shows hidden files in long format", async () => {
using dir = tempDir("ls-all-long", {
".hidden": "hidden content",
"visible.txt": "visible content",
});
await using proc = Bun.spawn({
cmd: [
bunExe(),
"-e",
`
import { $ } from "bun";
$.cwd("${String(dir).replace(/\\/g, "\\\\")}");
const result = await $\`ls -al\`.text();
console.log(result);
`,
],
env: bunEnv,
stdout: "pipe",
stderr: "pipe",
});
const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
// Verify no errors on stderr
expect(stderr).toBe("");
// Should show hidden files
expect(stdout).toContain(".hidden");
expect(stdout).toContain("visible.txt");
// Should also show . and .. entries
expect(stdout).toMatch(/^d[-rwxsStT]{9}.*\s\.$/m); // . directory
expect(stdout).toMatch(/^d[-rwxsStT]{9}.*\s\.\.$/m); // .. directory
// Should be in long format
expect(stdout).toMatch(/^[-dlbcps][-rwxsStT]{9}/m);
expect(exitCode).toBe(0);
});
test("ls -l shows directory type indicator", async () => {
using dir = tempDir("ls-dir-type", {
"regular-file.txt": "content",
subdir: {
"nested.txt": "nested",
},
});
await using proc = Bun.spawn({
cmd: [
bunExe(),
"-e",
`
import { $ } from "bun";
$.cwd("${String(dir).replace(/\\/g, "\\\\")}");
const result = await $\`ls -l\`.text();
console.log(result);
`,
],
env: bunEnv,
stdout: "pipe",
stderr: "pipe",
});
const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
// Verify no errors on stderr
expect(stderr).toBe("");
// Directory should start with 'd'
expect(stdout).toMatch(/^d[-rwxsStT]{9}.*subdir$/m);
// Regular file should start with '-'
expect(stdout).toMatch(/^-[-rwxsStT]{9}.*regular-file\.txt$/m);
expect(exitCode).toBe(0);
});

View File

@@ -0,0 +1,133 @@
import { SQL, randomUUIDv7 } from "bun";
import { beforeEach, expect, test } from "bun:test";
import { describeWithContainer } from "harness";
describeWithContainer(
"mysql",
{
image: "mysql_plain",
env: {},
args: [],
},
container => {
const getOptions = () => ({
url: `mysql://root@${container.host}:${container.port}/bun_sql_test`,
max: 1,
bigint: true,
});
beforeEach(async () => {
await container.ready;
});
// Regression test for https://github.com/oven-sh/bun/issues/26030
// Bun hangs when executing multiple sequential MySQL transactions in a loop where:
// 1. An INSERT is awaited inside the transaction callback
// 2. A SELECT query (e.g., SELECT LAST_INSERT_ID()) is returned as an array without being awaited
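// Minimal shape of the pattern (sketch, mirrors the loop body below):
//   const [[row]] = await sql.begin(async tx => {
//     await tx`INSERT ...`;                          // awaited inside the callback
//     return [tx`SELECT LAST_INSERT_ID() as id`];    // returned without being awaited
//   });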
test("Sequential transactions with INSERT and returned SELECT should not hang", async () => {
await using sql = new SQL(getOptions());
const random_name = ("t_" + randomUUIDv7("hex").replaceAll("-", "")).toLowerCase();
// Create a table similar to the reproduction case
await sql`CREATE TABLE IF NOT EXISTS ${sql(random_name)} (
id INT AUTO_INCREMENT PRIMARY KEY,
contract_name VARCHAR(255),
amount INT
)`;
try {
const rows = [
{ contract_name: "Contract A", amount: 100000 },
{ contract_name: "Contract B", amount: 200000 },
{ contract_name: "Contract C", amount: 300000 },
];
const contractIds: number[] = [];
for (const row of rows) {
// This is the pattern from the bug report:
// - INSERT is awaited
// - SELECT LAST_INSERT_ID() is returned as array (not awaited individually)
const [[result]] = await sql.begin(async tx => {
await tx`
INSERT INTO ${sql(random_name)} (contract_name, amount)
VALUES (${row.contract_name}, ${row.amount})
`;
// Return array with non-awaited query - this triggers the hang
return [tx`SELECT LAST_INSERT_ID() as id`];
});
contractIds.push(Number(result.id));
}
// Verify all transactions completed
expect(contractIds.length).toBe(3);
expect(contractIds[0]).toBe(1);
expect(contractIds[1]).toBe(2);
expect(contractIds[2]).toBe(3);
// Verify data in database
const count = await sql`SELECT COUNT(*) as count FROM ${sql(random_name)}`;
expect(Number(count[0].count)).toBe(3);
} finally {
await sql`DROP TABLE IF EXISTS ${sql(random_name)}`;
}
});
test("Sequential transactions with returned array of multiple queries", async () => {
await using sql = new SQL(getOptions());
const random_name = ("t_" + randomUUIDv7("hex").replaceAll("-", "")).toLowerCase();
await sql`CREATE TABLE IF NOT EXISTS ${sql(random_name)} (
id INT AUTO_INCREMENT PRIMARY KEY,
value INT
)`;
try {
for (let i = 0; i < 3; i++) {
const results = await sql.begin(async tx => {
await tx`INSERT INTO ${sql(random_name)} (value) VALUES (${i * 10})`;
// Return multiple queries as array
return [tx`SELECT LAST_INSERT_ID() as id`, tx`SELECT COUNT(*) as count FROM ${sql(random_name)}`];
});
expect(results.length).toBe(2);
}
const count = await sql`SELECT COUNT(*) as count FROM ${sql(random_name)}`;
expect(Number(count[0].count)).toBe(3);
} finally {
await sql`DROP TABLE IF EXISTS ${sql(random_name)}`;
}
});
test("Many sequential transactions with awaited INSERT and returned SELECT", async () => {
await using sql = new SQL(getOptions());
const random_name = ("t_" + randomUUIDv7("hex").replaceAll("-", "")).toLowerCase();
await sql`CREATE TABLE IF NOT EXISTS ${sql(random_name)} (
id INT AUTO_INCREMENT PRIMARY KEY,
name VARCHAR(255)
)`;
try {
// Multiple sequential transactions with awaited INSERT and returned SELECT
for (let i = 0; i < 5; i++) {
const [[result]] = await sql.begin(async tx => {
// First insert
await tx`INSERT INTO ${sql(random_name)} (name) VALUES (${"item_" + i})`;
// Return array with SELECT
return [tx`SELECT LAST_INSERT_ID() as id`];
});
expect(Number(result.id)).toBe(i + 1);
}
const count = await sql`SELECT COUNT(*) as count FROM ${sql(random_name)}`;
expect(Number(count[0].count)).toBe(5);
} finally {
await sql`DROP TABLE IF EXISTS ${sql(random_name)}`;
}
});
},
);

View File

@@ -0,0 +1,140 @@
import { SQL, randomUUIDv7 } from "bun";
import { afterAll, beforeAll, expect, test } from "bun:test";
import { describeWithContainer, isDockerEnabled } from "harness";
// Regression test for https://github.com/oven-sh/bun/issues/26063
// MySQL VARCHAR columns with binary collations (like utf8mb4_bin) were incorrectly
// returned as Buffer instead of string since version 1.3.6.
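// Minimal shape of the regression (sketch): for a column declared as
//   id VARCHAR(32) COLLATE utf8mb4_bin
// `typeof row.id` should be "string", not a Buffer; only true binary types
// (BINARY/VARBINARY/BLOB) should come back as Buffers, as the tests below check.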
if (isDockerEnabled()) {
describeWithContainer(
"issue #26063: VARCHAR with binary collation returns Buffer instead of string",
{
image: "mysql_plain",
concurrent: true,
},
container => {
let sql: SQL;
beforeAll(async () => {
await container.ready;
sql = new SQL({
url: `mysql://root@${container.host}:${container.port}/bun_sql_test`,
max: 1,
});
});
afterAll(async () => {
await sql.close();
});
test("VARCHAR with utf8mb4_bin collation should return string (binary protocol)", async () => {
const tableName = "test_" + randomUUIDv7("hex").replaceAll("-", "");
await sql`
CREATE TEMPORARY TABLE ${sql(tableName)} (
id VARCHAR(32) COLLATE utf8mb4_bin NOT NULL,
PRIMARY KEY (id)
) ENGINE=InnoDB DEFAULT CHARACTER SET=utf8mb4 COLLATE=utf8mb4_unicode_ci
`;
await sql`INSERT INTO ${sql(tableName)} ${sql([{ id: "1" }, { id: "2" }])}`;
const result = await sql`SELECT * FROM ${sql(tableName)}`;
// Should return strings, not Buffers
expect(typeof result[0].id).toBe("string");
expect(typeof result[1].id).toBe("string");
expect(result[0].id).toBe("1");
expect(result[1].id).toBe("2");
});
test("VARCHAR with utf8mb4_bin collation should return string (text protocol)", async () => {
const tableName = "test_" + randomUUIDv7("hex").replaceAll("-", "");
await sql`
CREATE TEMPORARY TABLE ${sql(tableName)} (
id VARCHAR(32) COLLATE utf8mb4_bin NOT NULL,
PRIMARY KEY (id)
) ENGINE=InnoDB DEFAULT CHARACTER SET=utf8mb4 COLLATE=utf8mb4_unicode_ci
`;
await sql`INSERT INTO ${sql(tableName)} ${sql([{ id: "1" }, { id: "2" }])}`;
// Use .simple() to force text protocol
const result = await sql`SELECT * FROM ${sql(tableName)}`.simple();
// Should return strings, not Buffers
expect(typeof result[0].id).toBe("string");
expect(typeof result[1].id).toBe("string");
expect(result[0].id).toBe("1");
expect(result[1].id).toBe("2");
});
test("CHAR with utf8mb4_bin collation should return string", async () => {
const tableName = "test_" + randomUUIDv7("hex").replaceAll("-", "");
await sql`
CREATE TEMPORARY TABLE ${sql(tableName)} (
code CHAR(10) COLLATE utf8mb4_bin NOT NULL
)
`;
await sql`INSERT INTO ${sql(tableName)} VALUES (${"ABC"})`;
const result = await sql`SELECT * FROM ${sql(tableName)}`;
const resultSimple = await sql`SELECT * FROM ${sql(tableName)}`.simple();
// Should return strings, not Buffers
expect(typeof result[0].code).toBe("string");
expect(typeof resultSimple[0].code).toBe("string");
});
test("TEXT with utf8mb4_bin collation should return string", async () => {
const tableName = "test_" + randomUUIDv7("hex").replaceAll("-", "");
await sql`
CREATE TEMPORARY TABLE ${sql(tableName)} (
content TEXT COLLATE utf8mb4_bin
)
`;
await sql`INSERT INTO ${sql(tableName)} VALUES (${"Hello, World!"})`;
const result = await sql`SELECT * FROM ${sql(tableName)}`;
const resultSimple = await sql`SELECT * FROM ${sql(tableName)}`.simple();
// Should return strings, not Buffers
expect(typeof result[0].content).toBe("string");
expect(result[0].content).toBe("Hello, World!");
expect(typeof resultSimple[0].content).toBe("string");
expect(resultSimple[0].content).toBe("Hello, World!");
});
test("true BINARY/VARBINARY columns should still return Buffer", async () => {
const tableName = "test_" + randomUUIDv7("hex").replaceAll("-", "");
await sql`
CREATE TEMPORARY TABLE ${sql(tableName)} (
a BINARY(4),
b VARBINARY(10),
c BLOB
)
`;
await sql`INSERT INTO ${sql(tableName)} VALUES (${Buffer.from([1, 2, 3, 4])}, ${Buffer.from([5, 6])}, ${Buffer.from([7, 8, 9])})`;
const result = await sql`SELECT * FROM ${sql(tableName)}`;
const resultSimple = await sql`SELECT * FROM ${sql(tableName)}`.simple();
// True binary types should return Buffers
expect(Buffer.isBuffer(result[0].a)).toBe(true);
expect(Buffer.isBuffer(result[0].b)).toBe(true);
expect(Buffer.isBuffer(result[0].c)).toBe(true);
expect(Buffer.isBuffer(resultSimple[0].a)).toBe(true);
expect(Buffer.isBuffer(resultSimple[0].b)).toBe(true);
expect(Buffer.isBuffer(resultSimple[0].c)).toBe(true);
});
},
);
}