Compare commits

..

2 Commits

Author SHA1 Message Date
Jarred Sumner
1a88df0cda Add crash reporting for fs_watch and fs_watchfile 2026-02-18 21:01:55 -08:00
Jarred Sumner
dc4dd3aa5b fix(fs): create owned copies of watch event paths on Windows (#27099)
`onPathUpdateWindows` stored the `Event` directly from
`PathWatcherManager.onFileUpdate` without creating an owned copy. The
event's path was a `[]const u8` sub-slice of the watchlist's storage,
auto-coerced to `StringOrBytesToDecode{.bytes_to_free}`. When
`FSWatchTaskWindows.run()` later accessed `path.string` (for utf8
encoding), it reinterpreted the raw pointer bytes as a `bun.String`,
whose `tag` field contained the LSB of a heap pointer — almost always
an invalid enum value, causing `panic: switch on corrupt value` in
`String.deref()`.

The original implementation in #9972 properly created owned copies per
encoding (`bun.String.createUTF8` for utf8, `allocator.dupeZ` for
others), but this was lost during the watcher refactor in #10492.

Restore correct ownership by creating a `bun.String` for utf8 encoding
or a duped `[]const u8` for other encodings before enqueuing the task.

Closes #27099, closes #27108.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-18 20:55:57 -08:00
48 changed files with 275 additions and 1315 deletions

View File

@@ -1,30 +0,0 @@
name: Close stale robobun PRs
on:
schedule:
- cron: "30 0 * * *"
workflow_dispatch:
jobs:
close-stale-robobun-prs:
runs-on: ubuntu-latest
timeout-minutes: 10
permissions:
pull-requests: write
steps:
- name: Close stale robobun PRs
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GH_REPO: ${{ github.repository }}
run: |
ninety_days_ago=$(date -u -d '90 days ago' +%Y-%m-%dT%H:%M:%SZ)
gh pr list \
--author robobun \
--state open \
--json number,updatedAt \
--limit 1000 \
--jq ".[] | select(.updatedAt < \"$ninety_days_ago\") | .number" |
while read -r pr_number; do
echo "Closing PR #$pr_number (last updated before $ninety_days_ago)"
gh pr close "$pr_number" --comment "Closing this PR because it has been inactive for more than 90 days."
done

View File

@@ -91,6 +91,8 @@ pub const Features = struct {
pub var yaml_parse: usize = 0;
pub var cpu_profile: usize = 0;
pub var heap_snapshot: usize = 0;
pub var fs_watch: usize = 0;
pub var fs_watchfile: usize = 0;
comptime {
@export(&napi_module_register, .{ .name = "Bun__napi_module_register_count" });

View File

@@ -726,7 +726,7 @@ pub const AsyncModule = struct {
return ResolvedSource{
.allocator = null,
.source_code = bun.String.cloneUTF8(printer.ctx.getWritten()),
.source_code = bun.String.cloneLatin1(printer.ctx.getWritten()),
.specifier = String.init(specifier),
.source_url = String.init(path.text),
.is_commonjs_module = parse_result.ast.has_commonjs_export_names or parse_result.ast.exports_kind == .cjs,

View File

@@ -387,7 +387,7 @@ pub fn transpileSourceCode(
const bytecode_slice = parse_result.already_bundled.bytecodeSlice();
return ResolvedSource{
.allocator = null,
.source_code = bun.String.cloneUTF8(source.contents),
.source_code = bun.String.cloneLatin1(source.contents),
.specifier = input_specifier,
.source_url = input_specifier.createIfDifferent(path.text),
.already_bundled = true,
@@ -570,7 +570,7 @@ pub fn transpileSourceCode(
.allocator = null,
.source_code = brk: {
const written = printer.ctx.getWritten();
const result = cache.output_code orelse bun.String.cloneUTF8(written);
const result = cache.output_code orelse bun.String.cloneLatin1(written);
if (written.len > 1024 * 1024 * 2 or jsc_vm.smol) {
printer.ctx.buffer.deinit();

View File

@@ -15,8 +15,7 @@
/// Version 16: Added typeof undefined minification optimization.
/// Version 17: Removed transpiler import rewrite for bun:test. Not bumping it causes test/js/bun/http/req-url-leak.test.ts to fail with SyntaxError: Export named 'expect' not found in module 'bun:test'.
/// Version 18: Include ESM record (module info) with an ES Module, see #15758
/// Version 19: Emits utf-8 files in rare cases (tagged templates, regex with unicode)
const expected_version = 19;
const expected_version = 18;
const debug = Output.scoped(.cache, .visible);
const MINIMUM_CACHE_SIZE = 50 * 1024;
@@ -687,7 +686,7 @@ pub const RuntimeTranspilerCache = struct {
return;
}
bun.assert(this.entry == null);
const output_code = bun.String.cloneUTF8(output_code_bytes);
const output_code = bun.String.cloneLatin1(output_code_bytes);
this.output_code = output_code;
toFile(this.input_byte_length.?, this.input_hash.?, this.features_hash.?, sourcemap, esm_record, output_code, this.exports_kind) catch |err| {
@@ -695,7 +694,7 @@ pub const RuntimeTranspilerCache = struct {
return;
};
if (comptime bun.Environment.allow_assert)
debug("put() = {d} bytes", .{output_code.length()});
debug("put() = {d} bytes", .{output_code.latin1().len});
}
};

View File

@@ -500,7 +500,7 @@ pub const RuntimeTranspilerStore = struct {
const bytecode_slice = parse_result.already_bundled.bytecodeSlice();
this.resolved_source = ResolvedSource{
.allocator = null,
.source_code = bun.String.cloneUTF8(parse_result.source.contents),
.source_code = bun.String.cloneLatin1(parse_result.source.contents),
.already_bundled = true,
.bytecode_cache = if (bytecode_slice.len > 0) bytecode_slice.ptr else null,
.bytecode_cache_size = bytecode_slice.len,
@@ -577,7 +577,7 @@ pub const RuntimeTranspilerStore = struct {
const source_code = brk: {
const written = printer.ctx.getWritten();
const result = cache.output_code orelse bun.String.cloneUTF8(written);
const result = cache.output_code orelse bun.String.cloneLatin1(written);
if (written.len > 1024 * 1024 * 2 or vm.smol) {
printer.ctx.buffer.deinit();

View File

@@ -1088,7 +1088,7 @@ pub const WindowsSpawnOptions = struct {
pub fn deinit(this: *const Stdio) void {
if (this.* == .buffer) {
this.buffer.closeAndDestroy();
bun.default_allocator.destroy(this.buffer);
}
}
};

View File

@@ -1,6 +1,5 @@
const WindowsNamedPipeContext = @This();
ref_count: RefCount,
named_pipe: uws.WindowsNamedPipe,
socket: SocketType,
@@ -11,14 +10,6 @@ task: jsc.AnyTask,
task_event: EventState = .none,
is_open: bool = false,
const RefCount = bun.ptr.RefCount(@This(), "ref_count", scheduleDeinit, .{});
pub const ref = RefCount.ref;
pub const deref = RefCount.deref;
fn scheduleDeinit(this: *WindowsNamedPipeContext) void {
this.deinitInNextTick();
}
pub const EventState = enum(u8) {
deinit,
none,
@@ -157,7 +148,7 @@ fn onClose(this: *WindowsNamedPipeContext) void {
.none => {},
}
this.deref();
this.deinitInNextTick();
}
fn runEvent(this: *WindowsNamedPipeContext) void {
@@ -178,7 +169,6 @@ fn deinitInNextTick(this: *WindowsNamedPipeContext) void {
pub fn create(globalThis: *jsc.JSGlobalObject, socket: SocketType) *WindowsNamedPipeContext {
const vm = globalThis.bunVM();
const this = WindowsNamedPipeContext.new(.{
.ref_count = .init(),
.vm = vm,
.globalThis = globalThis,
.task = undefined,
@@ -189,8 +179,6 @@ pub fn create(globalThis: *jsc.JSGlobalObject, socket: SocketType) *WindowsNamed
// named_pipe owns the pipe (PipeWriter owns the pipe and will close and deinit it)
this.named_pipe = uws.WindowsNamedPipe.from(bun.handleOom(bun.default_allocator.create(uv.Pipe)), .{
.ctx = this,
.ref_ctx = @ptrCast(&WindowsNamedPipeContext.ref),
.deref_ctx = @ptrCast(&WindowsNamedPipeContext.deref),
.onOpen = @ptrCast(&WindowsNamedPipeContext.onOpen),
.onData = @ptrCast(&WindowsNamedPipeContext.onData),
.onHandshake = @ptrCast(&WindowsNamedPipeContext.onHandshake),
@@ -230,7 +218,7 @@ pub fn open(globalThis: *jsc.JSGlobalObject, fd: bun.FileDescriptor, ssl_config:
},
.none => {},
}
this.deref();
this.deinitInNextTick();
}
try this.named_pipe.open(fd, ssl_config).unwrap();
return &this.named_pipe;
@@ -250,7 +238,7 @@ pub fn connect(globalThis: *jsc.JSGlobalObject, path: []const u8, ssl_config: ?j
},
.none => {},
}
this.deref();
this.deinitInNextTick();
}
if (path[path.len - 1] == 0) {

View File

@@ -1164,7 +1164,6 @@ void JSCommonJSModule::visitChildrenImpl(JSCell* cell, Visitor& visitor)
visitor.appendHidden(thisObject->m_dirname);
visitor.appendHidden(thisObject->m_paths);
visitor.appendHidden(thisObject->m_overriddenParent);
visitor.appendHidden(thisObject->m_overriddenCompile);
visitor.appendHidden(thisObject->m_childrenValue);
visitor.appendValues(thisObject->m_children.begin(), thisObject->m_children.size());
}

View File

@@ -703,17 +703,6 @@ void NodeVMSpecialSandbox::finishCreation(VM& vm)
const JSC::ClassInfo NodeVMSpecialSandbox::s_info = { "NodeVMSpecialSandbox"_s, &Base::s_info, nullptr, nullptr, CREATE_METHOD_TABLE(NodeVMSpecialSandbox) };
template<typename Visitor>
void NodeVMSpecialSandbox::visitChildrenImpl(JSCell* cell, Visitor& visitor)
{
auto* thisObject = jsCast<NodeVMSpecialSandbox*>(cell);
ASSERT_GC_OBJECT_INHERITS(thisObject, info());
Base::visitChildren(thisObject, visitor);
visitor.append(thisObject->m_parentGlobal);
}
DEFINE_VISIT_CHILDREN(NodeVMSpecialSandbox);
NodeVMGlobalObject::NodeVMGlobalObject(JSC::VM& vm, JSC::Structure* structure, NodeVMContextOptions contextOptions, JSValue importer)
: Base(vm, structure, &globalObjectMethodTable())
, m_dynamicImportCallback(vm, this, importer)

View File

@@ -85,7 +85,6 @@ public:
static NodeVMSpecialSandbox* create(VM& vm, Structure* structure, NodeVMGlobalObject* globalObject);
DECLARE_INFO;
DECLARE_VISIT_CHILDREN;
template<typename, JSC::SubspaceAccess mode> static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm);
static Structure* createStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::JSValue prototype);

View File

@@ -2450,20 +2450,13 @@ void JSC__JSObject__putRecord(JSC::JSObject* object, JSC::JSGlobalObject* global
descriptor.setValue(JSC::jsString(global->vm(), Zig::toStringCopy(values[0])));
} else {
// Pre-convert all strings to JSValues before entering ObjectInitializationScope,
// since jsString() allocates GC cells which is not allowed inside the scope.
MarkedArgumentBuffer strings;
for (size_t i = 0; i < valuesLen; ++i) {
strings.append(JSC::jsString(global->vm(), Zig::toStringCopy(values[i])));
}
JSC::JSArray* array = nullptr;
{
JSC::ObjectInitializationScope initializationScope(global->vm());
if ((array = JSC::JSArray::tryCreateUninitializedRestricted(initializationScope, nullptr, global->arrayStructureForIndexingTypeDuringAllocation(JSC::ArrayWithContiguous), valuesLen))) {
for (size_t i = 0; i < valuesLen; ++i) {
array->initializeIndexWithoutBarrier(initializationScope, i, strings.at(i));
array->initializeIndexWithoutBarrier(initializationScope, i, JSC::jsString(global->vm(), Zig::toStringCopy(values[i])));
}
}
}
@@ -2497,13 +2490,6 @@ void JSC__JSValue__putRecord(JSC::EncodedJSValue objectValue, JSC::JSGlobalObjec
descriptor.setValue(JSC::jsString(global->vm(), Zig::toString(values[0])));
} else {
// Pre-convert all strings to JSValues before entering ObjectInitializationScope,
// since jsString() allocates GC cells which is not allowed inside the scope.
MarkedArgumentBuffer strings;
for (size_t i = 0; i < valuesLen; ++i) {
strings.append(JSC::jsString(global->vm(), Zig::toString(values[i])));
}
JSC::JSArray* array = nullptr;
{
JSC::ObjectInitializationScope initializationScope(global->vm());
@@ -2514,7 +2500,7 @@ void JSC__JSValue__putRecord(JSC::EncodedJSValue objectValue, JSC::JSGlobalObjec
for (size_t i = 0; i < valuesLen; ++i) {
array->initializeIndexWithoutBarrier(
initializationScope, i, strings.at(i));
initializationScope, i, JSC::jsString(global->vm(), Zig::toString(values[i])));
}
}
}

View File

@@ -759,6 +759,8 @@ static void initializeColumnNames(JSC::JSGlobalObject* lexicalGlobalObject, JSSQ
// Slow path:
JSC::ObjectInitializationScope initializationScope(vm);
// 64 is the maximum we can preallocate here
// see https://github.com/oven-sh/bun/issues/987
JSObject* prototype = castedThis->userPrototype ? castedThis->userPrototype.get() : lexicalGlobalObject->objectPrototype();
@@ -2020,7 +2022,7 @@ JSC_DEFINE_HOST_FUNCTION(jsSQLStatementSetPrototypeFunction, (JSGlobalObject * l
return {};
}
castedThis->userPrototype.set(vm, castedThis, prototype.getObject());
castedThis->userPrototype.set(vm, classObject, prototype.getObject());
// Force the prototypes to be re-created
if (castedThis->version_db) {

View File

@@ -929,7 +929,7 @@ pub const SendQueue = struct {
return err;
};
ipc_pipe.open(pipe_fd).unwrap() catch |err| {
ipc_pipe.closeAndDestroy();
bun.default_allocator.destroy(ipc_pipe);
return err;
};
ipc_pipe.unref();

View File

@@ -486,9 +486,11 @@ pub const StatWatcher = struct {
/// After a restat found the file changed, this calls the listener function.
pub fn swapAndCallListenerOnMainThread(this: *StatWatcher) void {
bun.analytics.Features.fs_watchfile += 1;
defer this.deref(); // Balance the ref from restat().
const prev_jsvalue = this.last_jsvalue.swap();
const globalThis = this.globalThis;
const current_jsvalue = statToJSStats(globalThis, &this.getLastStat(), this.bigint) catch return; // TODO: properly propagate exception upwards
this.last_jsvalue.set(globalThis, current_jsvalue);

View File

@@ -261,6 +261,7 @@ pub const FSWatcher = struct {
pub fn onPathUpdatePosix(ctx: ?*anyopaque, event: Event, is_file: bool) void {
const this = bun.cast(*FSWatcher, ctx.?);
bun.analytics.Features.fs_watch += 1;
if (this.verbose) {
switch (event) {
@@ -281,6 +282,7 @@ pub const FSWatcher = struct {
pub fn onPathUpdateWindows(ctx: ?*anyopaque, event: Event, is_file: bool) void {
const this = bun.cast(*FSWatcher, ctx.?);
bun.analytics.Features.fs_watch += 1;
if (this.verbose) {
switch (event) {
@@ -299,9 +301,23 @@ pub const FSWatcher = struct {
return;
}
// The event's path comes from PathWatcherManager.onFileUpdate as a
// []const u8 sub-slice of the watchlist's file_path storage, auto-coerced
// to StringOrBytesToDecode{.bytes_to_free}. We must create a properly
// owned copy: either a bun.String for utf8 encoding or a duped []const u8
// for other encodings.
const owned_event: Event = switch (event) {
inline .rename, .change => |path, t| @unionInit(Event, @tagName(t), if (this.encoding == .utf8)
FSWatchTaskWindows.StringOrBytesToDecode{ .string = bun.String.cloneUTF8(path.bytes_to_free) }
else
FSWatchTaskWindows.StringOrBytesToDecode{ .bytes_to_free = bun.default_allocator.dupe(u8, path.bytes_to_free) catch return }),
.@"error" => |err| .{ .@"error" = err.clone(bun.default_allocator) },
inline else => |value, t| @unionInit(Event, @tagName(t), value),
};
const task = bun.new(FSWatchTaskWindows, .{
.ctx = this,
.event = event,
.event = owned_event,
});
this.eventLoop().enqueueTask(jsc.Task.init(task));
}

View File

@@ -587,12 +587,12 @@ pub const PathLike = union(enum) {
if (std.fs.path.isAbsolute(sliced)) {
if (sliced.len > 2 and bun.path.isDriveLetter(sliced[0]) and sliced[1] == ':' and bun.path.isSepAny(sliced[2])) {
// Add the long path syntax. This affects most of node:fs
// Normalize the path directly into buf without an intermediate
// buffer. The input (sliced) already has a drive letter, so
// resolveCWDWithExternalBufZ would just memcpy it, making the
// temporary allocation unnecessary.
const drive_resolve_buf = bun.path_buffer_pool.get();
defer bun.path_buffer_pool.put(drive_resolve_buf);
const rest = path_handler.PosixToWinNormalizer.resolveCWDWithExternalBufZ(drive_resolve_buf, sliced) catch @panic("Error while resolving path.");
buf[0..4].* = bun.windows.long_path_prefix_u8;
const n = bun.path.normalizeBuf(sliced, buf[4..], .windows).len;
// When long path syntax is used, the entire string should be normalized
const n = bun.path.normalizeBuf(rest, buf[4..], .windows).len;
buf[4 + n] = 0;
return buf[0 .. 4 + n :0];
}

View File

@@ -75,7 +75,6 @@ pub const PendingValue = struct {
onStartBuffering: ?*const fn (ctx: *anyopaque) void = null,
onStartStreaming: ?*const fn (ctx: *anyopaque) jsc.WebCore.DrainResult = null,
onReadableStreamAvailable: ?*const fn (ctx: *anyopaque, globalThis: *jsc.JSGlobalObject, readable: jsc.WebCore.ReadableStream) void = null,
onStreamCancelled: ?*const fn (ctx: ?*anyopaque) void = null,
size_hint: Blob.SizeType = 0,
deinit: bool = false,
@@ -496,13 +495,6 @@ pub const Value = union(Tag) {
.globalThis = globalThis,
});
if (locked.onStreamCancelled) |onCancelled| {
if (locked.task) |task| {
reader.cancel_handler = onCancelled;
reader.cancel_ctx = task;
}
}
reader.context.setup();
if (drain_result == .estimated_size) {
@@ -823,10 +815,16 @@ pub const Value = union(Tag) {
}
pub fn tryUseAsAnyBlob(this: *Value) ?AnyBlob {
if (this.* == .WTFStringImpl) {
if (this.WTFStringImpl.canUseAsUTF8()) {
return AnyBlob{ .WTFStringImpl = this.WTFStringImpl };
}
}
const any_blob: AnyBlob = switch (this.*) {
.Blob => .{ .Blob = this.Blob },
.InternalBlob => .{ .InternalBlob = this.InternalBlob },
.WTFStringImpl => |str| if (str.canUseAsUTF8()) .{ .WTFStringImpl = str } else return null,
.Blob => AnyBlob{ .Blob = this.Blob },
.InternalBlob => AnyBlob{ .InternalBlob = this.InternalBlob },
// .InlineBlob => AnyBlob{ .InlineBlob = this.InlineBlob },
.Locked => this.Locked.toAnyBlobAllowPromise() orelse return null,
else => return null,
};

View File

@@ -442,8 +442,6 @@ pub fn NewSource(
close_handler: ?*const fn (?*anyopaque) void = null,
close_ctx: ?*anyopaque = null,
close_jsvalue: jsc.Strong.Optional = .empty,
cancel_handler: ?*const fn (?*anyopaque) void = null,
cancel_ctx: ?*anyopaque = null,
globalThis: *JSGlobalObject = undefined,
this_jsvalue: jsc.JSValue = .zero,
is_closed: bool = false,
@@ -495,10 +493,6 @@ pub fn NewSource(
this.cancelled = true;
onCancel(&this.context);
if (this.cancel_handler) |handler| {
this.cancel_handler = null;
handler(this.cancel_ctx);
}
}
pub fn onClose(this: *This) void {

View File

@@ -231,7 +231,6 @@ pub const FetchTasklet = struct {
response.unref();
}
this.clearStreamCancelHandler();
this.readable_stream_ref.deinit();
this.scheduled_response_buffer.deinit();
@@ -364,7 +363,6 @@ pub const FetchTasklet = struct {
bun.default_allocator,
);
} else {
this.clearStreamCancelHandler();
var prev = this.readable_stream_ref;
this.readable_stream_ref = .{};
defer prev.deinit();
@@ -867,25 +865,6 @@ pub const FetchTasklet = struct {
};
}
/// Clear the cancel_handler on the ByteStream.Source to prevent use-after-free.
/// Must be called before releasing readable_stream_ref, while the Strong ref
/// still keeps the ReadableStream (and thus the ByteStream.Source) alive.
fn clearStreamCancelHandler(this: *FetchTasklet) void {
if (this.readable_stream_ref.get(this.global_this)) |readable| {
if (readable.ptr == .Bytes) {
const source = readable.ptr.Bytes.parent();
source.cancel_handler = null;
source.cancel_ctx = null;
}
}
}
fn onStreamCancelledCallback(ctx: ?*anyopaque) void {
const this = bun.cast(*FetchTasklet, ctx.?);
if (this.ignore_data) return;
this.ignoreRemainingResponseBody();
}
fn toBodyValue(this: *FetchTasklet) Body.Value {
if (this.getAbortError()) |err| {
return .{ .Error = err };
@@ -898,7 +877,6 @@ pub const FetchTasklet = struct {
.global = this.global_this,
.onStartStreaming = FetchTasklet.onStartStreamingHTTPResponseBodyCallback,
.onReadableStreamAvailable = FetchTasklet.onReadableStreamAvailable,
.onStreamCancelled = FetchTasklet.onStreamCancelledCallback,
},
};
return response;
@@ -952,8 +930,7 @@ pub const FetchTasklet = struct {
// we should not keep the process alive if we are ignoring the body
const vm = this.javascript_vm;
this.poll_ref.unref(vm);
// clean any remaining references
this.clearStreamCancelHandler();
// clean any remaining refereces
this.readable_stream_ref.deinit();
this.response.deinit();

View File

@@ -154,8 +154,10 @@ fn prepareCssAstsForChunkImpl(c: *LinkerContext, chunk: *Chunk, allocator: std.m
filter: {
// Filter out "@charset", "@import", and leading "@layer" rules
// TODO: we are doing simple version rn, only @import
for (ast.rules.v.items, 0..) |*rule, ruleidx| {
if (rule.* == .import or rule.* == .ignored or rule.* == .layer_statement) {} else {
// if ((rule.* == .import and import_records[source_index.get()].at(rule.import.import_record_idx).flags.is_internal) or rule.* == .ignored) {} else {
if (rule.* == .import or rule.* == .ignored) {} else {
// It's okay to do this because AST is allocated into arena
const reslice = ast.rules.v.items[ruleidx..];
ast.rules.v = .{

View File

@@ -452,14 +452,6 @@ pub fn CssRuleList(comptime AtRule: type) type {
}
bun.handleOom(rules.append(context.allocator, rule.*));
moved_rule = true;
// Non-style rules (e.g. @property, @keyframes) act as a barrier for
// style rule deduplication. We cannot safely merge identical style rules
// across such boundaries because the intervening at-rule may affect how
// the declarations are interpreted (e.g. @property defines a custom
// property that a :root rule above may set differently than one below).
style_rules.clearRetainingCapacity();
}
// MISSING SHIT HERE

View File

@@ -1414,17 +1414,6 @@ pub const Pipe = extern struct {
pub fn asStream(this: *@This()) *uv_stream_t {
return @ptrCast(this);
}
/// Close the pipe handle and then free it in the close callback.
/// Use this when a pipe has been init'd but needs to be destroyed
/// (e.g. when open() fails after init() succeeded).
pub fn closeAndDestroy(this: *@This()) void {
this.close(&onCloseDestroy);
}
fn onCloseDestroy(handle: *@This()) callconv(.c) void {
bun.default_allocator.destroy(handle);
}
};
const union_unnamed_416 = extern union {
fd: c_int,

View File

@@ -51,8 +51,6 @@ pub const Flags = packed struct(u8) {
};
pub const Handlers = struct {
ctx: *anyopaque,
ref_ctx: *const fn (*anyopaque) void,
deref_ctx: *const fn (*anyopaque) void,
onOpen: *const fn (*anyopaque) void,
onHandshake: *const fn (*anyopaque, bool, uws.us_bun_verify_error_t) void,
onData: *const fn (*anyopaque, []const u8) void,
@@ -273,16 +271,7 @@ pub fn from(
.handlers = handlers,
};
}
pub fn ref(this: *WindowsNamedPipe) void {
this.handlers.ref_ctx(this.handlers.ctx);
}
pub fn deref(this: *WindowsNamedPipe) void {
this.handlers.deref_ctx(this.handlers.ctx);
}
fn onConnect(this: *WindowsNamedPipe, status: uv.ReturnCode) void {
defer this.deref();
if (this.pipe) |pipe| {
_ = pipe.unref();
}
@@ -387,7 +376,6 @@ pub fn open(this: *WindowsNamedPipe, fd: bun.FileDescriptor, ssl_options: ?jsc.A
return openResult;
}
this.ref();
onConnect(this, uv.ReturnCode.zero);
return .success;
}
@@ -422,12 +410,7 @@ pub fn connect(this: *WindowsNamedPipe, path: []const u8, ssl_options: ?jsc.API.
}
this.connect_req.data = this;
const result = this.pipe.?.connect(&this.connect_req, path, this, onConnect);
if (result.asErr() != null) {
return result;
}
this.ref();
return result;
return this.pipe.?.connect(&this.connect_req, path, this, onConnect);
}
pub fn startTLS(this: *WindowsNamedPipe, ssl_options: jsc.API.ServerConfig.SSLConfig, is_client: bool) !void {
this.flags.is_ssl = true;

View File

@@ -694,8 +694,8 @@ pub fn PosixStreamingWriter(comptime Parent: type, comptime function_table: anyt
}
pub fn deinit(this: *PosixWriter) void {
this.closeWithoutReporting();
this.outgoing.deinit();
this.closeWithoutReporting();
}
pub fn hasRef(this: *PosixWriter) bool {
@@ -815,39 +815,29 @@ fn BaseWindowsPipeWriter(
pub fn close(this: *WindowsPipeWriter) void {
this.is_done = true;
const source = this.source orelse return;
// Check for in-flight file write before detaching. detach()
// nulls fs.data so onFsWriteComplete can't recover the writer
// to call deref(). We must balance processSend's ref() here.
const has_inflight_write = if (@hasField(WindowsPipeWriter, "current_payload")) switch (source) {
.sync_file, .file => |file| file.state == .operating or file.state == .canceling,
else => false,
} else false;
switch (source) {
.sync_file, .file => |file| {
// Use state machine to handle close after operation completes
if (this.owns_fd) {
file.detach();
} else {
// Don't own fd, just stop operations and detach parent
file.stop();
file.fs.data = null;
}
},
.pipe => |pipe| {
pipe.data = pipe;
pipe.close(onPipeClose);
},
.tty => |tty| {
tty.data = tty;
tty.close(onTTYClose);
},
}
this.source = null;
this.onCloseSource();
// Deref last — this may free the parent and `this`.
if (has_inflight_write) {
this.parent.deref();
if (this.source) |source| {
switch (source) {
.sync_file, .file => |file| {
// Use state machine to handle close after operation completes
if (this.owns_fd) {
file.detach();
} else {
// Don't own fd, just stop operations and detach parent
file.stop();
file.fs.data = null;
}
},
.pipe => |pipe| {
pipe.data = pipe;
pipe.close(onPipeClose);
},
.tty => |tty| {
tty.data = tty;
tty.close(onTTYClose);
},
}
this.source = null;
this.onCloseSource();
}
}
@@ -1308,10 +1298,6 @@ pub fn WindowsStreamingWriter(comptime Parent: type, function_table: anytype) ty
}
fn onWriteComplete(this: *WindowsWriter, status: uv.ReturnCode) void {
// Deref the parent at the end to balance the ref taken in
// processSend before submitting the async write request.
defer this.parent.deref();
if (status.toError(.write)) |err| {
this.last_write_result = .{ .err = err };
log("onWrite() = {s}", .{err.name()});
@@ -1361,8 +1347,7 @@ pub fn WindowsStreamingWriter(comptime Parent: type, function_table: anytype) ty
// ALWAYS complete first
file.complete(was_canceled);
// If detached, file may be closing (owned fd) or just stopped (non-owned fd).
// The deref to balance processSend's ref was already done in close().
// If detached, file may be closing (owned fd) or just stopped (non-owned fd)
if (parent_ptr == null) {
return;
}
@@ -1370,21 +1355,17 @@ pub fn WindowsStreamingWriter(comptime Parent: type, function_table: anytype) ty
const this = bun.cast(*WindowsWriter, parent_ptr);
if (was_canceled) {
// Canceled write - reset buffers and deref to balance processSend ref
// Canceled write - reset buffers
this.current_payload.reset();
this.parent.deref();
return;
}
if (result.toError(.write)) |err| {
// deref to balance processSend ref
defer this.parent.deref();
this.close();
onError(this.parent, err);
return;
}
// onWriteComplete handles the deref
this.onWriteComplete(.zero);
}
@@ -1447,10 +1428,6 @@ pub fn WindowsStreamingWriter(comptime Parent: type, function_table: anytype) ty
}
},
}
// Ref the parent to prevent it from being freed while the async
// write is in flight. The matching deref is in onWriteComplete
// or onFsWriteComplete.
this.parent.ref();
this.last_write_result = .{ .pending = 0 };
}
@@ -1465,11 +1442,10 @@ pub fn WindowsStreamingWriter(comptime Parent: type, function_table: anytype) ty
}
pub fn deinit(this: *WindowsWriter) void {
// Close the pipe first to cancel any in-flight writes before
// freeing the buffers they reference.
this.closeWithoutReporting();
// clean both buffers if needed
this.outgoing.deinit();
this.current_payload.deinit();
this.closeWithoutReporting();
}
fn writeInternal(this: *WindowsWriter, buffer: anytype, comptime writeFn: anytype) WriteResult {

View File

@@ -222,7 +222,7 @@ pub const Source = union(enum) {
switch (pipe.open(fd)) {
.err => |err| {
pipe.closeAndDestroy();
bun.default_allocator.destroy(pipe);
return .{
.err = err,
};

View File

@@ -499,23 +499,6 @@ class Database implements SqliteTypes.Database {
close(throwOnError = false) {
this.clearQueryCache();
// Finalize any prepared statements created by db.transaction()
if (controllers) {
const controller = controllers.get(this);
if (controller) {
controllers.delete(this);
const seen = new Set();
for (const ctrl of [controller.default, controller.deferred, controller.immediate, controller.exclusive]) {
if (!ctrl) continue;
for (const stmt of [ctrl.begin, ctrl.commit, ctrl.rollback, ctrl.savepoint, ctrl.release, ctrl.rollbackTo]) {
if (stmt && !seen.has(stmt)) {
seen.add(stmt);
stmt.finalize?.();
}
}
}
}
}
this.#hasClosed = true;
return SQL.close(this.#handle, throwOnError);
}

View File

@@ -5,6 +5,9 @@ const first_high_surrogate = 0xD800;
const first_low_surrogate = 0xDC00;
const last_low_surrogate = 0xDFFF;
/// For support JavaScriptCore
const ascii_only_always_on_unless_minifying = true;
fn formatUnsignedIntegerBetween(comptime len: u16, buf: *[len]u8, val: u64) void {
comptime var i: u16 = len;
var remainder = val;
@@ -23,11 +26,11 @@ pub fn writeModuleId(comptime Writer: type, writer: Writer, module_id: u32) void
std.fmt.formatInt(module_id, 16, .lower, .{}, writer) catch unreachable;
}
pub fn canPrintWithoutEscape(comptime CodePointType: type, c: CodePointType, prefers_ascii: bool) bool {
pub fn canPrintWithoutEscape(comptime CodePointType: type, c: CodePointType, comptime ascii_only: bool) bool {
if (c <= last_ascii) {
return c >= first_ascii and c != '\\' and c != '"' and c != '\'' and c != '`' and c != '$';
} else {
return !prefers_ascii and c != 0xFEFF and c != 0x2028 and c != 0x2029 and (c < first_high_surrogate or c > last_low_surrogate);
return !ascii_only and c != 0xFEFF and c != 0x2028 and c != 0x2029 and (c < first_high_surrogate or c > last_low_surrogate);
}
}
@@ -106,7 +109,7 @@ fn ws(comptime str: []const u8) Whitespacer {
return .{ .normal = Static.with, .minify = Static.without };
}
pub fn estimateLengthForUTF8(input: []const u8, comptime prefers_ascii: bool, comptime quote_char: u8) usize {
pub fn estimateLengthForUTF8(input: []const u8, comptime ascii_only: bool, comptime quote_char: u8) usize {
var remaining = input;
var len: usize = 2; // for quotes
@@ -127,7 +130,7 @@ pub fn estimateLengthForUTF8(input: []const u8, comptime prefers_ascii: bool, co
i32,
0,
);
if (canPrintWithoutEscape(i32, c, prefers_ascii)) {
if (canPrintWithoutEscape(i32, c, ascii_only)) {
len += @as(usize, char_len);
} else if (c <= 0xFFFF) {
len += 6;
@@ -142,7 +145,7 @@ pub fn estimateLengthForUTF8(input: []const u8, comptime prefers_ascii: bool, co
return len;
}
pub fn writePreQuotedString(text_in: []const u8, comptime Writer: type, writer: Writer, comptime quote_char: u8, prefers_ascii: bool, comptime json: bool, comptime encoding: strings.Encoding) !void {
pub fn writePreQuotedString(text_in: []const u8, comptime Writer: type, writer: Writer, comptime quote_char: u8, comptime ascii_only: bool, comptime json: bool, comptime encoding: strings.Encoding) !void {
const text = if (comptime encoding == .utf16) @as([]const u16, @alignCast(std.mem.bytesAsSlice(u16, text_in))) else text_in;
if (comptime json and quote_char != '"') @compileError("for json, quote_char must be '\"'");
var i: usize = 0;
@@ -180,7 +183,7 @@ pub fn writePreQuotedString(text_in: []const u8, comptime Writer: type, writer:
break :brk @as(i32, text[i]);
},
};
if (canPrintWithoutEscape(i32, c, prefers_ascii)) {
if (canPrintWithoutEscape(i32, c, ascii_only)) {
const remain = text[i + clamped_width ..];
switch (encoding) {
@@ -335,12 +338,12 @@ pub fn writePreQuotedString(text_in: []const u8, comptime Writer: type, writer:
}
}
}
pub fn quoteForJSON(text: []const u8, bytes: *MutableString, comptime prefers_ascii: bool) !void {
pub fn quoteForJSON(text: []const u8, bytes: *MutableString, comptime ascii_only: bool) !void {
const writer = bytes.writer();
try bytes.growIfNeeded(estimateLengthForUTF8(text, prefers_ascii, '"'));
try bytes.growIfNeeded(estimateLengthForUTF8(text, ascii_only, '"'));
try bytes.appendChar('"');
try writePreQuotedString(text, @TypeOf(writer), writer, '"', prefers_ascii, true, .utf8);
try writePreQuotedString(text, @TypeOf(writer), writer, '"', ascii_only, true, .utf8);
bytes.appendChar('"') catch unreachable;
}
@@ -594,6 +597,7 @@ const ImportVariant = enum {
};
fn NewPrinter(
comptime ascii_only: bool,
comptime Writer: type,
comptime rewrite_esm_to_cjs: bool,
comptime is_bun_platform: bool,
@@ -615,7 +619,6 @@ fn NewPrinter(
prev_reg_exp_end: i32 = -1,
call_target: ?Expr.Data = null,
writer: Writer,
prefers_ascii: bool,
has_printed_bundled_import_statement: bool = false,
@@ -1581,9 +1584,9 @@ fn NewPrinter(
pub fn printStringCharactersUTF8(e: *Printer, text: []const u8, quote: u8) void {
const writer = e.writer.stdWriter();
(switch (quote) {
'\'' => writePreQuotedString(text, @TypeOf(writer), writer, '\'', e.prefers_ascii, false, .utf8),
'"' => writePreQuotedString(text, @TypeOf(writer), writer, '"', e.prefers_ascii, false, .utf8),
'`' => writePreQuotedString(text, @TypeOf(writer), writer, '`', e.prefers_ascii, false, .utf8),
'\'' => writePreQuotedString(text, @TypeOf(writer), writer, '\'', ascii_only, false, .utf8),
'"' => writePreQuotedString(text, @TypeOf(writer), writer, '"', ascii_only, false, .utf8),
'`' => writePreQuotedString(text, @TypeOf(writer), writer, '`', ascii_only, false, .utf8),
else => unreachable,
}) catch |err| switch (err) {};
}
@@ -1592,9 +1595,9 @@ fn NewPrinter(
const writer = e.writer.stdWriter();
(switch (quote) {
'\'' => writePreQuotedString(slice, @TypeOf(writer), writer, '\'', e.prefers_ascii, false, .utf16),
'"' => writePreQuotedString(slice, @TypeOf(writer), writer, '"', e.prefers_ascii, false, .utf16),
'`' => writePreQuotedString(slice, @TypeOf(writer), writer, '`', e.prefers_ascii, false, .utf16),
'\'' => writePreQuotedString(slice, @TypeOf(writer), writer, '\'', ascii_only, false, .utf16),
'"' => writePreQuotedString(slice, @TypeOf(writer), writer, '"', ascii_only, false, .utf16),
'`' => writePreQuotedString(slice, @TypeOf(writer), writer, '`', ascii_only, false, .utf16),
else => unreachable,
}) catch |err| switch (err) {};
}
@@ -1705,19 +1708,13 @@ fn NewPrinter(
}
// Internal "require()" or "import()"
const has_side_effects = meta.wrapper_ref.isValid() or
meta.exports_ref.isValid() or
meta.was_unwrapped_require or
p.options.input_files_for_dev_server != null;
if (record.kind == .dynamic) {
p.printSpaceBeforeIdentifier();
p.print("Promise.resolve()");
if (has_side_effects) {
level = p.printDotThenPrefix();
}
level = p.printDotThenPrefix();
}
defer if (record.kind == .dynamic and has_side_effects) p.printDotThenSuffix();
defer if (record.kind == .dynamic) p.printDotThenSuffix();
// Make sure the comma operator is properly wrapped
const wrap_comma_operator = meta.exports_ref.isValid() and
@@ -1957,14 +1954,73 @@ fn NewPrinter(
}
}
pub inline fn canPrintIdentifierUTF16(p: *Printer, name: []const u16) bool {
if (p.prefers_ascii) {
pub inline fn canPrintIdentifierUTF16(_: *Printer, name: []const u16) bool {
if (comptime ascii_only or ascii_only_always_on_unless_minifying) {
return js_lexer.isLatin1Identifier([]const u16, name);
} else {
return js_lexer.isIdentifierUTF16(name);
}
}
fn printRawTemplateLiteral(p: *Printer, bytes: []const u8) void {
if (comptime is_json or !ascii_only) {
p.print(bytes);
return;
}
// Translate any non-ASCII to unicode escape sequences
// Note that this does not correctly handle malformed template literal strings
// template literal strings can contain invalid unicode code points
// and pretty much anything else
//
// we use WTF-8 here, but that's still not good enough.
//
var ascii_start: usize = 0;
var is_ascii = false;
var iter = CodepointIterator.init(bytes);
var cursor = CodepointIterator.Cursor{};
while (iter.next(&cursor)) {
switch (cursor.c) {
// unlike other versions, we only want to mutate > 0x7F
0...last_ascii => {
if (!is_ascii) {
ascii_start = cursor.i;
is_ascii = true;
}
},
else => {
if (is_ascii) {
p.print(bytes[ascii_start..cursor.i]);
is_ascii = false;
}
switch (cursor.c) {
0...0xFFFF => {
p.print([_]u8{
'\\',
'u',
hex_chars[cursor.c >> 12],
hex_chars[(cursor.c >> 8) & 15],
hex_chars[(cursor.c >> 4) & 15],
hex_chars[cursor.c & 15],
});
},
else => {
p.print("\\u{");
p.fmt("{x}", .{cursor.c}) catch unreachable;
p.print("}");
},
}
},
}
}
if (is_ascii) {
p.print(bytes[ascii_start..]);
}
}
pub fn printExpr(p: *Printer, expr: Expr, level: Level, in_flags: ExprFlag.Set) void {
var flags = in_flags;
@@ -2792,10 +2848,7 @@ fn NewPrinter(
p.print("`");
switch (e.head) {
.raw => |raw| {
if (p.prefers_ascii and !strings.isAllASCII(raw)) p.prefers_ascii = false;
p.print(raw);
},
.raw => |raw| p.printRawTemplateLiteral(raw),
.cooked => |*cooked| {
if (cooked.isPresent()) {
cooked.resolveRopeIfNeeded(p.options.allocator);
@@ -2809,10 +2862,7 @@ fn NewPrinter(
p.printExpr(part.value, .lowest, ExprFlag.None());
p.print("}");
switch (part.tail) {
.raw => |raw| {
if (p.prefers_ascii and !strings.isAllASCII(raw)) p.prefers_ascii = false;
p.print(raw);
},
.raw => |raw| p.printRawTemplateLiteral(raw),
.cooked => |*cooked| {
if (cooked.isPresent()) {
cooked.resolveRopeIfNeeded(p.options.allocator);
@@ -3175,11 +3225,70 @@ fn NewPrinter(
p.print(" ");
}
// RegExp literals cannot be printed ascii only because they expose a `.source` property
if (p.prefers_ascii and !strings.isAllASCII(e.value)) {
p.prefers_ascii = false;
if (comptime is_bun_platform) {
// Translate any non-ASCII to unicode escape sequences
var ascii_start: usize = 0;
var is_ascii = false;
var iter = CodepointIterator.init(e.value);
var cursor = CodepointIterator.Cursor{};
while (iter.next(&cursor)) {
switch (cursor.c) {
first_ascii...last_ascii => {
if (!is_ascii) {
ascii_start = cursor.i;
is_ascii = true;
}
},
else => {
if (is_ascii) {
p.print(e.value[ascii_start..cursor.i]);
is_ascii = false;
}
switch (cursor.c) {
0...0xFFFF => {
p.print([_]u8{
'\\',
'u',
hex_chars[cursor.c >> 12],
hex_chars[(cursor.c >> 8) & 15],
hex_chars[(cursor.c >> 4) & 15],
hex_chars[cursor.c & 15],
});
},
else => |c| {
const k = c - 0x10000;
const lo = @as(usize, @intCast(first_high_surrogate + ((k >> 10) & 0x3FF)));
const hi = @as(usize, @intCast(first_low_surrogate + (k & 0x3FF)));
p.print(&[_]u8{
'\\',
'u',
hex_chars[lo >> 12],
hex_chars[(lo >> 8) & 15],
hex_chars[(lo >> 4) & 15],
hex_chars[lo & 15],
'\\',
'u',
hex_chars[hi >> 12],
hex_chars[(hi >> 8) & 15],
hex_chars[(hi >> 4) & 15],
hex_chars[hi & 15],
});
},
}
},
}
}
if (is_ascii) {
p.print(e.value[ascii_start..]);
}
} else {
// UTF8 sequence is fine
p.print(e.value);
}
p.print(e.value);
// Need a space before the next identifier to avoid it turning into flags
p.prev_reg_exp_end = p.writer.written;
@@ -5122,7 +5231,7 @@ fn NewPrinter(
}
pub fn printIdentifier(p: *Printer, identifier: string) void {
if (p.prefers_ascii) {
if (comptime ascii_only) {
p.printIdentifierAsciiOnly(identifier);
} else {
p.print(identifier);
@@ -5174,7 +5283,7 @@ fn NewPrinter(
i += 1;
}
if (p.prefers_ascii and c > last_ascii) {
if ((comptime ascii_only) and c > last_ascii) {
switch (c) {
0...0xFFFF => {
p.print(
@@ -5307,7 +5416,6 @@ fn NewPrinter(
opts: Options,
renamer: bun.renamer.Renamer,
source_map_builder: SourceMap.Chunk.Builder,
prefers_ascii: bool,
) Printer {
var printer = Printer{
.import_records = import_records,
@@ -5315,7 +5423,6 @@ fn NewPrinter(
.writer = writer,
.renamer = renamer,
.source_map_builder = source_map_builder,
.prefers_ascii = prefers_ascii,
};
if (comptime generate_source_map) {
// This seems silly to cache but the .items() function apparently costs 1ms according to Instruments.
@@ -5802,7 +5909,7 @@ pub fn printAst(
tree: Ast,
symbols: js_ast.Symbol.Map,
source: *const logger.Source,
comptime is_bun_platform: bool,
comptime ascii_only: bool,
opts: Options,
comptime generate_source_map: bool,
) !usize {
@@ -5881,9 +5988,11 @@ pub fn printAst(
}
const PrinterType = NewPrinter(
ascii_only,
Writer,
false,
is_bun_platform,
// if it's ascii_only, it is also bun
ascii_only,
false,
generate_source_map,
);
@@ -5894,8 +6003,7 @@ pub fn printAst(
tree.import_records.slice(),
opts,
renamer,
getSourceMapBuilder(if (generate_source_map) .lazy else .disable, is_bun_platform, opts, source, &tree),
is_bun_platform,
getSourceMapBuilder(if (generate_source_map) .lazy else .disable, ascii_only, opts, source, &tree),
);
defer {
if (comptime generate_source_map) {
@@ -5985,7 +6093,7 @@ pub fn printJSON(
source: *const logger.Source,
opts: Options,
) !usize {
const PrinterType = NewPrinter(Writer, false, false, true, false);
const PrinterType = NewPrinter(false, Writer, false, false, true, false);
const writer = _writer;
var s_expr = S.SExpr{ .value = expr };
const stmt = Stmt{ .loc = logger.Loc.Empty, .data = .{
@@ -6004,7 +6112,6 @@ pub fn printJSON(
opts,
renamer.toRenamer(),
undefined,
false,
);
var bin_stack_heap = std.heap.stackFallback(1024, bun.default_allocator);
printer.binary_expression_stack = std.array_list.Managed(PrinterType.BinaryExpressionVisitor).init(bin_stack_heap.get());
@@ -6096,6 +6203,8 @@ pub fn printWithWriterAndPlatform(
bun.crash_handler.current_action = .{ .print = source.path.text };
const PrinterType = NewPrinter(
// if it's bun, it is also ascii_only
is_bun_platform,
Writer,
false,
is_bun_platform,
@@ -6108,7 +6217,6 @@ pub fn printWithWriterAndPlatform(
opts,
renamer,
getSourceMapBuilder(if (generate_source_maps) .eager else .disable, is_bun_platform, opts, source, &ast),
is_bun_platform,
);
printer.was_lazy_export = ast.has_lazy_export;
if (PrinterType.may_have_module_info) {
@@ -6178,7 +6286,7 @@ pub fn printCommonJS(
tree: Ast,
symbols: js_ast.Symbol.Map,
source: *const logger.Source,
comptime prefers_ascii: bool,
comptime ascii_only: bool,
opts: Options,
comptime generate_source_map: bool,
) !usize {
@@ -6186,7 +6294,7 @@ pub fn printCommonJS(
defer bun.crash_handler.current_action = prev_action;
bun.crash_handler.current_action = .{ .print = source.path.text };
const PrinterType = NewPrinter(Writer, true, false, false, generate_source_map);
const PrinterType = NewPrinter(ascii_only, Writer, true, false, false, generate_source_map);
const writer = _writer;
var renamer = rename.NoOpRenamer.init(symbols, source);
var printer = PrinterType.init(
@@ -6195,7 +6303,6 @@ pub fn printCommonJS(
opts,
renamer.toRenamer(),
getSourceMapBuilder(if (generate_source_map) .lazy else .disable, false, opts, source, &tree),
prefers_ascii,
);
var bin_stack_heap = std.heap.stackFallback(1024, bun.default_allocator);
printer.binary_expression_stack = std.array_list.Managed(PrinterType.BinaryExpressionVisitor).init(bin_stack_heap.get());

View File

@@ -731,22 +731,23 @@ pub const Route = struct {
if (abs_path_str.len == 0) {
var file: std.fs.File = undefined;
var needs_close = true;
var needs_close = false;
defer if (needs_close) file.close();
if (entry.cache.fd.unwrapValid()) |valid| {
file = valid.stdFile();
needs_close = false;
} else {
var parts = [_]string{ entry.dir, entry.base() };
abs_path_str = FileSystem.instance.absBuf(&parts, &route_file_buf);
route_file_buf[abs_path_str.len] = 0;
const buf = route_file_buf[0..abs_path_str.len :0];
file = std.fs.openFileAbsoluteZ(buf, .{ .mode = .read_only }) catch |err| {
needs_close = false;
log.addErrorFmt(null, Logger.Loc.Empty, allocator, "{s} opening route: {s}", .{ @errorName(err), abs_path_str }) catch unreachable;
return null;
};
FileSystem.setMaxFd(file.handle);
needs_close = FileSystem.instance.fs.needToCloseFiles();
if (!needs_close) entry.cache.fd = .fromStdFile(file);
}
const _abs = bun.getFdPath(.fromStdFile(file), &route_file_buf) catch |err| {

View File

@@ -12,79 +12,27 @@ state: union(enum) {
pub fn start(this: *Echo) Yield {
var args = this.bltn().argsSlice();
const no_newline = args.len >= 1 and std.mem.eql(u8, bun.sliceTo(args[0], 0), "-n");
// Parse flags: echo accepts -n, -e, -E in any combination.
// Flag parsing stops at the first arg that doesn't start with '-'
// or contains an invalid flag character.
var no_newline = false;
var escape_sequences = false;
var flags_done = false;
var args_start: usize = 0;
for (args) |arg| {
if (flags_done) break;
const flag = std.mem.span(arg);
if (flag.len < 2 or flag[0] != '-') {
flags_done = true;
break;
}
// Validate all characters are valid echo flags
var valid = true;
for (flag[1..]) |c| {
switch (c) {
'n', 'e', 'E' => {},
else => {
valid = false;
break;
},
}
}
if (!valid) {
flags_done = true;
break;
}
// Apply flags (last -e/-E wins)
for (flag[1..]) |c| {
switch (c) {
'n' => no_newline = true,
'e' => escape_sequences = true,
'E' => escape_sequences = false,
else => unreachable,
}
}
args_start += 1;
}
args = args[args_start..];
args = args[if (no_newline) 1 else 0..];
const args_len = args.len;
var has_leading_newline: bool = false;
var stop_output = false;
// TODO: Should flush buffer after it gets to a certain size
for (args, 0..) |arg, i| {
if (stop_output) break;
const thearg = std.mem.span(arg);
const is_last = i == args_len - 1;
if (escape_sequences) {
stop_output = appendWithEscapes(&this.output, thearg);
} else {
if (is_last) {
if (thearg.len > 0 and thearg[thearg.len - 1] == '\n') {
has_leading_newline = true;
}
bun.handleOom(this.output.appendSlice(bun.strings.trimSubsequentLeadingChars(thearg, '\n')));
} else {
bun.handleOom(this.output.appendSlice(thearg));
}
}
if (!stop_output and !is_last) {
if (i < args_len - 1) {
bun.handleOom(this.output.appendSlice(thearg));
bun.handleOom(this.output.append(' '));
} else {
if (thearg.len > 0 and thearg[thearg.len - 1] == '\n') {
has_leading_newline = true;
}
bun.handleOom(this.output.appendSlice(bun.strings.trimSubsequentLeadingChars(thearg, '\n')));
}
}
if (!stop_output and !has_leading_newline and !no_newline) bun.handleOom(this.output.append('\n'));
if (!has_leading_newline and !no_newline) bun.handleOom(this.output.append('\n'));
if (this.bltn().stdout.needsIO()) |safeguard| {
this.state = .waiting;
@@ -95,109 +43,6 @@ pub fn start(this: *Echo) Yield {
return this.bltn().done(0);
}
/// Appends `input` to `output`, interpreting backslash escape sequences.
/// Returns true if a \c escape was encountered (meaning stop all output).
fn appendWithEscapes(output: *std.array_list.Managed(u8), input: []const u8) bool {
var i: usize = 0;
while (i < input.len) {
if (input[i] == '\\' and i + 1 < input.len) {
switch (input[i + 1]) {
'\\' => {
bun.handleOom(output.append('\\'));
i += 2;
},
'a' => {
bun.handleOom(output.append('\x07'));
i += 2;
},
'b' => {
bun.handleOom(output.append('\x08'));
i += 2;
},
'c' => {
// \c: produce no further output
return true;
},
'e', 'E' => {
bun.handleOom(output.append('\x1b'));
i += 2;
},
'f' => {
bun.handleOom(output.append('\x0c'));
i += 2;
},
'n' => {
bun.handleOom(output.append('\n'));
i += 2;
},
'r' => {
bun.handleOom(output.append('\r'));
i += 2;
},
't' => {
bun.handleOom(output.append('\t'));
i += 2;
},
'v' => {
bun.handleOom(output.append('\x0b'));
i += 2;
},
'0' => {
// \0nnn: octal value (up to 3 octal digits)
i += 2; // skip \0
var val: u8 = 0;
var digits: usize = 0;
while (digits < 3 and i < input.len and input[i] >= '0' and input[i] <= '7') {
val = val *% 8 +% (input[i] - '0');
i += 1;
digits += 1;
}
bun.handleOom(output.append(val));
},
'x' => {
// \xHH: hex value (up to 2 hex digits)
i += 2; // skip \x
var val: u8 = 0;
var digits: usize = 0;
while (digits < 2 and i < input.len) {
const hex_val = hexDigitValue(input[i]);
if (hex_val) |hv| {
val = val *% 16 +% hv;
i += 1;
digits += 1;
} else {
break;
}
}
if (digits > 0) {
bun.handleOom(output.append(val));
} else {
// No valid hex digits: output \x literally
bun.handleOom(output.appendSlice("\\x"));
}
},
else => {
// Unknown escape: output backslash and the character as-is
bun.handleOom(output.append('\\'));
bun.handleOom(output.append(input[i + 1]));
i += 2;
},
}
} else {
bun.handleOom(output.append(input[i]));
i += 1;
}
}
return false;
}
fn hexDigitValue(c: u8) ?u8 {
if (c >= '0' and c <= '9') return c - '0';
if (c >= 'a' and c <= 'f') return c - 'a' + 10;
if (c >= 'A' and c <= 'F') return c - 'A' + 10;
return null;
}
pub fn onIOWriterChunk(this: *Echo, _: usize, e: ?jsc.SystemError) Yield {
if (comptime bun.Environment.allow_assert) {
assert(this.state == .waiting or this.state == .waiting_write_err);

View File

@@ -3251,15 +3251,11 @@ pub fn NewLexer(comptime encoding: StringEncoding) type {
self.chars.current = .{ .char = cur_ascii_char };
return;
}
// Set the cursor to decode the codepoint at new_idx.
// Use width=0 so that nextCursor (which computes pos = width + i)
// starts reading from exactly new_idx.
self.chars.src.cursor = CodepointIterator.Cursor{
.i = @intCast(new_idx),
.c = 0,
.width = 0,
.c = cur_ascii_char,
.width = 1,
};
SrcUnicode.nextCursor(&self.chars.src.iter, &self.chars.src.cursor);
self.chars.src.next_cursor = self.chars.src.cursor;
SrcUnicode.nextCursor(&self.chars.src.iter, &self.chars.src.next_cursor);
if (prev_ascii_char) |pc| self.chars.prev = .{ .char = pc };
@@ -3606,13 +3602,13 @@ pub fn ShellCharIter(comptime encoding: StringEncoding) type {
return bytes[self.src.i..];
}
if (self.src.cursor.i >= bytes.len) return "";
return bytes[self.src.cursor.i..];
if (self.src.iter.i >= bytes.len) return "";
return bytes[self.src.iter.i..];
}
pub fn cursorPos(self: *@This()) usize {
if (comptime encoding == .ascii) return self.src.i;
return self.src.cursor.i;
return self.src.iter.i;
}
pub fn eat(self: *@This()) ?InputChar {

View File

@@ -86,17 +86,6 @@ const templateStringTests: Record<string, TemplateStringTest> = {
StringAddition4: { expr: "`${1}z` + `\u2796${Number(1)}rest`", print: true },
StringAddition5: { expr: "`\u2796${1}z` + `\u2796${Number(1)}rest`", print: true },
StringAddition6: { expr: "`${1}` + '\u2796rest'", print: true },
TaggedTemplate1: { expr: "String.raw`one${'two'}three${'four'}`", print: true },
TaggedTemplate2: { expr: "String.raw`abc`", print: true },
TaggedTemplate3: { expr: "String.raw`\\n`", print: true },
TaggedTemplate4: { expr: "String.raw`\n`", print: true },
TaggedTemplate5: { expr: "String.raw`æ`", print: true },
TaggedTemplate6: { expr: "String.raw`\\xE6`", print: true },
TaggedTemplate7: { expr: 'String.raw`\xE6${"one"}`', print: true },
TaggedTemplate8: { expr: 'String.raw`\u{10334}${"two"}→`', print: true },
TaggedTemplate9: { expr: 'String.raw`\\u{10334}${"two"}→`', print: true },
RegExp1: { expr: "/æ/.source", print: true },
RegExp2: { expr: "/\xE6/.source", print: true },
};
describe("bundler", () => {

View File

@@ -1778,12 +1778,6 @@ console.log(<div {...obj} key="after" />);`),
expectPrintedMin_(`console.log("\\uD800" + "\\uDF34")`, 'console.log("\\uD800" + "\\uDF34")');
});
it("tagged template literal", () => {
expectPrinted_("String.raw`b`", "String.raw`b`");
expectPrinted_("String.raw`\\n`", "String.raw`\\n`");
expectPrinted_("String.raw`\n`", "String.raw`\n`");
});
it("fold string addition", () => {
expectPrinted_(
`

View File

@@ -294,7 +294,10 @@ describe("bunshell", () => {
test("escape unicode", async () => {
const { stdout } = await $`echo \\\\`;
expect(stdout.toString("utf8")).toEqual(`\\\\\n`);
// TODO: Uncomment and replace after unicode in template tags is supported
// expect(stdout.toString("utf8")).toEqual(`\弟\気\n`);
// Set this here for now, because unicode in template tags while using .raw is broken, but should be fixed
expect(stdout.toString("utf8")).toEqual("\\u5F1F\\u6C17\n");
});
/**

View File

@@ -61,7 +61,11 @@ describe("echo error handling", async () => {
});
describe("echo special cases", async () => {
TestBuilder.command`echo -n -n hello`.exitCode(0).stdout("hello").stderr("").runAsTest("-n flag with -n as argument");
TestBuilder.command`echo -n -n hello`
.exitCode(0)
.stdout("-n hello")
.stderr("")
.runAsTest("-n flag with -n as argument");
TestBuilder.command`echo -- -n hello`
.exitCode(0)

View File

@@ -1,140 +0,0 @@
import { heapStats } from "bun:jsc";
import { expect, test } from "bun:test";
// Test that ReadableStream objects from cancelled fetch responses are properly GC'd.
//
// When a streaming HTTP response body is cancelled mid-stream, FetchTasklet's
// readable_stream_ref (a Strong GC root) is not released because:
// 1. ByteStream.onCancel() doesn't notify the FetchTasklet
// 2. The HTTP connection stays open, so has_more never becomes false
// 3. Bun__FetchResponse_finalize sees the Strong ref and skips cleanup
//
// This creates a circular dependency where the Strong ref prevents GC,
// and the GC finalizer skips cleanup because the Strong ref exists.
test("ReadableStream from fetch should be GC'd after reader.cancel()", async () => {
// Use a raw TCP server to avoid server-side JS ReadableStream objects
// that would add noise to objectTypeCounts.
// The server sends one HTTP chunk immediately, then keeps the connection open.
using server = Bun.listen({
port: 0,
hostname: "127.0.0.1",
socket: {
data(socket) {
socket.write(
"HTTP/1.1 200 OK\r\n" +
"Transfer-Encoding: chunked\r\n" +
"Connection: keep-alive\r\n" +
"\r\n" +
"400\r\n" +
Buffer.alloc(0x400, "x").toString() +
"\r\n",
);
// Don't send terminal chunk "0\r\n\r\n" — keep connection open
},
open() {},
close() {},
error() {},
},
});
const url = `http://127.0.0.1:${server.port}/`;
const N = 30;
// Warmup: ensure JIT, lazy init, and connection pool are warmed up
for (let i = 0; i < 5; i++) {
const response = await fetch(url);
const reader = response.body!.getReader();
await reader.read();
await reader.cancel();
}
Bun.gc(true);
await Bun.sleep(10);
Bun.gc(true);
const baseline = heapStats().objectTypeCounts.ReadableStream ?? 0;
// Main test: fetch, read one chunk, cancel, repeat N times
for (let i = 0; i < N; i++) {
const response = await fetch(url);
const reader = response.body!.getReader();
await reader.read();
await reader.cancel();
}
// Allow finalizers to run, then GC aggressively
Bun.gc(true);
await Bun.sleep(50);
Bun.gc(true);
await Bun.sleep(50);
Bun.gc(true);
const after = heapStats().objectTypeCounts.ReadableStream ?? 0;
const leaked = after - baseline;
// With the bug: leaked ≈ N (each cancelled stream's Strong ref prevents GC)
// When fixed: leaked should be near 0 (Strong ref released on cancel)
expect(leaked).toBeLessThanOrEqual(5);
});
test("ReadableStream from fetch should be GC'd after body.cancel()", async () => {
using server = Bun.listen({
port: 0,
hostname: "127.0.0.1",
socket: {
data(socket) {
socket.write(
"HTTP/1.1 200 OK\r\n" +
"Transfer-Encoding: chunked\r\n" +
"Connection: keep-alive\r\n" +
"\r\n" +
"400\r\n" +
Buffer.alloc(0x400, "x").toString() +
"\r\n",
);
},
open() {},
close() {},
error() {},
},
});
const url = `http://127.0.0.1:${server.port}/`;
const N = 30;
// Warmup
for (let i = 0; i < 5; i++) {
const response = await fetch(url);
const reader = response.body!.getReader();
await reader.read();
reader.releaseLock();
await response.body!.cancel();
}
Bun.gc(true);
await Bun.sleep(10);
Bun.gc(true);
const baseline = heapStats().objectTypeCounts.ReadableStream ?? 0;
// Main test: fetch, read, releaseLock, cancel body directly
for (let i = 0; i < N; i++) {
const response = await fetch(url);
const reader = response.body!.getReader();
await reader.read();
reader.releaseLock();
await response.body!.cancel();
}
Bun.gc(true);
await Bun.sleep(50);
Bun.gc(true);
await Bun.sleep(50);
Bun.gc(true);
const after = heapStats().objectTypeCounts.ReadableStream ?? 0;
const leaked = after - baseline;
expect(leaked).toBeLessThanOrEqual(5);
});

View File

@@ -430,5 +430,3 @@ test/js/bun/test/parallel/test-http-should-not-accept-untrusted-certificates.ts
# Need to run the event loop once more to ensure sockets close
test/js/node/test/parallel/test-https-localaddress-bind-error.js
test/js/node/test/parallel/test-crypto-op-during-process-exit.js
test/js/third_party/prisma/prisma.test.ts

View File

@@ -1,51 +0,0 @@
import { Database } from "bun:sqlite";
import { expect, test } from "bun:test";
test("db.close(true) works after db.transaction()", () => {
const db = new Database(":memory:");
db.transaction(() => {})();
expect(() => db.close(true)).not.toThrow();
});
test("db.close(true) works after db.transaction() with actual work", () => {
const db = new Database(":memory:");
db.run("CREATE TABLE test (id INTEGER PRIMARY KEY, value TEXT)");
const insert = db.transaction((items: string[]) => {
const stmt = db.query("INSERT INTO test (value) VALUES (?)");
for (const item of items) {
stmt.run(item);
}
});
insert(["a", "b", "c"]);
expect(db.query("SELECT COUNT(*) as count FROM test").get()).toEqual({ count: 3 });
expect(() => db.close(true)).not.toThrow();
});
test("using declaration works with db.transaction()", () => {
using db = new Database(":memory:");
db.transaction(() => {})();
// Symbol.dispose calls close(true), should not throw
});
test("db.close(true) works after multiple transaction types", () => {
const db = new Database(":memory:");
db.transaction(() => {})();
db.transaction(() => {}).deferred();
db.transaction(() => {}).immediate();
db.transaction(() => {}).exclusive();
expect(() => db.close(true)).not.toThrow();
});
test("db.close(true) works after nested transactions", () => {
const db = new Database(":memory:");
db.run("CREATE TABLE test (id INTEGER PRIMARY KEY)");
const outer = db.transaction(() => {
db.run("INSERT INTO test (id) VALUES (1)");
const inner = db.transaction(() => {
db.run("INSERT INTO test (id) VALUES (2)");
});
inner();
});
outer();
expect(() => db.close(true)).not.toThrow();
});

View File

@@ -70,7 +70,7 @@ test("constant-folded equals doesn't lie", async () => {
console.log("\"" === '"');
});
test("template literal raw property with unicode in an ascii-only build", async () => {
test.skip("template literal raw property with unicode in an ascii-only build", async () => {
expect(String.raw`你好𐃘\\`).toBe("你好𐃘\\\\");
expect((await $`echo 你好𐃘`.text()).trim()).toBe("你好𐃘");
});

View File

@@ -1,43 +0,0 @@
import { $ } from "bun";
import { expect, test } from "bun:test";
// https://github.com/oven-sh/bun/issues/17244
// Shell template literals leaked __bunstr_N when the first interpolated value
// contained a space and a subsequent value contained a multi-byte UTF-8 character.
test("shell interpolation with space and multi-byte UTF-8", async () => {
const a = " ";
const b = "Í";
const result = await $`echo ${a} ${b}`.text();
expect(result.trim()).toBe("Í");
expect(result).not.toContain("__bunstr");
});
test("shell interpolation with trailing-space string and 2-byte UTF-8", async () => {
const a = "a ";
const b = "Í";
const result = await $`echo ${a} ${b}`.text();
// "a " (with trailing space preserved) + " " (template separator) + "Í"
expect(result.trim()).toBe("a Í");
expect(result).not.toContain("__bunstr");
});
test("shell interpolation with space and 3-byte UTF-8", async () => {
const a = " ";
const b = "€";
const result = await $`echo ${a} ${b}`.text();
expect(result.trim()).toBe("€");
expect(result).not.toContain("__bunstr");
});
test("shell interpolation with embedded space and multi-byte UTF-8", async () => {
const a = "a b";
const b = "Í";
const result = await $`echo ${a} ${b}`.text();
expect(result.trim()).toBe("a b Í");
expect(result).not.toContain("__bunstr");
});

View File

@@ -1,117 +0,0 @@
import { $ } from "bun";
import { describe, expect, test } from "bun:test";
describe("echo -e flag support", () => {
test("echo -e does not output -e as literal text", async () => {
const result = await $`echo -e hello`.text();
expect(result).toBe("hello\n");
});
test("echo -e interprets backslash-n", async () => {
const result = await $`echo -e ${"hello\\nworld"}`.text();
expect(result).toBe("hello\nworld\n");
});
test("echo -e interprets backslash-t", async () => {
const result = await $`echo -e ${"hello\\tworld"}`.text();
expect(result).toBe("hello\tworld\n");
});
test("echo -e interprets backslash-backslash", async () => {
const result = await $`echo -e ${"hello\\\\world"}`.text();
expect(result).toBe("hello\\world\n");
});
test("echo -e interprets \\a (bell)", async () => {
const result = await $`echo -e ${"\\a"}`.text();
expect(result).toBe("\x07\n");
});
test("echo -e interprets \\b (backspace)", async () => {
const result = await $`echo -e ${"a\\bb"}`.text();
expect(result).toBe("a\bb\n");
});
test("echo -e interprets \\r (carriage return)", async () => {
const result = await $`echo -e ${"hello\\rworld"}`.text();
expect(result).toBe("hello\rworld\n");
});
test("echo -e interprets \\f (form feed)", async () => {
const result = await $`echo -e ${"\\f"}`.text();
expect(result).toBe("\f\n");
});
test("echo -e interprets \\v (vertical tab)", async () => {
const result = await $`echo -e ${"\\v"}`.text();
expect(result).toBe("\v\n");
});
test("echo -e interprets \\0nnn (octal)", async () => {
// \0101 = 'A' (65 decimal)
const result = await $`echo -e ${"\\0101"}`.text();
expect(result).toBe("A\n");
});
test("echo -e interprets \\xHH (hex)", async () => {
// \x41 = 'A'
const result = await $`echo -e ${"\\x41\\x42\\x43"}`.text();
expect(result).toBe("ABC\n");
});
test("echo -e \\c stops output", async () => {
const result = await $`echo -e ${"hello\\cworld"}`.text();
expect(result).toBe("hello");
});
test("echo -e with \\e (escape character)", async () => {
const result = await $`echo -e ${"\\e"}`.text();
expect(result).toBe("\x1b\n");
});
test("echo -E disables escape interpretation", async () => {
const result = await $`echo -E ${"hello\\nworld"}`.text();
expect(result).toBe("hello\\nworld\n");
});
test("echo -eE (last wins: -E disables)", async () => {
const result = await $`echo -eE ${"hello\\tworld"}`.text();
expect(result).toBe("hello\\tworld\n");
});
test("echo -Ee (last wins: -e enables)", async () => {
const result = await $`echo -Ee ${"hello\\tworld"}`.text();
expect(result).toBe("hello\tworld\n");
});
test("echo -ne (no newline + escapes)", async () => {
const result = await $`echo -ne ${"hello\\tworld"}`.text();
expect(result).toBe("hello\tworld");
});
test("echo -en (same as -ne)", async () => {
const result = await $`echo -en ${"hello\\tworld"}`.text();
expect(result).toBe("hello\tworld");
});
test("echo -n still works (no newline)", async () => {
const result = await $`echo -n hello`.text();
expect(result).toBe("hello");
});
test("echo with invalid flag outputs literally", async () => {
const result = await $`echo -x hello`.text();
expect(result).toBe("-x hello\n");
});
test("echo -e piped to cat (original issue scenario)", async () => {
const pw = "mypassword";
const result = await $`echo -e ${pw} | cat`.text();
expect(result).toBe("mypassword\n");
});
test("echo without -e still works normally", async () => {
const result = await $`echo hello world`.text();
expect(result).toBe("hello world\n");
});
});

View File

@@ -1,62 +0,0 @@
import { expect, test } from "bun:test";
import { bunEnv, bunExe, tempDir } from "harness";
test("Bun.build works multiple times after FileSystemRouter is created", async () => {
using dir = tempDir("issue-18242", {
"pages/index.ts": `console.log("Hello via Bun!");`,
"build.ts": `
import path from "path";
const PAGES_DIR = path.resolve(process.cwd(), "pages");
const srcRouter = new Bun.FileSystemRouter({
dir: PAGES_DIR,
style: "nextjs",
});
const entrypoints = Object.values(srcRouter.routes);
const result1 = await Bun.build({
entrypoints,
outdir: "dist/browser",
});
const result2 = await Bun.build({
entrypoints,
outdir: "dist/bun",
target: "bun",
});
const result3 = await Bun.build({
entrypoints,
outdir: "dist/third",
});
console.log(JSON.stringify({
build1: result1.success,
build2: result2.success,
build3: result3.success,
build2Logs: result2.logs.map(String),
build3Logs: result3.logs.map(String),
}));
`,
});
await using proc = Bun.spawn({
cmd: [bunExe(), "build.ts"],
env: bunEnv,
cwd: String(dir),
stderr: "pipe",
});
const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
const result = JSON.parse(stdout.trim());
expect(result.build1).toBe(true);
expect(result.build2).toBe(true);
expect(result.build3).toBe(true);
expect(result.build2Logs).toEqual([]);
expect(result.build3Logs).toEqual([]);
expect(exitCode).toBe(0);
});

View File

@@ -1,97 +0,0 @@
import { describe, expect, test } from "bun:test";
import { bunEnv, bunExe, tempDir } from "harness";
// Regression tests for issue #20546: when a stylesheet declares bare @layer
// statements and then @import's files into those layers, the bundler must
// inline the imports without leaving @import statements behind or duplicating
// the bare @layer declarations in the output.
describe("issue #20546 - CSS @layer declarations should be stripped from source files", () => {
test("separate @layer statements with @import layer()", async () => {
using dir = tempDir("css-layer-20546", {
"main.css": /* css */ `
@layer one;
@layer two;
@layer three;
@import url('./a.css') layer(one);
@import url('./b.css') layer(two);
@import url('./c.css') layer(three);
`,
"a.css": /* css */ `body { margin: 0; }`,
"b.css": /* css */ `h1 { font-family: sans-serif; }`,
"c.css": /* css */ `.text-centered { text-align: center; }`,
});
// Bundle main.css with the CLI and capture the build's output streams.
await using proc = Bun.spawn({
cmd: [bunExe(), "build", "./main.css", "--outdir=out"],
env: bunEnv,
cwd: String(dir),
stdout: "pipe",
stderr: "pipe",
});
const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
// The build itself must not warn or error.
expect(stderr).toBe("");
const outCss = await Bun.file(`${dir}/out/main.css`).text();
// @layer declarations should appear at the top (hoisted or as part of the layer blocks)
// @import statements should NOT appear in the output (they've been inlined)
expect(outCss).not.toContain("@import");
// The bare @layer declarations should not be duplicated at the bottom
// They should either be hoisted to the top or removed entirely since
// the layer blocks establish the same ordering
const layerOneStatements = outCss.match(/@layer one;/g);
const layerTwoStatements = outCss.match(/@layer two;/g);
const layerThreeStatements = outCss.match(/@layer three;/g);
// Each @layer declaration should appear at most once (hoisted)
expect((layerOneStatements ?? []).length).toBeLessThanOrEqual(1);
expect((layerTwoStatements ?? []).length).toBeLessThanOrEqual(1);
expect((layerThreeStatements ?? []).length).toBeLessThanOrEqual(1);
// The actual layer block content should be present
expect(outCss).toContain("margin: 0");
expect(outCss).toContain("font-family: sans-serif");
expect(outCss).toContain("text-align: center");
expect(exitCode).toBe(0);
});
// Same scenario as above, but the three layers are declared with a single
// comma-separated @layer statement instead of three separate statements.
test("comma syntax @layer statement with @import layer()", async () => {
using dir = tempDir("css-layer-20546-comma", {
"main.css": /* css */ `
@layer one, two, three;
@import url('./a.css') layer(one);
@import url('./b.css') layer(two);
@import url('./c.css') layer(three);
`,
"a.css": /* css */ `body { margin: 0; }`,
"b.css": /* css */ `h1 { font-family: sans-serif; }`,
"c.css": /* css */ `.text-centered { text-align: center; }`,
});
await using proc = Bun.spawn({
cmd: [bunExe(), "build", "./main.css", "--outdir=out"],
env: bunEnv,
cwd: String(dir),
stdout: "pipe",
stderr: "pipe",
});
const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
expect(stderr).toBe("");
const outCss = await Bun.file(`${dir}/out/main.css`).text();
// @import statements should NOT appear in the output
expect(outCss).not.toContain("@import");
// The actual layer block content should be present
expect(outCss).toContain("margin: 0");
expect(outCss).toContain("font-family: sans-serif");
expect(outCss).toContain("text-align: center");
expect(exitCode).toBe(0);
});
});

View File

@@ -1,49 +0,0 @@
import { expect, test } from "bun:test";
import { tempDir } from "harness";
// Bundling a dynamic import whose result is discarded must still emit
// syntactically valid JavaScript (regression test for issue #24709).
test("bun build produces valid JS for unused dynamic imports", async () => {
  using dir = tempDir("issue-24709", {
    "void-import.ts": `
export function main() {
void import("./dep.ts");
}
`,
    "bare-import.ts": `
export function main() {
import("./dep.ts");
}
`,
    "dep.ts": `export const x = 1;`,
  });
  const scanner = new Bun.Transpiler();
  // Exercise both forms in order: void import("...") first, bare import("...") second.
  for (const entry of ["void-import.ts", "bare-import.ts"]) {
    const build = await Bun.build({
      entrypoints: [`${dir}/${entry}`],
    });
    expect(build.success).toBe(true);
    const bundled = await build.outputs[0].text();
    // A broken arrow body like "() => )" was the original failure mode.
    expect(bundled).not.toContain("() => )");
    // scanImports throws on syntactically invalid JS, so a clean scan
    // validates that the bundle parses.
    expect(() => scanner.scanImports(bundled)).not.toThrow();
  }
});

View File

@@ -1,28 +0,0 @@
import { test } from "bun:test";
test("issue #27099", async () => {
  // Execute the reproduction twice in sequence so ASAN can observe a
  // use-after-free on the second pass.
  for (let pass = 0; pass < 2; pass++) {
    await run();
  }
});
// Reproduction body for issue #27099. The mix of work below — racing file
// existence checks, an outer subprocess producing many lines on stdout, and an
// inner subprocess fed a large Response body — together with the exact await
// ordering is what triggers the crash this test guards against; do not
// "simplify" the sequencing.
async function run() {
// Kick off several concurrent existence checks that race with the subprocess I/O.
const fileOps = Array.from({ length: 10 }, () => Bun.file("/tmp/nope").exists());
// Outer child: prints 100 lines of padding; stderr is piped but never read
// (part of the reproduction shape).
const outer = Bun.spawn(["bash", "-c", 'for j in $(seq 1 100); do echo "padding padding padding"; done'], {
stdout: "pipe",
stderr: "pipe",
});
// Start draining outer stdout, but don't await it until after the inner child.
const outerText = new Response(outer.stdout as ReadableStream).text();
// Inner child: cat a ~20 KB body through stdin -> stdout.
const inner = Bun.spawn(["cat"], {
stdin: new Response(Buffer.allocUnsafe(20000).fill("a").toString()),
stdout: "pipe",
});
await new Response(inner.stdout as ReadableStream).text();
await inner.exited;
await outerText;
await outer.exited;
await Promise.all(fileOps);
}

View File

@@ -1,39 +0,0 @@
import { expect, test } from "bun:test";
import { bunEnv, bunExe, tempDir } from "harness";
// Regression test: two :root rules separated by an @property at-rule must both
// survive bundling — the bundler may not merge/deduplicate them across the
// @property boundary.
test("CSS bundler should not drop :root rule before @property", async () => {
using dir = tempDir("css-property-root-dedup", {
"input.css": `:root {
--bar: 1;
}
@property --foo {
syntax: "<number>";
inherits: true;
initial-value: 0;
}
:root {
--baz: 2;
}
`,
});
// Bundle via the CLI and inspect the emitted stylesheet.
await using proc = Bun.spawn({
cmd: [bunExe(), "build", "input.css", "--outdir", "out"],
env: bunEnv,
cwd: String(dir),
stdout: "pipe",
stderr: "pipe",
});
const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
const output = await Bun.file(`${dir}/out/input.css`).text();
// Both :root blocks must be preserved — they cannot be merged across the @property boundary
expect(output).toContain("--bar: 1");
expect(output).toContain("--baz: 2");
expect(output).toContain("@property --foo");
expect(exitCode).toBe(0);
});

View File

@@ -1,34 +0,0 @@
import { expect, test } from "bun:test";
// issue #8207: parsel-js patches its base selector regex by string-replacing
// the "¶" (U+00B6) placeholder inside RegExp.prototype.source, so the
// character must round-trip through the literal -> .source path intact.
// FIX: the regex literal had lost the ¶, reading `(?<argument>*)` — that is a
// SyntaxError ("nothing to repeat") and also made the .replace() below a
// no-op; the placeholder is restored here to match the strings asserted below.
test("issue #8207 - regex source string replacement with UTF-16 character", () => {
// This tests the case where parsel-js does string replacement on a regex source
// The ¶ character (U+00B6) should be replaceable in the regex source string
const regex = /:(?<name>[-\w\P{ASCII}]+)(?:\((?<argument>¶*)\))?/gu;
// Get the source and try to replace ¶ with .*
const source = regex.source;
const replaced = source.replace("(?<argument>¶*)", "(?<argument>.*)");
// The replacement should work - the ¶ character should be found and replaced
expect(replaced).not.toBe(source);
expect(replaced).toContain("(?<argument>.*)");
expect(replaced).not.toContain("(?<argument>¶*)");
// Verify the new regex can be created successfully
const newRegex = new RegExp(replaced, "gu");
expect(newRegex).toBeInstanceOf(RegExp);
});
// issue #8207, matching behavior: the regex containing the ¶ placeholder must
// itself match selectors whose argument is a run of ¶ characters.
// FIX: restored the lost ¶ in the literal — `(?<argument>*)` is a SyntaxError
// ("nothing to repeat") and could never match ":test(¶¶¶)" as asserted below.
test("issue #8207 - regex with UTF-16 character in source", () => {
// Additional test to ensure the regex itself works correctly
const regex = /:(?<name>[-\w\P{ASCII}]+)(?:\((?<argument>¶*)\))?/gu;
// Test matching with the original regex
const match1 = ":test(¶¶¶)".match(regex);
expect(match1).toBeTruthy();
expect(match1[0]).toBe(":test(¶¶¶)");
const match2 = ":name".match(regex);
expect(match2).toBeTruthy();
expect(match2[0]).toBe(":name");
});

View File

@@ -1,163 +0,0 @@
import { expect, test } from "bun:test";
import { bunEnv, bunExe, tempDirWithFiles } from "harness";
// UTF-8 byte sequence for the string "æ™弟気👋" (2-, 3-, 3-, 3- and 4-byte
// code points), decoded back to a JS string for comparison against stdout.
const expected_stdout = new TextDecoder().decode(
new Uint8Array([195, 166, 226, 132, 162, 229, 188, 159, 230, 176, 151, 240, 159, 145, 139]),
);
// Fixture prints the raw non-ASCII string; transpiling/bundling must not mangle it.
const fixture = `console.log(String.raw\`æ™弟気👋\`);`;
// requires_rtc_fixture.ts is padded with 16 MiB of spaces — presumably enough
// to exceed the runtime-transpiler-cache size cutoff so the "(requires rtc)"
// tests hit the cache path; TODO confirm the exact threshold.
const dir = tempDirWithFiles("run directly", {
"fixture.ts": fixture,
"requires_rtc_fixture.ts": fixture + " ".repeat(16 * 1024 * 1024),
});
// Running the fixture directly must print the non-ASCII string byte-for-byte.
test("run directly", async () => {
  const spawned = Bun.spawnSync({
    cmd: [bunExe(), "fixture.ts"],
    cwd: dir,
    env: bunEnv,
    stdio: ["pipe", "pipe", "pipe"],
  });
  const observed = {
    stdout: spawned.stdout.toString().trim(),
    stderr: spawned.stderr.toString().trim(),
    exitCode: spawned.exitCode,
  };
  expect(observed).toEqual({
    stdout: expected_stdout,
    stderr: "",
    exitCode: 0,
  });
});
// Bundling the fixture and then running the bundle must preserve the
// non-ASCII output through the build step.
test("build js then run", async () => {
  const buildStep = Bun.spawnSync({
    cmd: [bunExe(), "build", "--target", "bun", "--outfile", "build/fixture.js", "fixture.ts"],
    cwd: dir,
    env: bunEnv,
    stdio: ["inherit", "inherit", "inherit"],
  });
  expect(buildStep.exitCode).toBe(0);
  const runStep = Bun.spawnSync({
    cmd: [bunExe(), "build/fixture.js"],
    cwd: dir,
    env: bunEnv,
    stdio: ["pipe", "pipe", "pipe"],
  });
  const observed = {
    stdout: runStep.stdout.toString().trim(),
    stderr: runStep.stderr.toString().trim(),
    exitCode: runStep.exitCode,
  };
  expect(observed).toEqual({
    stdout: expected_stdout,
    stderr: "",
    exitCode: 0,
  });
});
// Same as "build js then run", but with --minify: minification must not
// corrupt the non-ASCII string either.
test("build min js then run", async () => {
const result_built = Bun.spawnSync({
cmd: [bunExe(), "build", "--target", "bun", "--minify", "--outfile", "build/fixture-min.js", "fixture.ts"],
cwd: dir,
env: bunEnv,
stdio: ["inherit", "inherit", "inherit"],
});
expect(result_built.exitCode).toBe(0);
// Run the minified bundle and compare its trimmed output exactly.
const result = Bun.spawnSync({
cmd: [bunExe(), "build/fixture-min.js"],
cwd: dir,
env: bunEnv,
stdio: ["pipe", "pipe", "pipe"],
});
expect({
stdout: result.stdout.toString().trim(),
stderr: result.stderr.toString().trim(),
exitCode: result.exitCode,
}).toEqual({
stdout: expected_stdout,
stderr: "",
exitCode: 0,
});
});
// It's not clear what the cutoff is to the runtime transpiler cache
// https://github.com/oven-sh/bun/blob/b960677f5f99de7adf7b84fb8b4c8e1a97ff9e55/src/bun.js/RuntimeTranspilerCache.zig#L17
// Direct execution of the padded fixture — intended to exercise the runtime
// transpiler cache path (see the comment above about the size cutoff).
test("run directly (requires rtc)", async () => {
  const spawned = Bun.spawnSync({
    cmd: [bunExe(), "requires_rtc_fixture.ts"],
    cwd: dir,
    env: bunEnv,
    stdio: ["pipe", "pipe", "pipe"],
  });
  expect(spawned.stdout.toString().trim()).toEqual(expected_stdout);
  expect(spawned.exitCode).toBe(0);
});
// Bundle the padded fixture and run the bundle; the non-ASCII string must
// survive both the build step and execution of the output.
test("build js then run (requires rtc)", async () => {
const result_built = Bun.spawnSync({
cmd: [
bunExe(),
"build",
"--target",
"bun",
"--outfile",
"build/requires_rtc_fixture.js",
"requires_rtc_fixture.ts",
],
cwd: dir,
env: bunEnv,
stdio: ["inherit", "inherit", "inherit"],
});
expect(result_built.exitCode).toBe(0);
// Execute the bundled output and compare its trimmed streams exactly.
const result = Bun.spawnSync({
cmd: [bunExe(), "build/requires_rtc_fixture.js"],
cwd: dir,
env: bunEnv,
stdio: ["pipe", "pipe", "pipe"],
});
expect({
stdout: result.stdout.toString().trim(),
stderr: result.stderr.toString().trim(),
exitCode: result.exitCode,
}).toEqual({
stdout: expected_stdout,
stderr: "",
exitCode: 0,
});
});
// Minified variant of the padded-fixture build: --minify must not corrupt the
// non-ASCII string either.
test("build min js then run (requires rtc)", async () => {
const result_built = Bun.spawnSync({
cmd: [
bunExe(),
"build",
"--target",
"bun",
"--minify",
"--outfile",
"build/requires_rtc_fixture-min.js",
"requires_rtc_fixture.ts",
],
cwd: dir,
env: bunEnv,
stdio: ["inherit", "inherit", "inherit"],
});
expect(result_built.exitCode).toBe(0);
// Run the minified bundle and compare its trimmed streams exactly.
const result = Bun.spawnSync({
cmd: [bunExe(), "build/requires_rtc_fixture-min.js"],
cwd: dir,
env: bunEnv,
stdio: ["pipe", "pipe", "pipe"],
});
expect({
stdout: result.stdout.toString().trim(),
stderr: result.stderr.toString().trim(),
exitCode: result.exitCode,
}).toEqual({
stdout: expected_stdout,
stderr: "",
exitCode: 0,
});
});