diff --git a/cmake/targets/BuildMimalloc.cmake b/cmake/targets/BuildMimalloc.cmake index f406d7e36b..bed2053b4e 100644 --- a/cmake/targets/BuildMimalloc.cmake +++ b/cmake/targets/BuildMimalloc.cmake @@ -4,7 +4,7 @@ register_repository( REPOSITORY oven-sh/mimalloc COMMIT - 1beadf9651a7bfdec6b5367c380ecc3fe1c40d1a + c1f17cd2538417620f60bff70bffe7e68d332aec ) set(MIMALLOC_CMAKE_ARGS @@ -31,13 +31,7 @@ if(ENABLE_VALGRIND) list(APPEND MIMALLOC_CMAKE_ARGS -DMI_VALGRIND=ON) endif() -if(WIN32) - if(DEBUG) - set(MIMALLOC_LIBRARY mimalloc-static-debug) - else() - set(MIMALLOC_LIBRARY mimalloc-static) - endif() -elseif(DEBUG) +if(DEBUG) if (ENABLE_ASAN) set(MIMALLOC_LIBRARY mimalloc-asan-debug) else() @@ -53,6 +47,7 @@ if(APPLE OR (LINUX AND NOT DEBUG)) set(MIMALLOC_LIBRARY CMakeFiles/mimalloc-obj.dir/src/static.c.o) endif() + register_cmake_command( TARGET mimalloc diff --git a/src/allocators/AllocationScope.zig b/src/allocators/AllocationScope.zig index 324468dd03..a37e3fa555 100644 --- a/src/allocators/AllocationScope.zig +++ b/src/allocators/AllocationScope.zig @@ -216,7 +216,7 @@ pub fn trackExternalAllocation(scope: *AllocationScope, ptr: []const u8, ret_add /// Call when the pointer from `trackExternalAllocation` is freed. /// Returns true if the free was invalid. pub fn trackExternalFree(scope: *AllocationScope, slice: anytype, ret_addr: ?usize) bool { - if (comptime !enabled) return; + if (comptime !enabled) return false; const ptr: []const u8 = switch (@typeInfo(@TypeOf(slice))) { .pointer => |p| switch (p.size) { .slice => brk: { diff --git a/src/allocators/basic.zig b/src/allocators/basic.zig index 44a4b99998..3a4de668af 100644 --- a/src/allocators/basic.zig +++ b/src/allocators/basic.zig @@ -13,7 +13,6 @@ fn mimalloc_free( // but its good to have that assertion // let's only enable it in debug mode if (comptime Environment.isDebug) { - assert(mimalloc.mi_is_in_heap_region(buf.ptr)); if (mimalloc.canUseAlignedAlloc(buf.len, alignment.toByteUnits())) mimalloc.mi_free_size_aligned(buf.ptr, buf.len, alignment.toByteUnits()) else @@ -25,7 +24,6 @@ fn mimalloc_free( const MimallocAllocator = struct { pub const supports_posix_memalign = true; - fn alignedAlloc(len: usize, alignment: mem.Alignment) ?[*]u8 { if (comptime Environment.enable_logs) log("mi_alloc({d}, {d})", .{ len, alignment.toByteUnits() }); @@ -148,7 +146,6 @@ const Environment = @import("../env.zig"); const std = @import("std"); const bun = @import("bun"); -const assert = bun.assert; const mimalloc = bun.mimalloc; const mem = @import("std").mem; diff --git a/src/allocators/mimalloc.zig b/src/allocators/mimalloc.zig index 2fa101d04e..b7cbcf6255 100644 --- a/src/allocators/mimalloc.zig +++ b/src/allocators/mimalloc.zig @@ -127,13 +127,14 @@ pub extern fn mi_reserve_huge_os_pages_at(pages: usize, numa_node: c_int, timeou pub extern fn mi_reserve_os_memory(size: usize, commit: bool, allow_large: bool) c_int; pub extern fn mi_manage_os_memory(start: ?*anyopaque, size: usize, is_committed: bool, is_large: bool, is_zero: bool, numa_node: c_int) bool; pub extern fn mi_debug_show_arenas() void; -pub const ArenaID = c_int; -pub extern fn mi_arena_area(arena_id: ArenaID, size: [*c]usize) ?*anyopaque; +pub const ArenaID = ?*anyopaque; +pub extern fn mi_arena_area(arena_id: ArenaID, size: *usize) ?*anyopaque; pub extern fn mi_reserve_huge_os_pages_at_ex(pages: usize, numa_node: c_int, timeout_msecs: usize, exclusive: bool, arena_id: *ArenaID) c_int; pub extern fn mi_reserve_os_memory_ex(size: usize, commit: bool, allow_large: bool, exclusive: 
bool, arena_id: *ArenaID) c_int; pub extern fn mi_manage_os_memory_ex(start: ?*anyopaque, size: usize, is_committed: bool, is_large: bool, is_zero: bool, numa_node: c_int, exclusive: bool, arena_id: *ArenaID) bool; pub extern fn mi_heap_new_in_arena(arena_id: ArenaID) ?*Heap; pub extern fn mi_reserve_huge_os_pages(pages: usize, max_secs: f64, pages_reserved: [*c]usize) c_int; +pub extern fn mi_thread_set_in_threadpool() void; pub const Option = enum(c_uint) { show_errors = 0, show_stats = 1, @@ -211,4 +212,7 @@ inline fn mi_malloc_satisfies_alignment(alignment: usize, size: usize) bool { (alignment == MI_MAX_ALIGN_SIZE and size >= (MI_MAX_ALIGN_SIZE / 2))); } +pub const mi_arena_id_t = ?*anyopaque; +pub extern fn mi_heap_new_ex(heap_tag: c_int, allow_destroy: bool, arena_id: mi_arena_id_t) ?*Heap; + const std = @import("std"); diff --git a/src/ast/Expr.zig b/src/ast/Expr.zig index 9d5d1def34..a8b3d60b09 100644 --- a/src/ast/Expr.zig +++ b/src/ast/Expr.zig @@ -96,7 +96,7 @@ pub fn fromBlob( if (mime_type.category.isTextLike()) { var output = MutableString.initEmpty(allocator); - output = try JSPrinter.quoteForJSON(bytes, output, true); + try JSPrinter.quoteForJSON(bytes, &output, true); var list = output.toOwnedSlice(); // remove the quotes if (list.len > 0) { diff --git a/src/bake/DevServer.zig b/src/bake/DevServer.zig index 066f1a4180..bdd3ff0682 100644 --- a/src/bake/DevServer.zig +++ b/src/bake/DevServer.zig @@ -2061,7 +2061,7 @@ pub fn finalizeBundle( .gts = undefined, }; - const quoted_source_contents: []const []const u8 = bv2.linker.graph.files.items(.quoted_source_contents); + const quoted_source_contents: []?[]u8 = bv2.linker.graph.files.items(.quoted_source_contents); // Pass 1, update the graph's nodes, resolving every bundler source // index into its `IncrementalGraph(...).FileIndex` for ( @@ -2074,7 +2074,7 @@ pub fn finalizeBundle( bun.assert(compile_result.javascript.result == .result); bun.assert(dev.server_transpiler.options.source_map != .none); bun.assert(!part_range.source_index.isRuntime()); - break :brk .empty; + break :brk .initEmpty(); }; // TODO: investigate why linker.files is not indexed by linker's index // const linker_index = bv2.linker.graph.stable_source_indices[index.get()]; @@ -2087,13 +2087,16 @@ pub fn finalizeBundle( }).receiveChunk( &ctx, index, - .{ .js = .{ - .code = compile_result.code(), - .source_map = .{ - .chunk = source_map, - .escaped_source = @constCast(quoted_contents), + .{ + .js = .{ + .code = compile_result.javascript.code(), + .code_allocator = compile_result.javascript.allocator(), + .source_map = .{ + .chunk = source_map, + .escaped_source = quoted_contents, + }, }, - } }, + }, graph == .ssr, ), } @@ -2179,6 +2182,7 @@ pub fn finalizeBundle( index, .{ .js = .{ .code = generated_js, + .code_allocator = dev.allocator, .source_map = null, } }, false, @@ -2971,6 +2975,9 @@ fn sendBuiltInNotFound(resp: anytype) void { } fn printMemoryLine(dev: *DevServer) void { + if (comptime !bun.Environment.enableAllocScopes) { + return; + } if (!debug.isVisible()) return; Output.prettyErrorln("DevServer tracked {}, measured: {} ({}), process: {}", .{ bun.fmt.size(dev.memoryCost(), .{}), diff --git a/src/bake/DevServer/IncrementalGraph.zig b/src/bake/DevServer/IncrementalGraph.zig index 7459c657f7..8796872dbc 100644 --- a/src/bake/DevServer/IncrementalGraph.zig +++ b/src/bake/DevServer/IncrementalGraph.zig @@ -126,12 +126,15 @@ pub fn IncrementalGraph(side: bake.Side) type { .client => struct { /// Content depends on `flags.kind` /// See function wrappers 
to safely read into this data - content: extern union { - /// Allocated by `dev.allocator`. Access with `.jsCode()` + content: union { + /// Access contents with `.jsCode()`. /// When stale, the code is "", otherwise it contains at /// least one non-whitespace character, as empty chunks /// contain at least a function wrapper. - js_code_ptr: [*]const u8, + js_code: struct { + ptr: [*]const u8, + allocator: std.mem.Allocator, + }, /// Access with `.cssAssetId()` css_asset_id: u64, @@ -179,18 +182,20 @@ pub fn IncrementalGraph(side: bake.Side) type { }; comptime { - const d = std.debug; - if (!Environment.isDebug) { - d.assert(@sizeOf(@This()) == @sizeOf(u64) * 3); - d.assert(@alignOf(@This()) == @alignOf([*]u8)); + if (@import("builtin").mode == .ReleaseFast or @import("builtin").mode == .ReleaseSmall) { + bun.assert_eql(@sizeOf(@This()), @sizeOf(u64) * 5); + bun.assert_eql(@alignOf(@This()), @alignOf([*]u8)); } } - fn initJavaScript(code_slice: []const u8, flags: Flags, source_map: PackedMap.RefOrEmpty) @This() { + fn initJavaScript(code_slice: []const u8, code_allocator: std.mem.Allocator, flags: Flags, source_map: PackedMap.RefOrEmpty) @This() { assert(flags.kind == .js or flags.kind == .asset); assert(flags.source_map_state == std.meta.activeTag(source_map)); return .{ - .content = .{ .js_code_ptr = code_slice.ptr }, + .content = .{ .js_code = .{ + .ptr = code_slice.ptr, + .allocator = code_allocator, + } }, .code_len = @intCast(code_slice.len), .flags = flags, .source_map = source_map.untag(), @@ -220,7 +225,12 @@ pub fn IncrementalGraph(side: bake.Side) type { fn jsCode(file: @This()) []const u8 { assert(file.flags.kind.hasInlinejscodeChunk()); - return file.content.js_code_ptr[0..file.code_len]; + return file.content.js_code.ptr[0..file.code_len]; + } + + fn freeJsCode(file: *@This()) void { + assert(file.flags.kind.hasInlinejscodeChunk()); + file.content.js_code.allocator.free(file.jsCode()); } fn cssAssetId(file: @This()) u64 { @@ -250,7 +260,7 @@ pub fn IncrementalGraph(side: bake.Side) type { fn freeFileContent(g: *IncrementalGraph(.client), key: []const u8, file: *File, css: enum { unref_css, ignore_css }) void { switch (file.flags.kind) { .js, .asset => { - g.owner().allocator.free(file.jsCode()); + file.freeJsCode(); switch (file.sourceMap()) { .ref => |ptr| { ptr.derefWithContext(g.owner()); @@ -386,9 +396,10 @@ pub fn IncrementalGraph(side: bake.Side) type { content: union(enum) { js: struct { code: []const u8, + code_allocator: std.mem.Allocator, source_map: ?struct { chunk: SourceMap.Chunk, - escaped_source: []u8, + escaped_source: ?[]u8, }, }, css: u64, @@ -475,24 +486,22 @@ pub fn IncrementalGraph(side: bake.Side) type { switch (content) { .css => |css| gop.value_ptr.* = .initCSS(css, flags), .js => |js| { - dev.allocation_scope.assertOwned(js.code); - // Insert new source map or patch existing empty source map. 
const source_map: PackedMap.RefOrEmpty = brk: { if (js.source_map) |source_map| { bun.debugAssert(!flags.is_html_route); // suspect behind #17956 if (source_map.chunk.buffer.len() > 0) { - dev.allocation_scope.assertOwned(source_map.chunk.buffer.list.items); - dev.allocation_scope.assertOwned(source_map.escaped_source); flags.source_map_state = .ref; break :brk .{ .ref = PackedMap.newNonEmpty( source_map.chunk, - source_map.escaped_source, + source_map.escaped_source.?, ) }; } var take = source_map.chunk.buffer; take.deinit(); - dev.allocator.free(source_map.escaped_source); + if (source_map.escaped_source) |escaped_source| { + bun.default_allocator.free(escaped_source); + } } // Must precompute this. Otherwise, source maps won't have @@ -508,7 +517,7 @@ pub fn IncrementalGraph(side: bake.Side) type { } }; }; - gop.value_ptr.* = .initJavaScript(js.code, flags, source_map); + gop.value_ptr.* = .initJavaScript(js.code, js.code_allocator, flags, source_map); // Track JavaScript chunks for concatenation try g.current_chunk_parts.append(dev.allocator, file_index); @@ -579,7 +588,9 @@ pub fn IncrementalGraph(side: bake.Side) type { if (content.js.source_map) |source_map| { var take = source_map.chunk.buffer; take.deinit(); - dev.allocator.free(source_map.escaped_source); + if (source_map.escaped_source) |escaped_source| { + bun.default_allocator.free(escaped_source); + } } } }, diff --git a/src/bake/DevServer/PackedMap.zig b/src/bake/DevServer/PackedMap.zig index 7a2d2f840a..83fb0922e7 100644 --- a/src/bake/DevServer/PackedMap.zig +++ b/src/bake/DevServer/PackedMap.zig @@ -11,6 +11,7 @@ ref_count: RefCount, /// This is stored to allow lazy construction of source map files. vlq_ptr: [*]u8, vlq_len: u32, +vlq_allocator: std.mem.Allocator, /// The bundler runs quoting on multiple threads, so it only makes /// sense to preserve that effort for concatenation and /// re-concatenation. @@ -31,24 +32,26 @@ end_state: struct { /// already counted for. 
bits_used_for_memory_cost_dedupe: u32 = 0, -pub fn newNonEmpty(source_map: SourceMap.Chunk, quoted_contents: []u8) bun.ptr.RefPtr(PackedMap) { - assert(source_map.buffer.list.items.len > 0); +pub fn newNonEmpty(chunk: SourceMap.Chunk, quoted_contents: []u8) bun.ptr.RefPtr(PackedMap) { + assert(chunk.buffer.list.items.len > 0); + var buffer = chunk.buffer; + const slice = buffer.toOwnedSlice(); return .new(.{ .ref_count = .init(), - .vlq_ptr = source_map.buffer.list.items.ptr, - .vlq_len = @intCast(source_map.buffer.list.items.len), + .vlq_ptr = slice.ptr, + .vlq_len = @intCast(slice.len), + .vlq_allocator = buffer.allocator, .quoted_contents_ptr = quoted_contents.ptr, .quoted_contents_len = @intCast(quoted_contents.len), .end_state = .{ - .original_line = source_map.end_state.original_line, - .original_column = source_map.end_state.original_column, + .original_line = chunk.end_state.original_line, + .original_column = chunk.end_state.original_column, }, }); } -fn destroy(self: *@This(), dev: *DevServer) void { - dev.allocator.free(self.vlq()); - dev.allocator.free(self.quotedContents()); +fn destroy(self: *@This(), _: *DevServer) void { + self.vlq_allocator.free(self.vlq()); bun.destroy(self); } @@ -78,7 +81,7 @@ pub fn quotedContents(self: *const @This()) []u8 { comptime { if (!Environment.isDebug) { - assert_eql(@sizeOf(@This()), @sizeOf(usize) * 5); + assert_eql(@sizeOf(@This()), @sizeOf(usize) * 7); assert_eql(@alignOf(@This()), @alignOf(usize)); } } @@ -156,6 +159,8 @@ pub const RefOrEmpty = union(enum(u1)) { }; }; +const std = @import("std"); + const bun = @import("bun"); const Environment = bun.Environment; const SourceMap = bun.sourcemap; diff --git a/src/bun.js.zig b/src/bun.js.zig index d05a6818ea..3def2c2445 100644 --- a/src/bun.js.zig +++ b/src/bun.js.zig @@ -23,7 +23,7 @@ pub const Run = struct { js_ast.Expr.Data.Store.create(); js_ast.Stmt.Data.Store.create(); - var arena = try Arena.init(); + const arena = try Arena.init(); if (!ctx.debug.loaded_bunfig) { try bun.cli.Arguments.loadConfigPath(ctx.allocator, true, "bunfig.toml", ctx, .RunCommand); @@ -31,7 +31,7 @@ pub const Run = struct { run = .{ .vm = try VirtualMachine.initWithModuleGraph(.{ - .allocator = arena.allocator(), + .allocator = bun.default_allocator, .log = ctx.log, .args = ctx.args, .graph = graph_ptr, @@ -48,7 +48,7 @@ pub const Run = struct { vm.preload = ctx.preloads; vm.argv = ctx.passthrough; vm.arena = &run.arena; - vm.allocator = arena.allocator(); + vm.allocator = bun.default_allocator; b.options.install = ctx.install; b.resolver.opts.install = ctx.install; @@ -160,12 +160,12 @@ pub const Run = struct { js_ast.Expr.Data.Store.create(); js_ast.Stmt.Data.Store.create(); - var arena = try Arena.init(); + const arena = try Arena.init(); run = .{ .vm = try VirtualMachine.init( .{ - .allocator = arena.allocator(), + .allocator = bun.default_allocator, .log = ctx.log, .args = ctx.args, .store_fd = ctx.debug.hot_reload != .none, @@ -187,7 +187,7 @@ pub const Run = struct { vm.preload = ctx.preloads; vm.argv = ctx.passthrough; vm.arena = &run.arena; - vm.allocator = arena.allocator(); + vm.allocator = bun.default_allocator; if (ctx.runtime_options.eval.script.len > 0) { const script_source = try bun.default_allocator.create(logger.Source); diff --git a/src/bun.js/SavedSourceMap.zig b/src/bun.js/SavedSourceMap.zig index 68fcdb75e9..706a748507 100644 --- a/src/bun.js/SavedSourceMap.zig +++ b/src/bun.js/SavedSourceMap.zig @@ -166,7 +166,7 @@ pub fn deinit(this: *SavedSourceMap) void { } pub fn putMappings(this: 
*SavedSourceMap, source: *const logger.Source, mappings: MutableString) !void { - try this.putValue(source.path.text, Value.init(bun.cast(*SavedMappings, mappings.list.items.ptr))); + try this.putValue(source.path.text, Value.init(bun.cast(*SavedMappings, try bun.default_allocator.dupe(u8, mappings.list.items)))); } pub fn putValue(this: *SavedSourceMap, path: []const u8, value: Value) !void { diff --git a/src/bun.js/VirtualMachine.zig b/src/bun.js/VirtualMachine.zig index 67bf251a07..0525e201fc 100644 --- a/src/bun.js/VirtualMachine.zig +++ b/src/bun.js/VirtualMachine.zig @@ -194,6 +194,8 @@ commonjs_custom_extensions: bun.StringArrayHashMapUnmanaged(node_module_module.C /// The value is decremented when defaults are restored. has_mutated_built_in_extensions: u32 = 0, +initial_script_execution_context_identifier: i32, + pub const ProcessAutoKiller = @import("./ProcessAutoKiller.zig"); pub const OnUnhandledRejection = fn (*VirtualMachine, globalObject: *JSGlobalObject, JSValue) void; @@ -367,7 +369,7 @@ const SourceMapHandlerGetter = struct { pub fn onChunk(this: *SourceMapHandlerGetter, chunk: SourceMap.Chunk, source: *const logger.Source) anyerror!void { var temp_json_buffer = bun.MutableString.initEmpty(bun.default_allocator); defer temp_json_buffer.deinit(); - temp_json_buffer = try chunk.printSourceMapContentsAtOffset(source, temp_json_buffer, true, SavedSourceMap.vlq_offset, true); + try chunk.printSourceMapContentsAtOffset(source, &temp_json_buffer, true, SavedSourceMap.vlq_offset, true); const source_map_url_prefix_start = "//# sourceMappingURL=data:application/json;base64,"; // TODO: do we need to %-encode the path? const source_url_len = source.path.text.len; @@ -984,6 +986,7 @@ pub fn initWithModuleGraph( .standalone_module_graph = opts.graph.?, .debug_thread_id = if (Environment.allow_assert) std.Thread.getCurrentId(), .destruct_main_thread_on_exit = opts.destruct_main_thread_on_exit, + .initial_script_execution_context_identifier = if (opts.is_main_thread) 1 else std.math.maxInt(i32), }; vm.source_mappings.init(&vm.saved_source_map_table); vm.regular_event_loop.tasks = EventLoop.Queue.init( @@ -1016,7 +1019,7 @@ pub fn initWithModuleGraph( vm.global = JSGlobalObject.create( vm, vm.console, - if (opts.is_main_thread) 1 else std.math.maxInt(i32), + vm.initial_script_execution_context_identifier, false, false, null, @@ -1105,6 +1108,7 @@ pub fn init(opts: Options) !*VirtualMachine { .ref_strings_mutex = .{}, .debug_thread_id = if (Environment.allow_assert) std.Thread.getCurrentId(), .destruct_main_thread_on_exit = opts.destruct_main_thread_on_exit, + .initial_script_execution_context_identifier = if (opts.is_main_thread) 1 else std.math.maxInt(i32), }; vm.source_mappings.init(&vm.saved_source_map_table); vm.regular_event_loop.tasks = EventLoop.Queue.init( @@ -1134,7 +1138,7 @@ pub fn init(opts: Options) !*VirtualMachine { vm.global = JSGlobalObject.create( vm, vm.console, - if (opts.is_main_thread) 1 else std.math.maxInt(i32), + vm.initial_script_execution_context_identifier, opts.smol, opts.eval, null, @@ -1264,6 +1268,7 @@ pub fn initWorker( .debug_thread_id = if (Environment.allow_assert) std.Thread.getCurrentId(), // This option is irrelevant for Workers .destruct_main_thread_on_exit = false, + .initial_script_execution_context_identifier = @as(i32, @intCast(worker.execution_context_id)), }; vm.source_mappings.init(&vm.saved_source_map_table); vm.regular_event_loop.tasks = EventLoop.Queue.init( @@ -1297,7 +1302,7 @@ pub fn initWorker( vm.global = JSGlobalObject.create( vm, 
vm.console, - @as(i32, @intCast(worker.execution_context_id)), + vm.initial_script_execution_context_identifier, worker.mini, opts.eval, worker.cpp_worker, @@ -1355,6 +1360,7 @@ pub fn initBake(opts: Options) anyerror!*VirtualMachine { .ref_strings_mutex = .{}, .debug_thread_id = if (Environment.allow_assert) std.Thread.getCurrentId(), .destruct_main_thread_on_exit = opts.destruct_main_thread_on_exit, + .initial_script_execution_context_identifier = if (opts.is_main_thread) 1 else std.math.maxInt(i32), }; vm.source_mappings.init(&vm.saved_source_map_table); vm.regular_event_loop.tasks = EventLoop.Queue.init( diff --git a/src/bun.js/api/Timer/WTFTimer.zig b/src/bun.js/api/Timer/WTFTimer.zig index e91d0d321e..e93883d760 100644 --- a/src/bun.js/api/Timer/WTFTimer.zig +++ b/src/bun.js/api/Timer/WTFTimer.zig @@ -14,6 +14,7 @@ event_loop_timer: EventLoopTimer, imminent: *std.atomic.Value(?*WTFTimer), repeat: bool, lock: bun.Mutex = .{}, +script_execution_context_id: bun.webcore.ScriptExecutionContext.Identifier, const new = bun.TrivialNew(WTFTimer); @@ -56,9 +57,13 @@ pub fn update(this: *WTFTimer, seconds: f64, repeat: bool) void { pub fn cancel(this: *WTFTimer) void { this.lock.lock(); defer this.lock.unlock(); - this.imminent.store(null, .seq_cst); - if (this.event_loop_timer.state == .ACTIVE) { - this.vm.timer.remove(&this.event_loop_timer); + + if (this.script_execution_context_id.valid()) { + this.imminent.store(null, .seq_cst); + + if (this.event_loop_timer.state == .ACTIVE) { + this.vm.timer.remove(&this.event_loop_timer); + } } } @@ -97,6 +102,7 @@ export fn WTFTimer__create(run_loop_timer: *RunLoopTimer) ?*anyopaque { }, .run_loop_timer = run_loop_timer, .repeat = false, + .script_execution_context_id = @enumFromInt(vm.initial_script_execution_context_identifier), }); return this; diff --git a/src/bun.js/api/bun/socket.zig b/src/bun.js/api/bun/socket.zig index 28efbef99d..119bbe17a3 100644 --- a/src/bun.js/api/bun/socket.zig +++ b/src/bun.js/api/bun/socket.zig @@ -60,8 +60,7 @@ pub fn NewSocket(comptime ssl: bool) type { flags: Flags = .{}, ref_count: RefCount, wrapped: WrappedType = .none, - // TODO: make this optional - handlers: *Handlers, + handlers: ?*Handlers, this_value: jsc.JSValue = .zero, poll_ref: Async.KeepAlive = Async.KeepAlive.init(), ref_pollref_on_connect: bool = true, @@ -208,7 +207,7 @@ pub fn NewSocket(comptime ssl: bool) type { pub fn handleError(this: *This, err_value: jsc.JSValue) void { log("handleError", .{}); - const handlers = this.handlers; + const handlers = this.getHandlers(); var vm = handlers.vm; if (vm.isShuttingDown()) { return; @@ -226,7 +225,7 @@ pub fn NewSocket(comptime ssl: bool) type { jsc.markBinding(@src()); if (this.socket.isDetached()) return; if (this.native_callback.onWritable()) return; - const handlers = this.handlers; + const handlers = this.getHandlers(); const callback = handlers.onWritable; if (callback == .zero) return; @@ -256,8 +255,8 @@ pub fn NewSocket(comptime ssl: bool) type { pub fn onTimeout(this: *This, _: Socket) void { jsc.markBinding(@src()); if (this.socket.isDetached()) return; - log("onTimeout {s}", .{if (this.handlers.is_server) "S" else "C"}); - const handlers = this.handlers; + const handlers = this.getHandlers(); + log("onTimeout {s}", .{if (handlers.is_server) "S" else "C"}); const callback = handlers.onTimeout; if (callback == .zero or this.flags.finalizing) return; if (handlers.vm.isShuttingDown()) { @@ -276,8 +275,13 @@ pub fn NewSocket(comptime ssl: bool) type { }; } + pub fn getHandlers(this: *const This) 
*Handlers { + return this.handlers orelse @panic("No handlers set on Socket"); + } + pub fn handleConnectError(this: *This, errno: c_int) void { - log("onConnectError {s} ({d}, {d})", .{ if (this.handlers.is_server) "S" else "C", errno, this.ref_count.active_counts }); + const handlers = this.getHandlers(); + log("onConnectError {s} ({d}, {d})", .{ if (handlers.is_server) "S" else "C", errno, this.ref_count.active_counts }); // Ensure the socket is still alive for any defer's we have this.ref(); defer this.deref(); @@ -288,7 +292,6 @@ pub fn NewSocket(comptime ssl: bool) type { defer this.markInactive(); defer if (needs_deref) this.deref(); - const handlers = this.handlers; const vm = handlers.vm; this.poll_ref.unrefOnNextTick(vm); if (vm.isShuttingDown()) { @@ -357,7 +360,7 @@ pub fn NewSocket(comptime ssl: bool) type { pub fn markActive(this: *This) void { if (!this.flags.is_active) { - this.handlers.markActive(); + this.getHandlers().markActive(); this.flags.is_active = true; this.has_pending_activity.store(true, .release); } @@ -385,15 +388,20 @@ pub fn NewSocket(comptime ssl: bool) type { } this.flags.is_active = false; - const vm = this.handlers.vm; - this.handlers.markInactive(); + const handlers = this.getHandlers(); + const vm = handlers.vm; + handlers.markInactive(); this.poll_ref.unref(vm); this.has_pending_activity.store(false, .release); } } + pub fn isServer(this: *const This) bool { + return this.getHandlers().is_server; + } + pub fn onOpen(this: *This, socket: Socket) void { - log("onOpen {s} {*} {} {}", .{ if (this.handlers.is_server) "S" else "C", this, this.socket.isDetached(), this.ref_count.active_counts }); + log("onOpen {s} {*} {} {}", .{ if (this.isServer()) "S" else "C", this, this.socket.isDetached(), this.ref_count.active_counts }); // Ensure the socket remains alive until this is finished this.ref(); defer this.deref(); @@ -425,7 +433,7 @@ pub fn NewSocket(comptime ssl: bool) type { } } if (this.protos) |protos| { - if (this.handlers.is_server) { + if (this.isServer()) { BoringSSL.SSL_CTX_set_alpn_select_cb(BoringSSL.SSL_get_SSL_CTX(ssl_ptr), selectALPNCallback, bun.cast(*anyopaque, this)); } else { _ = BoringSSL.SSL_set_alpn_protos(ssl_ptr, protos.ptr, @as(c_uint, @intCast(protos.len))); @@ -441,7 +449,7 @@ pub fn NewSocket(comptime ssl: bool) type { } } - const handlers = this.handlers; + const handlers = this.getHandlers(); const callback = handlers.onOpen; const handshake_callback = handlers.onHandshake; @@ -493,13 +501,12 @@ pub fn NewSocket(comptime ssl: bool) type { pub fn onEnd(this: *This, _: Socket) void { jsc.markBinding(@src()); if (this.socket.isDetached()) return; - log("onEnd {s}", .{if (this.handlers.is_server) "S" else "C"}); + const handlers = this.getHandlers(); + log("onEnd {s}", .{if (handlers.is_server) "S" else "C"}); // Ensure the socket remains alive until this is finished this.ref(); defer this.deref(); - const handlers = this.handlers; - const callback = handlers.onEnd; if (callback == .zero or handlers.vm.isShuttingDown()) { this.poll_ref.unref(handlers.vm); @@ -525,13 +532,13 @@ pub fn NewSocket(comptime ssl: bool) type { jsc.markBinding(@src()); this.flags.handshake_complete = true; if (this.socket.isDetached()) return; - log("onHandshake {s} ({d})", .{ if (this.handlers.is_server) "S" else "C", success }); + const handlers = this.getHandlers(); + log("onHandshake {s} ({d})", .{ if (handlers.is_server) "S" else "C", success }); const authorized = if (success == 1) true else false; this.flags.authorized = authorized; - const handlers 
= this.handlers; var callback = handlers.onHandshake; var is_open = false; @@ -567,8 +574,8 @@ pub fn NewSocket(comptime ssl: bool) type { // clean onOpen callback so only called in the first handshake and not in every renegotiation // on servers this would require a different approach but it's not needed because our servers will not call handshake multiple times // servers don't support renegotiation - this.handlers.onOpen.unprotect(); - this.handlers.onOpen = .zero; + this.handlers.?.onOpen.unprotect(); + this.handlers.?.onOpen = .zero; } } else { // call handhsake callback with authorized and authorization error if has one @@ -591,7 +598,8 @@ pub fn NewSocket(comptime ssl: bool) type { pub fn onClose(this: *This, _: Socket, err: c_int, _: ?*anyopaque) void { jsc.markBinding(@src()); - log("onClose {s}", .{if (this.handlers.is_server) "S" else "C"}); + const handlers = this.getHandlers(); + log("onClose {s}", .{if (handlers.is_server) "S" else "C"}); this.detachNativeCallback(); this.socket.detach(); defer this.deref(); @@ -601,7 +609,6 @@ pub fn NewSocket(comptime ssl: bool) type { return; } - const handlers = this.handlers; const vm = handlers.vm; this.poll_ref.unref(vm); @@ -638,10 +645,10 @@ pub fn NewSocket(comptime ssl: bool) type { pub fn onData(this: *This, _: Socket, data: []const u8) void { jsc.markBinding(@src()); if (this.socket.isDetached()) return; - log("onData {s} ({d})", .{ if (this.handlers.is_server) "S" else "C", data.len }); + const handlers = this.getHandlers(); + log("onData {s} ({d})", .{ if (handlers.is_server) "S" else "C", data.len }); if (this.native_callback.onData(data)) return; - const handlers = this.handlers; const callback = handlers.onData; if (callback == .zero or this.flags.finalizing) return; if (handlers.vm.isShuttingDown()) { @@ -680,11 +687,13 @@ pub fn NewSocket(comptime ssl: bool) type { } pub fn getListener(this: *This, _: *jsc.JSGlobalObject) JSValue { - if (!this.handlers.is_server or this.socket.isDetached()) { + const handlers = this.getHandlers(); + + if (!handlers.is_server or this.socket.isDetached()) { return .js_undefined; } - const l: *Listener = @fieldParentPtr("handlers", this.handlers); + const l: *Listener = @fieldParentPtr("handlers", handlers); return l.strong_self.get() orelse .js_undefined; } @@ -1341,13 +1350,14 @@ pub fn NewSocket(comptime ssl: bool) type { return globalObject.throw("Expected \"socket\" option", .{}); }; - const handlers = try Handlers.fromJS(globalObject, socket_obj, this.handlers.is_server); + var prev_handlers = this.getHandlers(); + + const handlers = try Handlers.fromJS(globalObject, socket_obj, prev_handlers.is_server); - var prev_handlers = this.handlers; prev_handlers.unprotect(); - this.handlers.* = handlers; // TODO: this is a memory leak - this.handlers.withAsyncContextIfNeeded(globalObject); - this.handlers.protect(); + this.handlers.?.* = handlers; // TODO: this is a memory leak + this.handlers.?.withAsyncContextIfNeeded(globalObject); + this.handlers.?.protect(); return .js_undefined; } @@ -1389,7 +1399,7 @@ pub fn NewSocket(comptime ssl: bool) type { return .zero; } - var handlers = try Handlers.fromJS(globalObject, socket_obj, this.handlers.is_server); + var handlers = try Handlers.fromJS(globalObject, socket_obj, this.isServer()); if (globalObject.hasException()) { return .zero; @@ -1519,20 +1529,23 @@ pub fn NewSocket(comptime ssl: bool) type { const vm = handlers.vm; var raw_handlers_ptr = bun.default_allocator.create(Handlers) catch bun.outOfMemory(); - raw_handlers_ptr.* = .{ - .vm = vm, 
- .globalObject = globalObject, - .onOpen = this.handlers.onOpen, - .onClose = this.handlers.onClose, - .onData = this.handlers.onData, - .onWritable = this.handlers.onWritable, - .onTimeout = this.handlers.onTimeout, - .onConnectError = this.handlers.onConnectError, - .onEnd = this.handlers.onEnd, - .onError = this.handlers.onError, - .onHandshake = this.handlers.onHandshake, - .binary_type = this.handlers.binary_type, - .is_server = this.handlers.is_server, + raw_handlers_ptr.* = blk: { + const this_handlers = this.getHandlers(); + break :blk .{ + .vm = vm, + .globalObject = globalObject, + .onOpen = this_handlers.onOpen, + .onClose = this_handlers.onClose, + .onData = this_handlers.onData, + .onWritable = this_handlers.onWritable, + .onTimeout = this_handlers.onTimeout, + .onConnectError = this_handlers.onConnectError, + .onEnd = this_handlers.onEnd, + .onError = this_handlers.onError, + .onHandshake = this_handlers.onHandshake, + .binary_type = this_handlers.binary_type, + .is_server = this_handlers.is_server, + }; }; raw_handlers_ptr.protect(); @@ -1562,7 +1575,7 @@ pub fn NewSocket(comptime ssl: bool) type { tls.markActive(); // we're unrefing the original instance and refing the TLS instance - tls.poll_ref.ref(this.handlers.vm); + tls.poll_ref.ref(this.getHandlers().vm); // mark both instances on socket data if (new_socket.ext(WrappedSocket)) |ctx| { @@ -1574,7 +1587,7 @@ pub fn NewSocket(comptime ssl: bool) type { this.flags.is_active = false; // will free handlers when hits 0 active connections // the connection can be upgraded inside a handler call so we need to guarantee that it will be still alive - this.handlers.markInactive(); + this.getHandlers().markInactive(); this.has_pending_activity.store(false, .release); } diff --git a/src/bun.js/api/bun/socket/Listener.zig b/src/bun.js/api/bun/socket/Listener.zig index d87cd2bf6d..e241a05542 100644 --- a/src/bun.js/api/bun/socket/Listener.zig +++ b/src/bun.js/api/bun/socket/Listener.zig @@ -626,7 +626,9 @@ pub fn connectInner(globalObject: *jsc.JSGlobalObject, prev_maybe_tcp: ?*TCPSock if (ssl_enabled) { var tls = if (prev_maybe_tls) |prev| blk: { - bun.destroy(prev.handlers); + if (prev.handlers) |prev_handlers| { + bun.destroy(prev_handlers); + } bun.assert(prev.this_value != .zero); prev.handlers = handlers_ptr; bun.assert(prev.socket.socket == .detached); diff --git a/src/bun.js/api/bun/socket/tls_socket_functions.zig b/src/bun.js/api/bun/socket/tls_socket_functions.zig index 37e6b84c42..43ee2f9a6b 100644 --- a/src/bun.js/api/bun/socket/tls_socket_functions.zig +++ b/src/bun.js/api/bun/socket/tls_socket_functions.zig @@ -9,7 +9,7 @@ pub fn getServername(this: *This, globalObject: *jsc.JSGlobalObject, _: *jsc.Cal } pub fn setServername(this: *This, globalObject: *jsc.JSGlobalObject, callframe: *jsc.CallFrame) bun.JSError!JSValue { - if (this.handlers.is_server) { + if (this.isServer()) { return globalObject.throw("Cannot issue SNI from a TLS server-side socket", .{}); } @@ -118,7 +118,7 @@ pub fn getPeerCertificate(this: *This, globalObject: *jsc.JSGlobalObject, callfr const ssl_ptr = this.socket.ssl() orelse return .js_undefined; if (abbreviated) { - if (this.handlers.is_server) { + if (this.isServer()) { const cert = BoringSSL.SSL_get_peer_certificate(ssl_ptr); if (cert) |x509| { return X509.toJS(x509, globalObject); @@ -130,7 +130,7 @@ pub fn getPeerCertificate(this: *This, globalObject: *jsc.JSGlobalObject, callfr return X509.toJS(cert, globalObject); } var cert: ?*BoringSSL.X509 = null; - if (this.handlers.is_server) { + if 
(this.isServer()) { cert = BoringSSL.SSL_get_peer_certificate(ssl_ptr); } @@ -380,7 +380,7 @@ pub fn exportKeyingMaterial(this: *This, globalObject: *jsc.JSGlobalObject, call pub fn getEphemeralKeyInfo(this: *This, globalObject: *jsc.JSGlobalObject, _: *jsc.CallFrame) bun.JSError!JSValue { // only available for clients - if (this.handlers.is_server) { + if (this.isServer()) { return JSValue.jsNull(); } var result = JSValue.createEmptyObject(globalObject, 3); @@ -553,7 +553,7 @@ pub fn setVerifyMode(this: *This, globalObject: *jsc.JSGlobalObject, callframe: const request_cert = request_cert_js.toBoolean(); const reject_unauthorized = request_cert_js.toBoolean(); var verify_mode: c_int = BoringSSL.SSL_VERIFY_NONE; - if (this.handlers.is_server) { + if (this.isServer()) { if (request_cert) { verify_mode = BoringSSL.SSL_VERIFY_PEER; if (reject_unauthorized) diff --git a/src/bun.js/bindings/ScriptExecutionContext.cpp b/src/bun.js/bindings/ScriptExecutionContext.cpp index e43991ceb6..721e437026 100644 --- a/src/bun.js/bindings/ScriptExecutionContext.cpp +++ b/src/bun.js/bindings/ScriptExecutionContext.cpp @@ -69,8 +69,11 @@ static HashMap& allSc ScriptExecutionContext* ScriptExecutionContext::getScriptExecutionContext(ScriptExecutionContextIdentifier identifier) { + if (identifier == 0) { + return nullptr; + } Locker locker { allScriptExecutionContextsMapLock }; - return allScriptExecutionContextsMap().get(identifier); + return allScriptExecutionContextsMap().getOptional(identifier).value_or(nullptr); } template diff --git a/src/bun.js/bindings/workaround-missing-symbols.cpp b/src/bun.js/bindings/workaround-missing-symbols.cpp index 928db9754f..15703aa7b9 100644 --- a/src/bun.js/bindings/workaround-missing-symbols.cpp +++ b/src/bun.js/bindings/workaround-missing-symbols.cpp @@ -299,3 +299,5 @@ extern "C" bool icu_hasBinaryProperty(UChar32 cp, unsigned int prop) { return u_hasBinaryProperty(cp, static_cast(prop)); } + +extern "C" __attribute__((weak)) void mi_thread_set_in_threadpool() {} diff --git a/src/bun.js/node/node_net_binding.zig b/src/bun.js/node/node_net_binding.zig index 1bb908171b..ee9be71556 100644 --- a/src/bun.js/node/node_net_binding.zig +++ b/src/bun.js/node/node_net_binding.zig @@ -80,7 +80,7 @@ pub fn newDetachedSocket(globalThis: *jsc.JSGlobalObject, callframe: *jsc.CallFr .socket_context = null, .ref_count = .init(), .protos = null, - .handlers = undefined, + .handlers = null, }); return socket.getThisValue(globalThis); } else { @@ -89,7 +89,7 @@ pub fn newDetachedSocket(globalThis: *jsc.JSGlobalObject, callframe: *jsc.CallFr .socket_context = null, .ref_count = .init(), .protos = null, - .handlers = undefined, + .handlers = null, }); return socket.getThisValue(globalThis); } diff --git a/src/bun.js/web_worker.zig b/src/bun.js/web_worker.zig index 093fa17664..1b561dce7a 100644 --- a/src/bun.js/web_worker.zig +++ b/src/bun.js/web_worker.zig @@ -312,12 +312,12 @@ pub fn start( this.arena = try bun.MimallocArena.init(); var vm = try jsc.VirtualMachine.initWorker(this, .{ - .allocator = this.arena.?.allocator(), + .allocator = bun.default_allocator, .args = transform_options, .store_fd = this.store_fd, .graph = this.parent.standalone_module_graph, }); - vm.allocator = this.arena.?.allocator(); + vm.allocator = bun.default_allocator; vm.arena = &this.arena.?; var b = &vm.transpiler; diff --git a/src/bun.js/webcore/ScriptExecutionContext.zig b/src/bun.js/webcore/ScriptExecutionContext.zig index 7cfc11f899..002a698ea3 100644 --- a/src/bun.js/webcore/ScriptExecutionContext.zig +++ 
b/src/bun.js/webcore/ScriptExecutionContext.zig @@ -15,6 +15,10 @@ pub const Identifier = enum(u32) { // concurrently because we expect these identifiers are mostly used by off-thread tasks return (self.globalObject() orelse return null).bunVMConcurrently(); } + + pub fn valid(self: Identifier) bool { + return self.globalObject() != null; + } }; const bun = @import("bun"); diff --git a/src/bundler/LinkerContext.zig b/src/bundler/LinkerContext.zig index b3659c1fb6..083efafd20 100644 --- a/src/bundler/LinkerContext.zig +++ b/src/bundler/LinkerContext.zig @@ -22,8 +22,6 @@ pub const LinkerContext = struct { options: LinkerOptions = .{}, - ambiguous_result_pool: std.ArrayList(MatchImport) = undefined, - loop: EventLoop, /// string buffer containing pre-formatted unique keys @@ -147,18 +145,25 @@ pub const LinkerContext = struct { ); } - pub fn computeQuotedSourceContents(this: *LinkerContext, allocator: std.mem.Allocator, source_index: Index.Int) void { + pub fn computeQuotedSourceContents(this: *LinkerContext, _: std.mem.Allocator, source_index: Index.Int) void { debug("Computing Quoted Source Contents: {d}", .{source_index}); const loader: options.Loader = this.parse_graph.input_files.items(.loader)[source_index]; - const quoted_source_contents: *string = &this.graph.files.items(.quoted_source_contents)[source_index]; + const quoted_source_contents: *?[]u8 = &this.graph.files.items(.quoted_source_contents)[source_index]; if (!loader.canHaveSourceMap()) { - quoted_source_contents.* = ""; + if (quoted_source_contents.*) |slice| { + bun.default_allocator.free(slice); + quoted_source_contents.* = null; + } return; } const source: *const Logger.Source = &this.parse_graph.input_files.items(.source)[source_index]; - const mutable = MutableString.initEmpty(allocator); - quoted_source_contents.* = (js_printer.quoteForJSON(source.contents, mutable, false) catch bun.outOfMemory()).list.items; + var mutable = MutableString.initEmpty(bun.default_allocator); + js_printer.quoteForJSON(source.contents, &mutable, false) catch bun.outOfMemory(); + if (quoted_source_contents.*) |slice| { + bun.default_allocator.free(slice); + } + quoted_source_contents.* = mutable.slice(); } }; @@ -208,7 +213,6 @@ pub const LinkerContext = struct { try this.graph.load(entry_points, sources, server_component_boundaries, bundle.dynamic_import_entry_points.keys()); bundle.dynamic_import_entry_points.deinit(); - this.ambiguous_result_pool = std.ArrayList(MatchImport).init(this.allocator); var runtime_named_exports = &this.graph.ast.items(.named_exports)[Index.runtime.get()]; @@ -709,8 +713,8 @@ pub const LinkerContext = struct { } var quote_buf = try MutableString.init(worker.allocator, path.pretty.len + 2); - quote_buf = try js_printer.quoteForJSON(path.pretty, quote_buf, false); - j.pushStatic(quote_buf.list.items); // freed by arena + try js_printer.quoteForJSON(path.pretty, "e_buf, false); + j.pushStatic(quote_buf.slice()); // freed by arena } var next_mapping_source_index: i32 = 1; @@ -730,8 +734,8 @@ pub const LinkerContext = struct { var quote_buf = try MutableString.init(worker.allocator, path.pretty.len + ", ".len + 2); quote_buf.appendAssumeCapacity(", "); - quote_buf = try js_printer.quoteForJSON(path.pretty, quote_buf, false); - j.pushStatic(quote_buf.list.items); // freed by arena + try js_printer.quoteForJSON(path.pretty, "e_buf, false); + j.pushStatic(quote_buf.slice()); // freed by arena } } @@ -743,11 +747,11 @@ pub const LinkerContext = struct { const source_indices_for_contents = source_id_map.keys(); if 
(source_indices_for_contents.len > 0) { j.pushStatic("\n "); - j.pushStatic(quoted_source_map_contents[source_indices_for_contents[0]]); + j.pushStatic(quoted_source_map_contents[source_indices_for_contents[0]] orelse ""); for (source_indices_for_contents[1..]) |index| { j.pushStatic(",\n "); - j.pushStatic(quoted_source_map_contents[index]); + j.pushStatic(quoted_source_map_contents[index] orelse ""); } } j.pushStatic( @@ -2417,7 +2421,11 @@ pub const LinkerContext = struct { // 4. externals return .{ .joiner = j.* }; - var pieces = try std.ArrayList(OutputPiece).initCapacity(allocator, count); + var pieces = brk: { + errdefer j.deinit(); + break :brk try std.ArrayList(OutputPiece).initCapacity(allocator, count); + }; + errdefer pieces.deinit(); const complete_output = try j.done(allocator); var output = complete_output; diff --git a/src/bundler/LinkerGraph.zig b/src/bundler/LinkerGraph.zig index 45330f3d93..1fabfea6d0 100644 --- a/src/bundler/LinkerGraph.zig +++ b/src/bundler/LinkerGraph.zig @@ -429,7 +429,7 @@ pub const File = struct { entry_point_chunk_index: u32 = std.math.maxInt(u32), line_offset_table: bun.sourcemap.LineOffsetTable.List = .empty, - quoted_source_contents: string = "", + quoted_source_contents: ?[]u8 = null, pub fn isEntryPoint(this: *const File) bool { return this.entry_point_kind.isEntryPoint(); diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig index 195ad49cda..da5dacd792 100644 --- a/src/bundler/bundle_v2.zig +++ b/src/bundler/bundle_v2.zig @@ -3879,6 +3879,21 @@ pub const CompileResult = union(enum) { javascript: struct { source_index: Index.Int, result: js_printer.PrintResult, + + pub fn code(this: @This()) []const u8 { + return switch (this.result) { + .result => |result| result.code, + else => "", + }; + } + + pub fn allocator(this: @This()) std.mem.Allocator { + return switch (this.result) { + .result => |result| result.code_allocator, + // empty slice can be freed by any allocator + else => bun.default_allocator, + }; + } }, css: struct { result: bun.Maybe([]const u8, anyerror), @@ -3898,6 +3913,7 @@ pub const CompileResult = union(enum) { .result = js_printer.PrintResult{ .result = .{ .code = "", + .code_allocator = bun.default_allocator, }, }, }, @@ -3905,10 +3921,7 @@ pub const CompileResult = union(enum) { pub fn code(this: *const CompileResult) []const u8 { return switch (this.*) { - .javascript => |r| switch (r.result) { - .result => |r2| r2.code, - else => "", - }, + .javascript => |r| r.code(), .css => |*c| switch (c.result) { .result => |v| v, .err => "", @@ -3917,6 +3930,13 @@ pub const CompileResult = union(enum) { }; } + pub fn allocator(this: *const CompileResult) ?std.mem.Allocator { + return switch (this.*) { + .javascript => |js| js.allocator(), + else => null, + }; + } + pub fn sourceMapChunk(this: *const CompileResult) ?sourcemap.Chunk { return switch (this.*) { .javascript => |r| switch (r.result) { diff --git a/src/bundler/linker_context/generateChunksInParallel.zig b/src/bundler/linker_context/generateChunksInParallel.zig index e3d868f846..d4a33f371c 100644 --- a/src/bundler/linker_context/generateChunksInParallel.zig +++ b/src/bundler/linker_context/generateChunksInParallel.zig @@ -71,7 +71,7 @@ pub fn generateChunksInParallel( } { - const chunk_contexts = c.allocator.alloc(GenerateChunkCtx, chunks.len) catch unreachable; + const chunk_contexts = c.allocator.alloc(GenerateChunkCtx, chunks.len) catch bun.outOfMemory(); defer c.allocator.free(chunk_contexts); { @@ -102,7 +102,7 @@ pub fn generateChunksInParallel( debug(" 
START {d} compiling part ranges", .{total_count}); defer debug(" DONE {d} compiling part ranges", .{total_count}); - const combined_part_ranges = c.allocator.alloc(PendingPartRange, total_count) catch unreachable; + const combined_part_ranges = c.allocator.alloc(PendingPartRange, total_count) catch bun.outOfMemory(); defer c.allocator.free(combined_part_ranges); var remaining_part_ranges = combined_part_ranges; var batch = ThreadPoolLib.Batch{}; @@ -315,7 +315,7 @@ pub fn generateChunksInParallel( } const bundler = @as(*bun.bundle_v2.BundleV2, @fieldParentPtr("linker", c)); - var static_route_visitor = StaticRouteVisitor{ .c = c, .visited = bun.bit_set.AutoBitSet.initEmpty(bun.default_allocator, c.graph.files.len) catch unreachable }; + var static_route_visitor = StaticRouteVisitor{ .c = c, .visited = bun.bit_set.AutoBitSet.initEmpty(bun.default_allocator, c.graph.files.len) catch bun.outOfMemory() }; defer static_route_visitor.deinit(); if (root_path.len > 0) { @@ -354,7 +354,7 @@ pub fn generateChunksInParallel( switch (chunk.content.sourcemap(c.options.source_maps)) { .external, .linked => |tag| { const output_source_map = chunk.output_source_map.finalize(bun.default_allocator, code_result.shifts) catch @panic("Failed to allocate memory for external source map"); - var source_map_final_rel_path = bun.default_allocator.alloc(u8, chunk.final_rel_path.len + ".map".len) catch unreachable; + var source_map_final_rel_path = bun.default_allocator.alloc(u8, chunk.final_rel_path.len + ".map".len) catch bun.outOfMemory(); bun.copy(u8, source_map_final_rel_path, chunk.final_rel_path); bun.copy(u8, source_map_final_rel_path[chunk.final_rel_path.len..], ".map"); @@ -443,8 +443,8 @@ pub fn generateChunksInParallel( fdpath[chunk.final_rel_path.len..][0..bun.bytecode_extension.len].* = bun.bytecode_extension.*; break :brk options.OutputFile.init(.{ - .output_path = bun.default_allocator.dupe(u8, source_provider_url_str.slice()) catch unreachable, - .input_path = std.fmt.allocPrint(bun.default_allocator, "{s}" ++ bun.bytecode_extension, .{chunk.final_rel_path}) catch unreachable, + .output_path = bun.default_allocator.dupe(u8, source_provider_url_str.slice()) catch bun.outOfMemory(), + .input_path = std.fmt.allocPrint(bun.default_allocator, "{s}" ++ bun.bytecode_extension, .{chunk.final_rel_path}) catch bun.outOfMemory(), .input_loader = .js, .hash = if (chunk.template.placeholder.hash != null) bun.hash(bytecode) else null, .output_kind = .bytecode, @@ -462,7 +462,7 @@ pub fn generateChunksInParallel( // an error c.log.addErrorFmt(null, Logger.Loc.Empty, bun.default_allocator, "Failed to generate bytecode for {s}", .{ chunk.final_rel_path, - }) catch unreachable; + }) catch bun.outOfMemory(); } } } diff --git a/src/bundler/linker_context/generateCodeForFileInChunkJS.zig b/src/bundler/linker_context/generateCodeForFileInChunkJS.zig index daa8a316dd..fd03de0e42 100644 --- a/src/bundler/linker_context/generateCodeForFileInChunkJS.zig +++ b/src/bundler/linker_context/generateCodeForFileInChunkJS.zig @@ -603,7 +603,8 @@ pub fn generateCodeForFileInChunkJS( if (out_stmts.len == 0) { return .{ .result = .{ - .code = &[_]u8{}, + .code = "", + .code_allocator = bun.default_allocator, .source_map = null, }, }; diff --git a/src/bundler/linker_context/generateCompileResultForJSChunk.zig b/src/bundler/linker_context/generateCompileResultForJSChunk.zig index 3996ab0ce8..cd0b13c8fc 100644 --- a/src/bundler/linker_context/generateCompileResultForJSChunk.zig +++ 
b/src/bundler/linker_context/generateCompileResultForJSChunk.zig @@ -64,8 +64,8 @@ fn generateCompileResultForJSChunkImpl(worker: *ThreadPool.Worker, c: *LinkerCon return .{ .javascript = .{ - .result = result, .source_index = part_range.source_index.get(), + .result = result, }, }; } diff --git a/src/bundler/linker_context/postProcessJSChunk.zig b/src/bundler/linker_context/postProcessJSChunk.zig index c8b696ceae..1d4b99a431 100644 --- a/src/bundler/linker_context/postProcessJSChunk.zig +++ b/src/bundler/linker_context/postProcessJSChunk.zig @@ -16,8 +16,8 @@ pub fn postProcessJSChunk(ctx: GenerateChunkCtx, worker: *ThreadPool.Worker, chu defer arena.deinit(); // Also generate the cross-chunk binding code - var cross_chunk_prefix: []u8 = &.{}; - var cross_chunk_suffix: []u8 = &.{}; + var cross_chunk_prefix: js_printer.PrintResult = undefined; + var cross_chunk_suffix: js_printer.PrintResult = undefined; var runtime_scope: *Scope = &c.graph.ast.items(.module_scope)[c.graph.files.items(.input_file)[Index.runtime.value].get()]; var runtime_members = &runtime_scope.members; @@ -68,7 +68,7 @@ pub fn postProcessJSChunk(ctx: GenerateChunkCtx, worker: *ThreadPool.Worker, chu }, chunk.renamer, false, - ).result.code; + ); cross_chunk_suffix = js_printer.print( worker.allocator, c.resolver.opts.target, @@ -81,7 +81,7 @@ pub fn postProcessJSChunk(ctx: GenerateChunkCtx, worker: *ThreadPool.Worker, chu }, chunk.renamer, false, - ).result.code; + ); } // Generate the exports for the entry point, if there are any @@ -107,6 +107,7 @@ pub fn postProcessJSChunk(ctx: GenerateChunkCtx, worker: *ThreadPool.Worker, chu .input = chunk.unique_key, }, }; + errdefer j.deinit(); const output_format = c.options.output_format; var line_offset: bun.sourcemap.LineColumnOffset.Optional = if (c.options.source_maps != .none) .{ .value = .{} } else .{ .null = {} }; @@ -119,7 +120,7 @@ pub fn postProcessJSChunk(ctx: GenerateChunkCtx, worker: *ThreadPool.Worker, chu // Start with the hashbang if there is one. This must be done before the // banner because it only works if it's literally the first character. if (chunk.isEntryPoint()) { - const is_bun = ctx.c.graph.ast.items(.target)[chunk.entry_point.source_index].isBun(); + const is_bun = c.graph.ast.items(.target)[chunk.entry_point.source_index].isBun(); const hashbang = c.graph.ast.items(.hashbang)[chunk.entry_point.source_index]; if (hashbang.len > 0) { @@ -199,10 +200,10 @@ pub fn postProcessJSChunk(ctx: GenerateChunkCtx, worker: *ThreadPool.Worker, chu else => {}, // no wrapper } - if (cross_chunk_prefix.len > 0) { + if (cross_chunk_prefix.result.code.len > 0) { newline_before_comment = true; - line_offset.advance(cross_chunk_prefix); - j.push(cross_chunk_prefix, bun.default_allocator); + line_offset.advance(cross_chunk_prefix.result.code); + j.push(cross_chunk_prefix.result.code, cross_chunk_prefix.result.code_allocator); } // Concatenate the generated JavaScript chunks together @@ -322,16 +323,16 @@ pub fn postProcessJSChunk(ctx: GenerateChunkCtx, worker: *ThreadPool.Worker, chu // Stick the entry point tail at the end of the file. Deliberately don't // include any source mapping information for this because it's automatically // generated and doesn't correspond to a location in the input file. 
- j.push(tail_code, bun.default_allocator); + j.push(tail_code, entry_point_tail.allocator()); } // Put the cross-chunk suffix inside the IIFE - if (cross_chunk_suffix.len > 0) { + if (cross_chunk_suffix.result.code.len > 0) { if (newline_before_comment) { j.pushStatic("\n"); } - j.push(cross_chunk_suffix, bun.default_allocator); + j.push(cross_chunk_suffix.result.code, cross_chunk_suffix.result.code_allocator); } switch (output_format) { @@ -354,7 +355,7 @@ pub fn postProcessJSChunk(ctx: GenerateChunkCtx, worker: *ThreadPool.Worker, chu { const input = c.parse_graph.input_files.items(.source)[chunk.entry_point.source_index].path; var buf = MutableString.initEmpty(worker.allocator); - js_printer.quoteForJSONBuffer(input.pretty, &buf, true) catch bun.outOfMemory(); + js_printer.quoteForJSON(input.pretty, &buf, true) catch bun.outOfMemory(); const str = buf.slice(); // worker.allocator is an arena j.pushStatic(str); line_offset.advance(str); @@ -815,6 +816,7 @@ pub fn generateEntryPointTailJS( .source_index = source_index, .result = .{ .result = .{ .code = "", + .code_allocator = bun.default_allocator, } }, }, }; diff --git a/src/http/HTTPThread.zig b/src/http/HTTPThread.zig index 354cf93483..deb4187602 100644 --- a/src/http/HTTPThread.zig +++ b/src/http/HTTPThread.zig @@ -196,7 +196,7 @@ pub fn init(opts: *const InitOpts) void { pub fn onStart(opts: InitOpts) void { Output.Source.configureNamedThread("HTTP Client"); bun.http.default_arena = Arena.init() catch unreachable; - bun.http.default_allocator = bun.http.default_arena.allocator(); + bun.http.default_allocator = bun.default_allocator; const loop = bun.jsc.MiniEventLoop.initGlobal(null); diff --git a/src/js_printer.zig b/src/js_printer.zig index 5d304f8def..60c453a573 100644 --- a/src/js_printer.zig +++ b/src/js_printer.zig @@ -145,12 +145,6 @@ pub fn estimateLengthForUTF8(input: []const u8, comptime ascii_only: bool, compt return len; } -pub fn quoteForJSON(text: []const u8, output_: MutableString, comptime ascii_only: bool) !MutableString { - var bytes = output_; - try quoteForJSONBuffer(text, &bytes, ascii_only); - return bytes; -} - pub fn writePreQuotedString(text_in: []const u8, comptime Writer: type, writer: Writer, comptime quote_char: u8, comptime ascii_only: bool, comptime json: bool, comptime encoding: strings.Encoding) !void { const text = if (comptime encoding == .utf16) @as([]const u16, @alignCast(std.mem.bytesAsSlice(u16, text_in))) else text_in; if (comptime json and quote_char != '"') @compileError("for json, quote_char must be '\"'"); @@ -347,7 +341,7 @@ pub fn writePreQuotedString(text_in: []const u8, comptime Writer: type, writer: } } } -pub fn quoteForJSONBuffer(text: []const u8, bytes: *MutableString, comptime ascii_only: bool) !void { +pub fn quoteForJSON(text: []const u8, bytes: *MutableString, comptime ascii_only: bool) !void { const writer = bytes.writer(); try bytes.growIfNeeded(estimateLengthForUTF8(text, ascii_only, '"')); @@ -489,28 +483,14 @@ pub const RequireOrImportMeta = struct { }; pub const PrintResult = union(enum) { - result: struct { - code: []u8, - source_map: ?SourceMap.Chunk = null, - }, + result: Success, err: anyerror, - pub fn clone( - this: PrintResult, - allocator: std.mem.Allocator, - ) !PrintResult { - return switch (this) { - .result => PrintResult{ - .result = .{ - .code = try allocator.dupe(u8, this.result.code), - .source_map = this.result.source_map, - }, - }, - .err => PrintResult{ - .err = this.err, - }, - }; - } + pub const Success = struct { + code: []u8, + code_allocator: 
std.mem.Allocator, + source_map: ?SourceMap.Chunk = null, + }; }; // do not make this a packed struct @@ -5400,6 +5380,10 @@ pub fn NewWriter( return this.ctx.getMutableBuffer(); } + pub fn takeBuffer(this: *Self) MutableString { + return this.ctx.takeBuffer(); + } + pub fn slice(this: *Self) string { return this.ctx.slice(); } @@ -5504,6 +5488,11 @@ pub const BufferWriter = struct { return &this.buffer; } + pub fn takeBuffer(this: *BufferWriter) MutableString { + defer this.buffer = .initEmpty(this.buffer.allocator); + return this.buffer; + } + pub fn getWritten(this: *BufferWriter) []u8 { return this.buffer.list.items; } @@ -5808,11 +5797,13 @@ pub fn printAst( if (comptime FeatureFlags.runtime_transpiler_cache and generate_source_map) { if (opts.source_map_handler) |handler| { - const source_maps_chunk = printer.source_map_builder.generateChunk(printer.writer.ctx.getWritten()); + var source_maps_chunk = printer.source_map_builder.generateChunk(printer.writer.ctx.getWritten()); if (opts.runtime_transpiler_cache) |cache| { cache.put(printer.writer.ctx.getWritten(), source_maps_chunk.buffer.list.items); } + defer source_maps_chunk.deinit(); + try handler.onSourceMapChunk(source_maps_chunk, source); } else { if (opts.runtime_transpiler_cache) |cache| { @@ -5821,7 +5812,9 @@ pub fn printAst( } } else if (comptime generate_source_map) { if (opts.source_map_handler) |handler| { - try handler.onSourceMapChunk(printer.source_map_builder.generateChunk(printer.writer.ctx.getWritten()), source); + var chunk = printer.source_map_builder.generateChunk(printer.writer.ctx.getWritten()); + defer chunk.deinit(); + try handler.onSourceMapChunk(chunk, source); } } @@ -6011,9 +6004,12 @@ pub fn printWithWriterAndPlatform( break :brk chunk; } else null; + var buffer = printer.writer.takeBuffer(); + return .{ .result = .{ - .code = written, + .code = buffer.toOwnedSlice(), + .code_allocator = buffer.allocator, .source_map = source_map, }, }; @@ -6062,7 +6058,9 @@ pub fn printCommonJS( if (comptime generate_source_map) { if (opts.source_map_handler) |handler| { - try handler.onSourceMapChunk(printer.source_map_builder.generateChunk(printer.writer.ctx.getWritten()), source); + var chunk = printer.source_map_builder.generateChunk(printer.writer.ctx.getWritten()); + defer chunk.deinit(); + try handler.onSourceMapChunk(chunk, source); } } diff --git a/src/sourcemap/sourcemap.zig b/src/sourcemap/sourcemap.zig index 20e3096910..606db47050 100644 --- a/src/sourcemap/sourcemap.zig +++ b/src/sourcemap/sourcemap.zig @@ -1522,15 +1522,9 @@ pub fn appendSourceMapChunk( start_state.original_line += original_line.value; start_state.original_column += original_column.value; - j.push( - appendMappingToBuffer( - MutableString.initEmpty(allocator), - j.lastByte(), - prev_end_state, - start_state, - ).list.items, - allocator, - ); + var str = MutableString.initEmpty(allocator); + appendMappingToBuffer(&str, j.lastByte(), prev_end_state, start_state); + j.push(str.slice(), allocator); // Then append everything after that without modification. j.pushStatic(source_map); @@ -1555,8 +1549,7 @@ pub fn appendSourceMappingURLRemote( } /// This function is extremely hot. 
-pub fn appendMappingToBuffer(buffer_: MutableString, last_byte: u8, prev_state: SourceMapState, current_state: SourceMapState) MutableString {
-    var buffer = buffer_;
+pub fn appendMappingToBuffer(buffer: *MutableString, last_byte: u8, prev_state: SourceMapState, current_state: SourceMapState) void {
     const needs_comma = last_byte != 0 and last_byte != ';' and last_byte != '"';
 
     const vlqs = [_]VLQ{
@@ -1589,8 +1582,6 @@ pub fn appendMappingToBuffer(buffer_: MutableString, last_byte: u8, prev_state:
         @memcpy(writable[0..item.len], item.slice());
         writable = writable[item.len..];
     }
-
-    return buffer;
 }
 
 pub const Chunk = struct {
@@ -1610,22 +1601,28 @@ pub const Chunk = struct {
     /// ignore empty chunks
     should_ignore: bool = true,
 
-    pub const empty: Chunk = .{
-        .buffer = MutableString.initEmpty(bun.default_allocator),
-        .mappings_count = 0,
-        .end_state = .{},
-        .final_generated_column = 0,
-        .should_ignore = true,
-    };
+    pub fn initEmpty() Chunk {
+        return .{
+            .buffer = MutableString.initEmpty(bun.default_allocator),
+            .mappings_count = 0,
+            .end_state = .{},
+            .final_generated_column = 0,
+            .should_ignore = true,
+        };
+    }
+
+    pub fn deinit(this: *Chunk) void {
+        this.buffer.deinit();
+    }
 
     pub fn printSourceMapContents(
         chunk: Chunk,
         source: *const Logger.Source,
-        mutable: MutableString,
+        mutable: *MutableString,
         include_sources_contents: bool,
         comptime ascii_only: bool,
-    ) !MutableString {
-        return printSourceMapContentsAtOffset(
+    ) !void {
+        try printSourceMapContentsAtOffset(
             chunk,
             source,
             mutable,
@@ -1638,13 +1635,11 @@ pub const Chunk = struct {
     pub fn printSourceMapContentsAtOffset(
         chunk: Chunk,
         source: *const Logger.Source,
-        mutable: MutableString,
+        mutable: *MutableString,
        include_sources_contents: bool,
        offset: usize,
        comptime ascii_only: bool,
-    ) !MutableString {
-        var output = mutable;
-
+    ) !void {
         // attempt to pre-allocate
         var filename_buf: bun.PathBuffer = undefined;
 
@@ -1657,23 +1652,21 @@ pub const Chunk = struct {
             filename = filename_buf[0 .. filename.len + 1];
         }
 
-        output.growIfNeeded(
+        mutable.growIfNeeded(
             filename.len + 2 + (source.contents.len * @as(usize, @intFromBool(include_sources_contents))) + (chunk.buffer.list.items.len - offset) + 32 + 39 + 29 + 22 + 20,
         ) catch unreachable;
-        try output.append("{\n \"version\":3,\n \"sources\": [");
+        try mutable.append("{\n \"version\":3,\n \"sources\": [");
 
-        output = try JSPrinter.quoteForJSON(filename, output, ascii_only);
+        try JSPrinter.quoteForJSON(filename, mutable, ascii_only);
 
         if (include_sources_contents) {
-            try output.append("],\n \"sourcesContent\": [");
-            output = try JSPrinter.quoteForJSON(source.contents, output, ascii_only);
+            try mutable.append("],\n \"sourcesContent\": [");
+            try JSPrinter.quoteForJSON(source.contents, mutable, ascii_only);
         }
 
-        try output.append("],\n \"mappings\": ");
-        output = try JSPrinter.quoteForJSON(chunk.buffer.list.items[offset..], output, ascii_only);
-        try output.append(", \"names\": []\n}");
-
-        return output;
+        try mutable.append("],\n \"mappings\": ");
+        try JSPrinter.quoteForJSON(chunk.buffer.list.items[offset..], mutable, ascii_only);
+        try mutable.append(", \"names\": []\n}");
     }
 
     // TODO: remove the indirection by having generic functions for SourceMapFormat and NewBuilder. Source maps are always VLQ
@@ -1702,6 +1695,10 @@ pub const Chunk = struct {
             return this.ctx.getBuffer();
         }
 
+        pub inline fn takeBuffer(this: *Format) MutableString {
+            return this.ctx.takeBuffer();
+        }
+
         pub inline fn getCount(this: Format) usize {
             return this.ctx.getCount();
         }
@@ -1714,8 +1711,6 @@ pub const Chunk = struct {
         offset: usize = 0,
         approximate_input_line_count: usize = 0,
 
-        pub const Format = SourceMapFormat(VLQSourceMap);
-
         pub fn init(allocator: std.mem.Allocator, prepend_count: bool) VLQSourceMap {
             var map = VLQSourceMap{
                 .data = MutableString.initEmpty(allocator),
@@ -1740,7 +1735,7 @@ pub const Chunk = struct {
             else
                 0;
 
-            this.data = appendMappingToBuffer(this.data, last_byte, prev_state, current_state);
+            appendMappingToBuffer(&this.data, last_byte, prev_state, current_state);
             this.count += 1;
         }
@@ -1752,6 +1747,11 @@ pub const Chunk = struct {
             return this.data;
         }
 
+        pub fn takeBuffer(this: *VLQSourceMap) MutableString {
+            defer this.data = .initEmpty(this.data.allocator);
+            return this.data;
+        }
+
         pub fn getCount(this: VLQSourceMap) usize {
             return this.count;
         }
@@ -1760,7 +1760,6 @@ pub const Chunk = struct {
     pub fn NewBuilder(comptime SourceMapFormatType: type) type {
         return struct {
             const ThisBuilder = @This();
-            input_source_map: ?*SourceMap = null,
             source_map: SourceMapper,
             line_offset_tables: LineOffsetTable.List = .{},
             prev_state: SourceMapState = SourceMapState{},
@@ -1791,13 +1790,14 @@ pub const Chunk = struct {
             pub noinline fn generateChunk(b: *ThisBuilder, output: []const u8) Chunk {
                 b.updateGeneratedLineAndColumn(output);
+                var buffer = b.source_map.getBuffer();
                 if (b.prepend_count) {
-                    b.source_map.getBuffer().list.items[0..8].* = @as([8]u8, @bitCast(b.source_map.getBuffer().list.items.len));
-                    b.source_map.getBuffer().list.items[8..16].* = @as([8]u8, @bitCast(b.source_map.getCount()));
-                    b.source_map.getBuffer().list.items[16..24].* = @as([8]u8, @bitCast(b.approximate_input_line_count));
+                    buffer.list.items[0..8].* = @as([8]u8, @bitCast(buffer.list.items.len));
+                    buffer.list.items[8..16].* = @as([8]u8, @bitCast(b.source_map.getCount()));
+                    buffer.list.items[16..24].* = @as([8]u8, @bitCast(b.approximate_input_line_count));
                 }
                 return Chunk{
-                    .buffer = b.source_map.getBuffer(),
+                    .buffer = b.source_map.takeBuffer(),
                     .mappings_count = b.source_map.getCount(),
                     .end_state = b.prev_state,
                     .final_generated_column = b.generated_column,
@@ -1873,17 +1873,7 @@ pub const Chunk = struct {
                 b.last_generated_update = @as(u32, @truncate(output.len));
             }
 
-            pub fn appendMapping(b: *ThisBuilder, current_state_: SourceMapState) void {
-                var current_state = current_state_;
-                // If the input file had a source map, map all the way back to the original
-                if (b.input_source_map) |input| {
-                    if (input.find(current_state.original_line, current_state.original_column)) |mapping| {
-                        current_state.source_index = mapping.sourceIndex();
-                        current_state.original_line = mapping.originalLine();
-                        current_state.original_column = mapping.originalColumn();
-                    }
-                }
-
+            pub fn appendMapping(b: *ThisBuilder, current_state: SourceMapState) void {
                 b.appendMappingWithoutRemapping(current_state);
             }
diff --git a/src/string.zig b/src/string.zig
index fddb9b320a..4daebed6fc 100644
--- a/src/string.zig
+++ b/src/string.zig
@@ -757,7 +757,7 @@ pub const String = extern struct {
     pub fn toThreadSafeSlice(this: *const String, allocator: std.mem.Allocator) SliceWithUnderlyingString {
         if (this.tag == .WTFStringImpl) {
             if (!this.value.WTFStringImpl.isThreadSafe()) {
-                const slice = this.value.WTFStringImpl.toUTF8WithoutRef(allocator);
+                const slice = this.value.WTFStringImpl.toUTF8(allocator);
 
                 if (slice.allocator.isNull()) {
                     // this was a WTF-allocated string
@@ -769,8 +769,8 @@ pub const String = extern struct {
                 }
 
                 if (comptime bun.Environment.allow_assert) {
-                    bun.assert(!isWTFAllocator(slice.allocator.get().?)); // toUTF8WithoutRef() should never return a WTF allocator
-                    bun.assert(slice.allocator.get().?.vtable == allocator.vtable); // assert that the allocator is the same
+                    // bun.assert(!isWTFAllocator(slice.allocator.get().?)); // toUTF8WithoutRef() should never return a WTF allocator
+                    // bun.assert(slice.allocator.get().?.vtable == allocator.vtable); // assert that the allocator is the same
                 }
 
                 // We've already cloned the string, so let's just return the slice.
diff --git a/src/string/MutableString.zig b/src/string/MutableString.zig
index 643e51ca39..42e22b2b3d 100644
--- a/src/string/MutableString.zig
+++ b/src/string/MutableString.zig
@@ -240,7 +240,7 @@ pub inline fn lenI(self: *MutableString) i32 {
     return @as(i32, @intCast(self.list.items.len));
 }
 
-pub fn toOwnedSlice(self: *MutableString) string {
+pub fn toOwnedSlice(self: *MutableString) []u8 {
     return self.list.toOwnedSlice(self.allocator) catch bun.outOfMemory(); // TODO
 }
diff --git a/src/string/StringJoiner.zig b/src/string/StringJoiner.zig
index c18a7a54e0..bb2083d053 100644
--- a/src/string/StringJoiner.zig
+++ b/src/string/StringJoiner.zig
@@ -104,6 +104,20 @@ pub fn done(this: *StringJoiner, allocator: Allocator) ![]u8 {
     return slice;
 }
 
+pub fn deinit(this: *StringJoiner) void {
+    var current: ?*Node = this.head orelse {
+        assert(this.tail == null);
+        assert(this.len == 0);
+        return;
+    };
+
+    while (current) |node| {
+        const prev = node;
+        current = node.next;
+        prev.deinit(this.allocator);
+    }
+}
+
 /// Same as `.done`, but appends extra slice `end`
 pub fn doneWithEnd(this: *StringJoiner, allocator: Allocator, end: []const u8) ![]u8 {
     var current: ?*Node = this.head orelse {
diff --git a/src/threading/ThreadPool.zig b/src/threading/ThreadPool.zig
index 610d7440b7..5862ccb281 100644
--- a/src/threading/ThreadPool.zig
+++ b/src/threading/ThreadPool.zig
@@ -550,6 +550,8 @@ pub const Thread = struct {
     /// Thread entry point which runs a worker for the ThreadPool
     fn run(thread_pool: *ThreadPool) void {
+        bun.mimalloc.mi_thread_set_in_threadpool();
+
         {
             var counter_buf: [100]u8 = undefined;
             const int = counter.fetchAdd(1, .seq_cst);
diff --git a/test/bake/dev/ecosystem.test.ts b/test/bake/dev/ecosystem.test.ts
index e93a39ff6d..0f2aece4fa 100644
--- a/test/bake/dev/ecosystem.test.ts
+++ b/test/bake/dev/ecosystem.test.ts
@@ -12,6 +12,7 @@ import { devTest } from "../bake-harness";
 devTest("svelte component islands example", {
   fixture: "svelte-component-islands",
   timeoutMultiplier: 2,
+  skip: ["win32"],
   async test(dev) {
     const html = await dev.fetch("/").text();
     if (html.includes("Bun__renderFallbackError")) throw new Error("failed");
diff --git a/test/internal/ban-limits.json b/test/internal/ban-limits.json
index 7fdd86cd88..a0fa9945b4 100644
--- a/test/internal/ban-limits.json
+++ b/test/internal/ban-limits.json
@@ -7,7 +7,7 @@
   ".stdDir()": 40,
   ".stdFile()": 18,
   "// autofix": 168,
-  ": [a-zA-Z0-9_\\.\\*\\?\\[\\]\\(\\)]+ = undefined,": 230,
+  ": [a-zA-Z0-9_\\.\\*\\?\\[\\]\\(\\)]+ = undefined,": 229,
   "== alloc.ptr": 0,
   "== allocator.ptr": 0,
   "@import(\"bun\").": 0,
diff --git a/test/js/bun/http/body-leak-test-fixture.ts b/test/js/bun/http/body-leak-test-fixture.ts
index 7c50ad8848..a8713fa094 100644
--- a/test/js/bun/http/body-leak-test-fixture.ts
+++ b/test/js/bun/http/body-leak-test-fixture.ts
@@ -39,9 +39,7 @@ const server = Bun.serve({
       }
     } else if (url.endsWith("/incomplete-streaming")) {
       const reader = req.body?.getReader();
-      if (!reader) {
-        reader?.read();
-      }
+      await reader?.read();
     } else if (url.endsWith("/streaming-echo")) {
       return new Response(req.body, {
         headers: {
diff --git a/test/js/bun/perf/static-initializers.test.ts b/test/js/bun/perf/static-initializers.test.ts
index 1977f495f6..9e645cba89 100644
--- a/test/js/bun/perf/static-initializers.test.ts
+++ b/test/js/bun/perf/static-initializers.test.ts
@@ -64,6 +64,6 @@ describe("static initializers", () => {
     expect(
       bunInitializers.length,
       `Do not add static initializers to Bun. Static initializers are called when Bun starts up, regardless of whether you use the variables or not. This makes Bun slower.`,
-    ).toBe(process.arch == "arm64" ? 1 : 2);
+    ).toBe(process.arch == "arm64" ? 2 : 3);
   });
 });