diff --git a/cmake/targets/BuildMimalloc.cmake b/cmake/targets/BuildMimalloc.cmake index 7f4e6167f8..c01e9ca83e 100644 --- a/cmake/targets/BuildMimalloc.cmake +++ b/cmake/targets/BuildMimalloc.cmake @@ -4,7 +4,7 @@ register_repository( REPOSITORY oven-sh/mimalloc COMMIT - 1cef3e8f4167733818f1883b2f3a9dd4754224cf + 1beadf9651a7bfdec6b5367c380ecc3fe1c40d1a ) set(MIMALLOC_CMAKE_ARGS @@ -14,7 +14,7 @@ set(MIMALLOC_CMAKE_ARGS -DMI_BUILD_TESTS=OFF -DMI_USE_CXX=ON -DMI_SKIP_COLLECT_ON_EXIT=ON - + # ``` # ❯ mimalloc_allow_large_os_pages=0 BUN_PORT=3004 mem bun http-hello.js # Started development server: http://localhost:3004 @@ -64,13 +64,13 @@ if(ENABLE_VALGRIND) list(APPEND MIMALLOC_CMAKE_ARGS -DMI_VALGRIND=ON) endif() -# Enable SIMD optimizations when not building for baseline (older CPUs) -if(NOT ENABLE_BASELINE) - list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OPT_ARCH=ON) - list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OPT_SIMD=ON) -endif() - -if(DEBUG) +if(WIN32) + if(DEBUG) + set(MIMALLOC_LIBRARY mimalloc-static-debug) + else() + set(MIMALLOC_LIBRARY mimalloc-static) + endif() +elseif(DEBUG) if (ENABLE_ASAN) set(MIMALLOC_LIBRARY mimalloc-asan-debug) else() @@ -86,7 +86,6 @@ if(APPLE OR (LINUX AND NOT DEBUG)) set(MIMALLOC_LIBRARY CMakeFiles/mimalloc-obj.dir/src/static.c.o) endif() - register_cmake_command( TARGET mimalloc diff --git a/src/allocators/AllocationScope.zig b/src/allocators/AllocationScope.zig index a37e3fa555..324468dd03 100644 --- a/src/allocators/AllocationScope.zig +++ b/src/allocators/AllocationScope.zig @@ -216,7 +216,7 @@ pub fn trackExternalAllocation(scope: *AllocationScope, ptr: []const u8, ret_add /// Call when the pointer from `trackExternalAllocation` is freed. /// Returns true if the free was invalid. 
pub fn trackExternalFree(scope: *AllocationScope, slice: anytype, ret_addr: ?usize) bool { - if (comptime !enabled) return false; + if (comptime !enabled) return false; const ptr: []const u8 = switch (@typeInfo(@TypeOf(slice))) { .pointer => |p| switch (p.size) { .slice => brk: { diff --git a/src/allocators/basic.zig b/src/allocators/basic.zig index 916d1871e4..acf6e9590d 100644 --- a/src/allocators/basic.zig +++ b/src/allocators/basic.zig @@ -13,7 +13,8 @@ fn mimalloc_free( // but its good to have that assertion // let's only enable it in debug mode if (comptime Environment.isDebug) { - if (mimalloc.mustUseAlignedAlloc(alignment)) + assert(mimalloc.mi_is_in_heap_region(buf.ptr)); + if (mimalloc.canUseAlignedAlloc(buf.len, alignment.toByteUnits())) mimalloc.mi_free_size_aligned(buf.ptr, buf.len, alignment.toByteUnits()) else mimalloc.mi_free_size(buf.ptr, buf.len); @@ -24,6 +25,7 @@ fn mimalloc_free( const MimallocAllocator = struct { pub const supports_posix_memalign = true; + fn alignedAlloc(len: usize, alignment: mem.Alignment) ?[*]u8 { if (comptime Environment.enable_logs) log("mi_alloc({d}, {d})", .{ len, alignment.toByteUnits() }); @@ -146,6 +148,7 @@ const Environment = @import("../env.zig"); const std = @import("std"); const bun = @import("bun"); +const assert = bun.assert; const mimalloc = bun.mimalloc; const mem = @import("std").mem; diff --git a/src/allocators/mimalloc.zig b/src/allocators/mimalloc.zig index 88fce6b8a3..3635b14c9b 100644 --- a/src/allocators/mimalloc.zig +++ b/src/allocators/mimalloc.zig @@ -127,14 +127,13 @@ pub extern fn mi_reserve_huge_os_pages_at(pages: usize, numa_node: c_int, timeou pub extern fn mi_reserve_os_memory(size: usize, commit: bool, allow_large: bool) c_int; pub extern fn mi_manage_os_memory(start: ?*anyopaque, size: usize, is_committed: bool, is_large: bool, is_zero: bool, numa_node: c_int) bool; pub extern fn mi_debug_show_arenas() void; -pub const ArenaID = ?*anyopaque; -pub extern fn mi_arena_area(arena_id: ArenaID, size: 
*usize) ?*anyopaque; +pub const ArenaID = c_int; +pub extern fn mi_arena_area(arena_id: ArenaID, size: [*c]usize) ?*anyopaque; pub extern fn mi_reserve_huge_os_pages_at_ex(pages: usize, numa_node: c_int, timeout_msecs: usize, exclusive: bool, arena_id: *ArenaID) c_int; pub extern fn mi_reserve_os_memory_ex(size: usize, commit: bool, allow_large: bool, exclusive: bool, arena_id: *ArenaID) c_int; pub extern fn mi_manage_os_memory_ex(start: ?*anyopaque, size: usize, is_committed: bool, is_large: bool, is_zero: bool, numa_node: c_int, exclusive: bool, arena_id: *ArenaID) bool; pub extern fn mi_heap_new_in_arena(arena_id: ArenaID) ?*Heap; pub extern fn mi_reserve_huge_os_pages(pages: usize, max_secs: f64, pages_reserved: [*c]usize) c_int; -pub extern fn mi_thread_set_in_threadpool() void; pub const Option = enum(c_uint) { show_errors = 0, show_stats = 1, @@ -227,7 +226,4 @@ pub fn mustUseAlignedAlloc(alignment: std.mem.Alignment) bool { return alignment.toByteUnits() > MI_MAX_ALIGN_SIZE; } -pub const mi_arena_id_t = ?*anyopaque; -pub extern fn mi_heap_new_ex(heap_tag: c_int, allow_destroy: bool, arena_id: mi_arena_id_t) ?*Heap; - const std = @import("std"); diff --git a/src/ast/Expr.zig b/src/ast/Expr.zig index a8b3d60b09..9d5d1def34 100644 --- a/src/ast/Expr.zig +++ b/src/ast/Expr.zig @@ -96,7 +96,7 @@ pub fn fromBlob( if (mime_type.category.isTextLike()) { var output = MutableString.initEmpty(allocator); - try JSPrinter.quoteForJSON(bytes, &output, true); + output = try JSPrinter.quoteForJSON(bytes, output, true); var list = output.toOwnedSlice(); // remove the quotes if (list.len > 0) { diff --git a/src/bake/DevServer.zig b/src/bake/DevServer.zig index 0359fa3dd5..0c703c3bc4 100644 --- a/src/bake/DevServer.zig +++ b/src/bake/DevServer.zig @@ -2070,7 +2070,7 @@ pub fn finalizeBundle( .gts = undefined, }; - const quoted_source_contents: []?[]u8 = bv2.linker.graph.files.items(.quoted_source_contents); + const quoted_source_contents: []const []const u8 = 
bv2.linker.graph.files.items(.quoted_source_contents); // Pass 1, update the graph's nodes, resolving every bundler source // index into its `IncrementalGraph(...).FileIndex` for ( @@ -2083,7 +2083,7 @@ pub fn finalizeBundle( bun.assert(compile_result.javascript.result == .result); bun.assert(dev.server_transpiler.options.source_map != .none); bun.assert(!part_range.source_index.isRuntime()); - break :brk .initEmpty(); + break :brk .empty; }; // TODO: investigate why linker.files is not indexed by linker's index // const linker_index = bv2.linker.graph.stable_source_indices[index.get()]; @@ -2096,16 +2096,13 @@ pub fn finalizeBundle( }).receiveChunk( &ctx, index, - .{ - .js = .{ - .code = compile_result.javascript.code(), - .code_allocator = compile_result.javascript.allocator(), - .source_map = .{ - .chunk = source_map, - .escaped_source = quoted_contents, - }, + .{ .js = .{ + .code = compile_result.code(), + .source_map = .{ + .chunk = source_map, + .escaped_source = @constCast(quoted_contents), }, - }, + } }, graph == .ssr, ), } @@ -2191,7 +2188,6 @@ pub fn finalizeBundle( index, .{ .js = .{ .code = generated_js, - .code_allocator = dev.allocator, .source_map = null, } }, false, @@ -2992,9 +2988,6 @@ fn sendBuiltInNotFound(resp: anytype) void { } fn printMemoryLine(dev: *DevServer) void { - if (comptime !bun.Environment.enableAllocScopes) { - return; - } if (!debug.isVisible()) return; Output.prettyErrorln("DevServer tracked {}, measured: {} ({}), process: {}", .{ bun.fmt.size(dev.memoryCost(), .{}), diff --git a/src/bake/DevServer/IncrementalGraph.zig b/src/bake/DevServer/IncrementalGraph.zig index beed6f8a6a..dbb455befe 100644 --- a/src/bake/DevServer/IncrementalGraph.zig +++ b/src/bake/DevServer/IncrementalGraph.zig @@ -126,15 +126,12 @@ pub fn IncrementalGraph(side: bake.Side) type { .client => struct { /// Content depends on `flags.kind` /// See function wrappers to safely read into this data - content: union { - /// Access contents with `.jsCode()`. 
+ content: extern union { + /// Allocated by `dev.allocator`. Access with `.jsCode()` /// When stale, the code is "", otherwise it contains at /// least one non-whitespace character, as empty chunks /// contain at least a function wrapper. - js_code: struct { - ptr: [*]const u8, - allocator: std.mem.Allocator, - }, + js_code_ptr: [*]const u8, /// Access with `.cssAssetId()` css_asset_id: u64, @@ -182,20 +179,18 @@ pub fn IncrementalGraph(side: bake.Side) type { }; comptime { - if (@import("builtin").mode == .ReleaseFast or @import("builtin").mode == .ReleaseSmall) { - bun.assert_eql(@sizeOf(@This()), @sizeOf(u64) * 5); - bun.assert_eql(@alignOf(@This()), @alignOf([*]u8)); + const d = std.debug; + if (!Environment.isDebug) { + d.assert(@sizeOf(@This()) == @sizeOf(u64) * 3); + d.assert(@alignOf(@This()) == @alignOf([*]u8)); } } - fn initJavaScript(code_slice: []const u8, code_allocator: std.mem.Allocator, flags: Flags, source_map: PackedMap.RefOrEmpty) @This() { + fn initJavaScript(code_slice: []const u8, flags: Flags, source_map: PackedMap.RefOrEmpty) @This() { assert(flags.kind == .js or flags.kind == .asset); assert(flags.source_map_state == std.meta.activeTag(source_map)); return .{ - .content = .{ .js_code = .{ - .ptr = code_slice.ptr, - .allocator = code_allocator, - } }, + .content = .{ .js_code_ptr = code_slice.ptr }, .code_len = @intCast(code_slice.len), .flags = flags, .source_map = source_map.untag(), @@ -225,12 +220,7 @@ pub fn IncrementalGraph(side: bake.Side) type { fn jsCode(file: @This()) []const u8 { assert(file.flags.kind.hasInlinejscodeChunk()); - return file.content.js_code.ptr[0..file.code_len]; - } - - fn freeJsCode(file: *@This()) void { - assert(file.flags.kind.hasInlinejscodeChunk()); - file.content.js_code.allocator.free(file.jsCode()); + return file.content.js_code_ptr[0..file.code_len]; } fn cssAssetId(file: @This()) u64 { @@ -260,7 +250,7 @@ pub fn IncrementalGraph(side: bake.Side) type { fn freeFileContent(g: *IncrementalGraph(.client), 
key: []const u8, file: *File, css: enum { unref_css, ignore_css }) void { switch (file.flags.kind) { .js, .asset => { - file.freeJsCode(); + g.owner().allocator.free(file.jsCode()); switch (file.sourceMap()) { .ref => |ptr| { ptr.derefWithContext(g.owner()); @@ -396,10 +386,9 @@ pub fn IncrementalGraph(side: bake.Side) type { content: union(enum) { js: struct { code: []const u8, - code_allocator: std.mem.Allocator, source_map: ?struct { chunk: SourceMap.Chunk, - escaped_source: ?[]u8, + escaped_source: []u8, }, }, css: u64, @@ -486,22 +475,24 @@ pub fn IncrementalGraph(side: bake.Side) type { switch (content) { .css => |css| gop.value_ptr.* = .initCSS(css, flags), .js => |js| { + dev.allocation_scope.assertOwned(js.code); + // Insert new source map or patch existing empty source map. const source_map: PackedMap.RefOrEmpty = brk: { if (js.source_map) |source_map| { bun.debugAssert(!flags.is_html_route); // suspect behind #17956 if (source_map.chunk.buffer.len() > 0) { + dev.allocation_scope.assertOwned(source_map.chunk.buffer.list.items); + dev.allocation_scope.assertOwned(source_map.escaped_source); flags.source_map_state = .ref; break :brk .{ .ref = PackedMap.newNonEmpty( source_map.chunk, - source_map.escaped_source.?, + source_map.escaped_source, ) }; } var take = source_map.chunk.buffer; take.deinit(); - if (source_map.escaped_source) |escaped_source| { - bun.default_allocator.free(escaped_source); - } + dev.allocator.free(source_map.escaped_source); } // Must precompute this. 
Otherwise, source maps won't have @@ -517,7 +508,7 @@ pub fn IncrementalGraph(side: bake.Side) type { } }; }; - gop.value_ptr.* = .initJavaScript(js.code, js.code_allocator, flags, source_map); + gop.value_ptr.* = .initJavaScript(js.code, flags, source_map); // Track JavaScript chunks for concatenation try g.current_chunk_parts.append(dev.allocator, file_index); @@ -588,9 +579,7 @@ pub fn IncrementalGraph(side: bake.Side) type { if (content.js.source_map) |source_map| { var take = source_map.chunk.buffer; take.deinit(); - if (source_map.escaped_source) |escaped_source| { - bun.default_allocator.free(escaped_source); - } + dev.allocator.free(source_map.escaped_source); } } }, diff --git a/src/bake/DevServer/PackedMap.zig b/src/bake/DevServer/PackedMap.zig index 83fb0922e7..7a2d2f840a 100644 --- a/src/bake/DevServer/PackedMap.zig +++ b/src/bake/DevServer/PackedMap.zig @@ -11,7 +11,6 @@ ref_count: RefCount, /// This is stored to allow lazy construction of source map files. vlq_ptr: [*]u8, vlq_len: u32, -vlq_allocator: std.mem.Allocator, /// The bundler runs quoting on multiple threads, so it only makes /// sense to preserve that effort for concatenation and /// re-concatenation. @@ -32,26 +31,24 @@ end_state: struct { /// already counted for. 
bits_used_for_memory_cost_dedupe: u32 = 0, -pub fn newNonEmpty(chunk: SourceMap.Chunk, quoted_contents: []u8) bun.ptr.RefPtr(PackedMap) { - assert(chunk.buffer.list.items.len > 0); - var buffer = chunk.buffer; - const slice = buffer.toOwnedSlice(); +pub fn newNonEmpty(source_map: SourceMap.Chunk, quoted_contents: []u8) bun.ptr.RefPtr(PackedMap) { + assert(source_map.buffer.list.items.len > 0); return .new(.{ .ref_count = .init(), - .vlq_ptr = slice.ptr, - .vlq_len = @intCast(slice.len), - .vlq_allocator = buffer.allocator, + .vlq_ptr = source_map.buffer.list.items.ptr, + .vlq_len = @intCast(source_map.buffer.list.items.len), .quoted_contents_ptr = quoted_contents.ptr, .quoted_contents_len = @intCast(quoted_contents.len), .end_state = .{ - .original_line = chunk.end_state.original_line, - .original_column = chunk.end_state.original_column, + .original_line = source_map.end_state.original_line, + .original_column = source_map.end_state.original_column, }, }); } -fn destroy(self: *@This(), _: *DevServer) void { - self.vlq_allocator.free(self.vlq()); +fn destroy(self: *@This(), dev: *DevServer) void { + dev.allocator.free(self.vlq()); + dev.allocator.free(self.quotedContents()); bun.destroy(self); } @@ -81,7 +78,7 @@ pub fn quotedContents(self: *const @This()) []u8 { comptime { if (!Environment.isDebug) { - assert_eql(@sizeOf(@This()), @sizeOf(usize) * 7); + assert_eql(@sizeOf(@This()), @sizeOf(usize) * 5); assert_eql(@alignOf(@This()), @alignOf(usize)); } } @@ -159,8 +156,6 @@ pub const RefOrEmpty = union(enum(u1)) { }; }; -const std = @import("std"); - const bun = @import("bun"); const Environment = bun.Environment; const SourceMap = bun.sourcemap; diff --git a/src/bun.js.zig b/src/bun.js.zig index 3def2c2445..d05a6818ea 100644 --- a/src/bun.js.zig +++ b/src/bun.js.zig @@ -23,7 +23,7 @@ pub const Run = struct { js_ast.Expr.Data.Store.create(); js_ast.Stmt.Data.Store.create(); - const arena = try Arena.init(); + var arena = try Arena.init(); if 
(!ctx.debug.loaded_bunfig) { try bun.cli.Arguments.loadConfigPath(ctx.allocator, true, "bunfig.toml", ctx, .RunCommand); @@ -31,7 +31,7 @@ pub const Run = struct { run = .{ .vm = try VirtualMachine.initWithModuleGraph(.{ - .allocator = bun.default_allocator, + .allocator = arena.allocator(), .log = ctx.log, .args = ctx.args, .graph = graph_ptr, @@ -48,7 +48,7 @@ pub const Run = struct { vm.preload = ctx.preloads; vm.argv = ctx.passthrough; vm.arena = &run.arena; - vm.allocator = bun.default_allocator; + vm.allocator = arena.allocator(); b.options.install = ctx.install; b.resolver.opts.install = ctx.install; @@ -160,12 +160,12 @@ pub const Run = struct { js_ast.Expr.Data.Store.create(); js_ast.Stmt.Data.Store.create(); - const arena = try Arena.init(); + var arena = try Arena.init(); run = .{ .vm = try VirtualMachine.init( .{ - .allocator = bun.default_allocator, + .allocator = arena.allocator(), .log = ctx.log, .args = ctx.args, .store_fd = ctx.debug.hot_reload != .none, @@ -187,7 +187,7 @@ pub const Run = struct { vm.preload = ctx.preloads; vm.argv = ctx.passthrough; vm.arena = &run.arena; - vm.allocator = bun.default_allocator; + vm.allocator = arena.allocator(); if (ctx.runtime_options.eval.script.len > 0) { const script_source = try bun.default_allocator.create(logger.Source); diff --git a/src/bun.js/SavedSourceMap.zig b/src/bun.js/SavedSourceMap.zig index 706a748507..68fcdb75e9 100644 --- a/src/bun.js/SavedSourceMap.zig +++ b/src/bun.js/SavedSourceMap.zig @@ -166,7 +166,7 @@ pub fn deinit(this: *SavedSourceMap) void { } pub fn putMappings(this: *SavedSourceMap, source: *const logger.Source, mappings: MutableString) !void { - try this.putValue(source.path.text, Value.init(bun.cast(*SavedMappings, try bun.default_allocator.dupe(u8, mappings.list.items)))); + try this.putValue(source.path.text, Value.init(bun.cast(*SavedMappings, mappings.list.items.ptr))); } pub fn putValue(this: *SavedSourceMap, path: []const u8, value: Value) !void { diff --git 
a/src/bun.js/VirtualMachine.zig b/src/bun.js/VirtualMachine.zig index 0525e201fc..67bf251a07 100644 --- a/src/bun.js/VirtualMachine.zig +++ b/src/bun.js/VirtualMachine.zig @@ -194,8 +194,6 @@ commonjs_custom_extensions: bun.StringArrayHashMapUnmanaged(node_module_module.C /// The value is decremented when defaults are restored. has_mutated_built_in_extensions: u32 = 0, -initial_script_execution_context_identifier: i32, - pub const ProcessAutoKiller = @import("./ProcessAutoKiller.zig"); pub const OnUnhandledRejection = fn (*VirtualMachine, globalObject: *JSGlobalObject, JSValue) void; @@ -369,7 +367,7 @@ const SourceMapHandlerGetter = struct { pub fn onChunk(this: *SourceMapHandlerGetter, chunk: SourceMap.Chunk, source: *const logger.Source) anyerror!void { var temp_json_buffer = bun.MutableString.initEmpty(bun.default_allocator); defer temp_json_buffer.deinit(); - try chunk.printSourceMapContentsAtOffset(source, &temp_json_buffer, true, SavedSourceMap.vlq_offset, true); + temp_json_buffer = try chunk.printSourceMapContentsAtOffset(source, temp_json_buffer, true, SavedSourceMap.vlq_offset, true); const source_map_url_prefix_start = "//# sourceMappingURL=data:application/json;base64,"; // TODO: do we need to %-encode the path? 
const source_url_len = source.path.text.len; @@ -986,7 +984,6 @@ pub fn initWithModuleGraph( .standalone_module_graph = opts.graph.?, .debug_thread_id = if (Environment.allow_assert) std.Thread.getCurrentId(), .destruct_main_thread_on_exit = opts.destruct_main_thread_on_exit, - .initial_script_execution_context_identifier = if (opts.is_main_thread) 1 else std.math.maxInt(i32), }; vm.source_mappings.init(&vm.saved_source_map_table); vm.regular_event_loop.tasks = EventLoop.Queue.init( @@ -1019,7 +1016,7 @@ pub fn initWithModuleGraph( vm.global = JSGlobalObject.create( vm, vm.console, - vm.initial_script_execution_context_identifier, + if (opts.is_main_thread) 1 else std.math.maxInt(i32), false, false, null, @@ -1108,7 +1105,6 @@ pub fn init(opts: Options) !*VirtualMachine { .ref_strings_mutex = .{}, .debug_thread_id = if (Environment.allow_assert) std.Thread.getCurrentId(), .destruct_main_thread_on_exit = opts.destruct_main_thread_on_exit, - .initial_script_execution_context_identifier = if (opts.is_main_thread) 1 else std.math.maxInt(i32), }; vm.source_mappings.init(&vm.saved_source_map_table); vm.regular_event_loop.tasks = EventLoop.Queue.init( @@ -1138,7 +1134,7 @@ pub fn init(opts: Options) !*VirtualMachine { vm.global = JSGlobalObject.create( vm, vm.console, - vm.initial_script_execution_context_identifier, + if (opts.is_main_thread) 1 else std.math.maxInt(i32), opts.smol, opts.eval, null, @@ -1268,7 +1264,6 @@ pub fn initWorker( .debug_thread_id = if (Environment.allow_assert) std.Thread.getCurrentId(), // This option is irrelevant for Workers .destruct_main_thread_on_exit = false, - .initial_script_execution_context_identifier = @as(i32, @intCast(worker.execution_context_id)), }; vm.source_mappings.init(&vm.saved_source_map_table); vm.regular_event_loop.tasks = EventLoop.Queue.init( @@ -1302,7 +1297,7 @@ pub fn initWorker( vm.global = JSGlobalObject.create( vm, vm.console, - vm.initial_script_execution_context_identifier, + @as(i32, 
@intCast(worker.execution_context_id)), worker.mini, opts.eval, worker.cpp_worker, @@ -1360,7 +1355,6 @@ pub fn initBake(opts: Options) anyerror!*VirtualMachine { .ref_strings_mutex = .{}, .debug_thread_id = if (Environment.allow_assert) std.Thread.getCurrentId(), .destruct_main_thread_on_exit = opts.destruct_main_thread_on_exit, - .initial_script_execution_context_identifier = if (opts.is_main_thread) 1 else std.math.maxInt(i32), }; vm.source_mappings.init(&vm.saved_source_map_table); vm.regular_event_loop.tasks = EventLoop.Queue.init( diff --git a/src/bun.js/api/Timer/WTFTimer.zig b/src/bun.js/api/Timer/WTFTimer.zig index e93883d760..e91d0d321e 100644 --- a/src/bun.js/api/Timer/WTFTimer.zig +++ b/src/bun.js/api/Timer/WTFTimer.zig @@ -14,7 +14,6 @@ event_loop_timer: EventLoopTimer, imminent: *std.atomic.Value(?*WTFTimer), repeat: bool, lock: bun.Mutex = .{}, -script_execution_context_id: bun.webcore.ScriptExecutionContext.Identifier, const new = bun.TrivialNew(WTFTimer); @@ -57,13 +56,9 @@ pub fn update(this: *WTFTimer, seconds: f64, repeat: bool) void { pub fn cancel(this: *WTFTimer) void { this.lock.lock(); defer this.lock.unlock(); - - if (this.script_execution_context_id.valid()) { - this.imminent.store(null, .seq_cst); - - if (this.event_loop_timer.state == .ACTIVE) { - this.vm.timer.remove(&this.event_loop_timer); - } + this.imminent.store(null, .seq_cst); + if (this.event_loop_timer.state == .ACTIVE) { + this.vm.timer.remove(&this.event_loop_timer); } } @@ -102,7 +97,6 @@ export fn WTFTimer__create(run_loop_timer: *RunLoopTimer) ?*anyopaque { }, .run_loop_timer = run_loop_timer, .repeat = false, - .script_execution_context_id = @enumFromInt(vm.initial_script_execution_context_identifier), }); return this; diff --git a/src/bun.js/api/bun/socket.zig b/src/bun.js/api/bun/socket.zig index 119bbe17a3..28efbef99d 100644 --- a/src/bun.js/api/bun/socket.zig +++ b/src/bun.js/api/bun/socket.zig @@ -60,7 +60,8 @@ pub fn NewSocket(comptime ssl: bool) type { flags: 
Flags = .{}, ref_count: RefCount, wrapped: WrappedType = .none, - handlers: ?*Handlers, + // TODO: make this optional + handlers: *Handlers, this_value: jsc.JSValue = .zero, poll_ref: Async.KeepAlive = Async.KeepAlive.init(), ref_pollref_on_connect: bool = true, @@ -207,7 +208,7 @@ pub fn NewSocket(comptime ssl: bool) type { pub fn handleError(this: *This, err_value: jsc.JSValue) void { log("handleError", .{}); - const handlers = this.getHandlers(); + const handlers = this.handlers; var vm = handlers.vm; if (vm.isShuttingDown()) { return; @@ -225,7 +226,7 @@ pub fn NewSocket(comptime ssl: bool) type { jsc.markBinding(@src()); if (this.socket.isDetached()) return; if (this.native_callback.onWritable()) return; - const handlers = this.getHandlers(); + const handlers = this.handlers; const callback = handlers.onWritable; if (callback == .zero) return; @@ -255,8 +256,8 @@ pub fn NewSocket(comptime ssl: bool) type { pub fn onTimeout(this: *This, _: Socket) void { jsc.markBinding(@src()); if (this.socket.isDetached()) return; - const handlers = this.getHandlers(); - log("onTimeout {s}", .{if (handlers.is_server) "S" else "C"}); + log("onTimeout {s}", .{if (this.handlers.is_server) "S" else "C"}); + const handlers = this.handlers; const callback = handlers.onTimeout; if (callback == .zero or this.flags.finalizing) return; if (handlers.vm.isShuttingDown()) { @@ -275,13 +276,8 @@ pub fn NewSocket(comptime ssl: bool) type { }; } - pub fn getHandlers(this: *const This) *Handlers { - return this.handlers orelse @panic("No handlers set on Socket"); - } - pub fn handleConnectError(this: *This, errno: c_int) void { - const handlers = this.getHandlers(); - log("onConnectError {s} ({d}, {d})", .{ if (handlers.is_server) "S" else "C", errno, this.ref_count.active_counts }); + log("onConnectError {s} ({d}, {d})", .{ if (this.handlers.is_server) "S" else "C", errno, this.ref_count.active_counts }); // Ensure the socket is still alive for any defer's we have this.ref(); defer 
this.deref(); @@ -292,6 +288,7 @@ pub fn NewSocket(comptime ssl: bool) type { defer this.markInactive(); defer if (needs_deref) this.deref(); + const handlers = this.handlers; const vm = handlers.vm; this.poll_ref.unrefOnNextTick(vm); if (vm.isShuttingDown()) { @@ -360,7 +357,7 @@ pub fn NewSocket(comptime ssl: bool) type { pub fn markActive(this: *This) void { if (!this.flags.is_active) { - this.getHandlers().markActive(); + this.handlers.markActive(); this.flags.is_active = true; this.has_pending_activity.store(true, .release); } @@ -388,20 +385,15 @@ pub fn NewSocket(comptime ssl: bool) type { } this.flags.is_active = false; - const handlers = this.getHandlers(); - const vm = handlers.vm; - handlers.markInactive(); + const vm = this.handlers.vm; + this.handlers.markInactive(); this.poll_ref.unref(vm); this.has_pending_activity.store(false, .release); } } - pub fn isServer(this: *const This) bool { - return this.getHandlers().is_server; - } - pub fn onOpen(this: *This, socket: Socket) void { - log("onOpen {s} {*} {} {}", .{ if (this.isServer()) "S" else "C", this, this.socket.isDetached(), this.ref_count.active_counts }); + log("onOpen {s} {*} {} {}", .{ if (this.handlers.is_server) "S" else "C", this, this.socket.isDetached(), this.ref_count.active_counts }); // Ensure the socket remains alive until this is finished this.ref(); defer this.deref(); @@ -433,7 +425,7 @@ pub fn NewSocket(comptime ssl: bool) type { } } if (this.protos) |protos| { - if (this.isServer()) { + if (this.handlers.is_server) { BoringSSL.SSL_CTX_set_alpn_select_cb(BoringSSL.SSL_get_SSL_CTX(ssl_ptr), selectALPNCallback, bun.cast(*anyopaque, this)); } else { _ = BoringSSL.SSL_set_alpn_protos(ssl_ptr, protos.ptr, @as(c_uint, @intCast(protos.len))); @@ -449,7 +441,7 @@ pub fn NewSocket(comptime ssl: bool) type { } } - const handlers = this.getHandlers(); + const handlers = this.handlers; const callback = handlers.onOpen; const handshake_callback = handlers.onHandshake; @@ -501,12 +493,13 @@ pub 
fn NewSocket(comptime ssl: bool) type { pub fn onEnd(this: *This, _: Socket) void { jsc.markBinding(@src()); if (this.socket.isDetached()) return; - const handlers = this.getHandlers(); - log("onEnd {s}", .{if (handlers.is_server) "S" else "C"}); + log("onEnd {s}", .{if (this.handlers.is_server) "S" else "C"}); // Ensure the socket remains alive until this is finished this.ref(); defer this.deref(); + const handlers = this.handlers; + const callback = handlers.onEnd; if (callback == .zero or handlers.vm.isShuttingDown()) { this.poll_ref.unref(handlers.vm); @@ -532,13 +525,13 @@ pub fn NewSocket(comptime ssl: bool) type { jsc.markBinding(@src()); this.flags.handshake_complete = true; if (this.socket.isDetached()) return; - const handlers = this.getHandlers(); - log("onHandshake {s} ({d})", .{ if (handlers.is_server) "S" else "C", success }); + log("onHandshake {s} ({d})", .{ if (this.handlers.is_server) "S" else "C", success }); const authorized = if (success == 1) true else false; this.flags.authorized = authorized; + const handlers = this.handlers; var callback = handlers.onHandshake; var is_open = false; @@ -574,8 +567,8 @@ pub fn NewSocket(comptime ssl: bool) type { // clean onOpen callback so only called in the first handshake and not in every renegotiation // on servers this would require a different approach but it's not needed because our servers will not call handshake multiple times // servers don't support renegotiation - this.handlers.?.onOpen.unprotect(); - this.handlers.?.onOpen = .zero; + this.handlers.onOpen.unprotect(); + this.handlers.onOpen = .zero; } } else { // call handhsake callback with authorized and authorization error if has one @@ -598,8 +591,7 @@ pub fn NewSocket(comptime ssl: bool) type { pub fn onClose(this: *This, _: Socket, err: c_int, _: ?*anyopaque) void { jsc.markBinding(@src()); - const handlers = this.getHandlers(); - log("onClose {s}", .{if (handlers.is_server) "S" else "C"}); + log("onClose {s}", .{if (this.handlers.is_server) 
"S" else "C"}); this.detachNativeCallback(); this.socket.detach(); defer this.deref(); @@ -609,6 +601,7 @@ pub fn NewSocket(comptime ssl: bool) type { return; } + const handlers = this.handlers; const vm = handlers.vm; this.poll_ref.unref(vm); @@ -645,10 +638,10 @@ pub fn NewSocket(comptime ssl: bool) type { pub fn onData(this: *This, _: Socket, data: []const u8) void { jsc.markBinding(@src()); if (this.socket.isDetached()) return; - const handlers = this.getHandlers(); - log("onData {s} ({d})", .{ if (handlers.is_server) "S" else "C", data.len }); + log("onData {s} ({d})", .{ if (this.handlers.is_server) "S" else "C", data.len }); if (this.native_callback.onData(data)) return; + const handlers = this.handlers; const callback = handlers.onData; if (callback == .zero or this.flags.finalizing) return; if (handlers.vm.isShuttingDown()) { @@ -687,13 +680,11 @@ pub fn NewSocket(comptime ssl: bool) type { } pub fn getListener(this: *This, _: *jsc.JSGlobalObject) JSValue { - const handlers = this.getHandlers(); - - if (!handlers.is_server or this.socket.isDetached()) { + if (!this.handlers.is_server or this.socket.isDetached()) { return .js_undefined; } - const l: *Listener = @fieldParentPtr("handlers", handlers); + const l: *Listener = @fieldParentPtr("handlers", this.handlers); return l.strong_self.get() orelse .js_undefined; } @@ -1350,14 +1341,13 @@ pub fn NewSocket(comptime ssl: bool) type { return globalObject.throw("Expected \"socket\" option", .{}); }; - var prev_handlers = this.getHandlers(); - - const handlers = try Handlers.fromJS(globalObject, socket_obj, prev_handlers.is_server); + const handlers = try Handlers.fromJS(globalObject, socket_obj, this.handlers.is_server); + var prev_handlers = this.handlers; prev_handlers.unprotect(); - this.handlers.?.* = handlers; // TODO: this is a memory leak - this.handlers.?.withAsyncContextIfNeeded(globalObject); - this.handlers.?.protect(); + this.handlers.* = handlers; // TODO: this is a memory leak + 
this.handlers.withAsyncContextIfNeeded(globalObject); + this.handlers.protect(); return .js_undefined; } @@ -1399,7 +1389,7 @@ pub fn NewSocket(comptime ssl: bool) type { return .zero; } - var handlers = try Handlers.fromJS(globalObject, socket_obj, this.isServer()); + var handlers = try Handlers.fromJS(globalObject, socket_obj, this.handlers.is_server); if (globalObject.hasException()) { return .zero; @@ -1529,23 +1519,20 @@ pub fn NewSocket(comptime ssl: bool) type { const vm = handlers.vm; var raw_handlers_ptr = bun.default_allocator.create(Handlers) catch bun.outOfMemory(); - raw_handlers_ptr.* = blk: { - const this_handlers = this.getHandlers(); - break :blk .{ - .vm = vm, - .globalObject = globalObject, - .onOpen = this_handlers.onOpen, - .onClose = this_handlers.onClose, - .onData = this_handlers.onData, - .onWritable = this_handlers.onWritable, - .onTimeout = this_handlers.onTimeout, - .onConnectError = this_handlers.onConnectError, - .onEnd = this_handlers.onEnd, - .onError = this_handlers.onError, - .onHandshake = this_handlers.onHandshake, - .binary_type = this_handlers.binary_type, - .is_server = this_handlers.is_server, - }; + raw_handlers_ptr.* = .{ + .vm = vm, + .globalObject = globalObject, + .onOpen = this.handlers.onOpen, + .onClose = this.handlers.onClose, + .onData = this.handlers.onData, + .onWritable = this.handlers.onWritable, + .onTimeout = this.handlers.onTimeout, + .onConnectError = this.handlers.onConnectError, + .onEnd = this.handlers.onEnd, + .onError = this.handlers.onError, + .onHandshake = this.handlers.onHandshake, + .binary_type = this.handlers.binary_type, + .is_server = this.handlers.is_server, }; raw_handlers_ptr.protect(); @@ -1575,7 +1562,7 @@ pub fn NewSocket(comptime ssl: bool) type { tls.markActive(); // we're unrefing the original instance and refing the TLS instance - tls.poll_ref.ref(this.getHandlers().vm); + tls.poll_ref.ref(this.handlers.vm); // mark both instances on socket data if (new_socket.ext(WrappedSocket)) 
|ctx| { @@ -1587,7 +1574,7 @@ pub fn NewSocket(comptime ssl: bool) type { this.flags.is_active = false; // will free handlers when hits 0 active connections // the connection can be upgraded inside a handler call so we need to guarantee that it will be still alive - this.getHandlers().markInactive(); + this.handlers.markInactive(); this.has_pending_activity.store(false, .release); } diff --git a/src/bun.js/api/bun/socket/Listener.zig b/src/bun.js/api/bun/socket/Listener.zig index e241a05542..d87cd2bf6d 100644 --- a/src/bun.js/api/bun/socket/Listener.zig +++ b/src/bun.js/api/bun/socket/Listener.zig @@ -626,9 +626,7 @@ pub fn connectInner(globalObject: *jsc.JSGlobalObject, prev_maybe_tcp: ?*TCPSock if (ssl_enabled) { var tls = if (prev_maybe_tls) |prev| blk: { - if (prev.handlers) |prev_handlers| { - bun.destroy(prev_handlers); - } + bun.destroy(prev.handlers); bun.assert(prev.this_value != .zero); prev.handlers = handlers_ptr; bun.assert(prev.socket.socket == .detached); diff --git a/src/bun.js/api/bun/socket/tls_socket_functions.zig b/src/bun.js/api/bun/socket/tls_socket_functions.zig index 43ee2f9a6b..37e6b84c42 100644 --- a/src/bun.js/api/bun/socket/tls_socket_functions.zig +++ b/src/bun.js/api/bun/socket/tls_socket_functions.zig @@ -9,7 +9,7 @@ pub fn getServername(this: *This, globalObject: *jsc.JSGlobalObject, _: *jsc.Cal } pub fn setServername(this: *This, globalObject: *jsc.JSGlobalObject, callframe: *jsc.CallFrame) bun.JSError!JSValue { - if (this.isServer()) { + if (this.handlers.is_server) { return globalObject.throw("Cannot issue SNI from a TLS server-side socket", .{}); } @@ -118,7 +118,7 @@ pub fn getPeerCertificate(this: *This, globalObject: *jsc.JSGlobalObject, callfr const ssl_ptr = this.socket.ssl() orelse return .js_undefined; if (abbreviated) { - if (this.isServer()) { + if (this.handlers.is_server) { const cert = BoringSSL.SSL_get_peer_certificate(ssl_ptr); if (cert) |x509| { return X509.toJS(x509, globalObject); @@ -130,7 +130,7 @@ pub fn 
getPeerCertificate(this: *This, globalObject: *jsc.JSGlobalObject, callfr return X509.toJS(cert, globalObject); } var cert: ?*BoringSSL.X509 = null; - if (this.isServer()) { + if (this.handlers.is_server) { cert = BoringSSL.SSL_get_peer_certificate(ssl_ptr); } @@ -380,7 +380,7 @@ pub fn exportKeyingMaterial(this: *This, globalObject: *jsc.JSGlobalObject, call pub fn getEphemeralKeyInfo(this: *This, globalObject: *jsc.JSGlobalObject, _: *jsc.CallFrame) bun.JSError!JSValue { // only available for clients - if (this.isServer()) { + if (this.handlers.is_server) { return JSValue.jsNull(); } var result = JSValue.createEmptyObject(globalObject, 3); @@ -553,7 +553,7 @@ pub fn setVerifyMode(this: *This, globalObject: *jsc.JSGlobalObject, callframe: const request_cert = request_cert_js.toBoolean(); const reject_unauthorized = request_cert_js.toBoolean(); var verify_mode: c_int = BoringSSL.SSL_VERIFY_NONE; - if (this.isServer()) { + if (this.handlers.is_server) { if (request_cert) { verify_mode = BoringSSL.SSL_VERIFY_PEER; if (reject_unauthorized) diff --git a/src/bun.js/bindings/ScriptExecutionContext.cpp b/src/bun.js/bindings/ScriptExecutionContext.cpp index 721e437026..e43991ceb6 100644 --- a/src/bun.js/bindings/ScriptExecutionContext.cpp +++ b/src/bun.js/bindings/ScriptExecutionContext.cpp @@ -69,11 +69,8 @@ static HashMap& allSc ScriptExecutionContext* ScriptExecutionContext::getScriptExecutionContext(ScriptExecutionContextIdentifier identifier) { - if (identifier == 0) { - return nullptr; - } Locker locker { allScriptExecutionContextsMapLock }; - return allScriptExecutionContextsMap().getOptional(identifier).value_or(nullptr); + return allScriptExecutionContextsMap().get(identifier); } template diff --git a/src/bun.js/bindings/workaround-missing-symbols.cpp b/src/bun.js/bindings/workaround-missing-symbols.cpp index 15703aa7b9..928db9754f 100644 --- a/src/bun.js/bindings/workaround-missing-symbols.cpp +++ b/src/bun.js/bindings/workaround-missing-symbols.cpp @@ -299,5 
+299,3 @@ extern "C" bool icu_hasBinaryProperty(UChar32 cp, unsigned int prop) { return u_hasBinaryProperty(cp, static_cast(prop)); } - -extern "C" __attribute__((weak)) void mi_thread_set_in_threadpool() {} diff --git a/src/bun.js/node/node_net_binding.zig b/src/bun.js/node/node_net_binding.zig index ee9be71556..1bb908171b 100644 --- a/src/bun.js/node/node_net_binding.zig +++ b/src/bun.js/node/node_net_binding.zig @@ -80,7 +80,7 @@ pub fn newDetachedSocket(globalThis: *jsc.JSGlobalObject, callframe: *jsc.CallFr .socket_context = null, .ref_count = .init(), .protos = null, - .handlers = null, + .handlers = undefined, }); return socket.getThisValue(globalThis); } else { @@ -89,7 +89,7 @@ pub fn newDetachedSocket(globalThis: *jsc.JSGlobalObject, callframe: *jsc.CallFr .socket_context = null, .ref_count = .init(), .protos = null, - .handlers = null, + .handlers = undefined, }); return socket.getThisValue(globalThis); } diff --git a/src/bun.js/web_worker.zig b/src/bun.js/web_worker.zig index 1b561dce7a..093fa17664 100644 --- a/src/bun.js/web_worker.zig +++ b/src/bun.js/web_worker.zig @@ -312,12 +312,12 @@ pub fn start( this.arena = try bun.MimallocArena.init(); var vm = try jsc.VirtualMachine.initWorker(this, .{ - .allocator = bun.default_allocator, + .allocator = this.arena.?.allocator(), .args = transform_options, .store_fd = this.store_fd, .graph = this.parent.standalone_module_graph, }); - vm.allocator = bun.default_allocator; + vm.allocator = this.arena.?.allocator(); vm.arena = &this.arena.?; var b = &vm.transpiler; diff --git a/src/bun.js/webcore/ScriptExecutionContext.zig b/src/bun.js/webcore/ScriptExecutionContext.zig index 002a698ea3..7cfc11f899 100644 --- a/src/bun.js/webcore/ScriptExecutionContext.zig +++ b/src/bun.js/webcore/ScriptExecutionContext.zig @@ -15,10 +15,6 @@ pub const Identifier = enum(u32) { // concurrently because we expect these identifiers are mostly used by off-thread tasks return (self.globalObject() orelse return 
null).bunVMConcurrently(); } - - pub fn valid(self: Identifier) bool { - return self.globalObject() != null; - } }; const bun = @import("bun"); diff --git a/src/bundler/LinkerContext.zig b/src/bundler/LinkerContext.zig index 083efafd20..b3659c1fb6 100644 --- a/src/bundler/LinkerContext.zig +++ b/src/bundler/LinkerContext.zig @@ -22,6 +22,8 @@ pub const LinkerContext = struct { options: LinkerOptions = .{}, + ambiguous_result_pool: std.ArrayList(MatchImport) = undefined, + loop: EventLoop, /// string buffer containing pre-formatted unique keys @@ -145,25 +147,18 @@ pub const LinkerContext = struct { ); } - pub fn computeQuotedSourceContents(this: *LinkerContext, _: std.mem.Allocator, source_index: Index.Int) void { + pub fn computeQuotedSourceContents(this: *LinkerContext, allocator: std.mem.Allocator, source_index: Index.Int) void { debug("Computing Quoted Source Contents: {d}", .{source_index}); const loader: options.Loader = this.parse_graph.input_files.items(.loader)[source_index]; - const quoted_source_contents: *?[]u8 = &this.graph.files.items(.quoted_source_contents)[source_index]; + const quoted_source_contents: *string = &this.graph.files.items(.quoted_source_contents)[source_index]; if (!loader.canHaveSourceMap()) { - if (quoted_source_contents.*) |slice| { - bun.default_allocator.free(slice); - quoted_source_contents.* = null; - } + quoted_source_contents.* = ""; return; } const source: *const Logger.Source = &this.parse_graph.input_files.items(.source)[source_index]; - var mutable = MutableString.initEmpty(bun.default_allocator); - js_printer.quoteForJSON(source.contents, &mutable, false) catch bun.outOfMemory(); - if (quoted_source_contents.*) |slice| { - bun.default_allocator.free(slice); - } - quoted_source_contents.* = mutable.slice(); + const mutable = MutableString.initEmpty(allocator); + quoted_source_contents.* = (js_printer.quoteForJSON(source.contents, mutable, false) catch bun.outOfMemory()).list.items; } }; @@ -213,6 +208,7 @@ pub const 
LinkerContext = struct { try this.graph.load(entry_points, sources, server_component_boundaries, bundle.dynamic_import_entry_points.keys()); bundle.dynamic_import_entry_points.deinit(); + this.ambiguous_result_pool = std.ArrayList(MatchImport).init(this.allocator); var runtime_named_exports = &this.graph.ast.items(.named_exports)[Index.runtime.get()]; @@ -713,8 +709,8 @@ pub const LinkerContext = struct { } var quote_buf = try MutableString.init(worker.allocator, path.pretty.len + 2); - try js_printer.quoteForJSON(path.pretty, "e_buf, false); - j.pushStatic(quote_buf.slice()); // freed by arena + quote_buf = try js_printer.quoteForJSON(path.pretty, quote_buf, false); + j.pushStatic(quote_buf.list.items); // freed by arena } var next_mapping_source_index: i32 = 1; @@ -734,8 +730,8 @@ pub const LinkerContext = struct { var quote_buf = try MutableString.init(worker.allocator, path.pretty.len + ", ".len + 2); quote_buf.appendAssumeCapacity(", "); - try js_printer.quoteForJSON(path.pretty, "e_buf, false); - j.pushStatic(quote_buf.slice()); // freed by arena + quote_buf = try js_printer.quoteForJSON(path.pretty, quote_buf, false); + j.pushStatic(quote_buf.list.items); // freed by arena } } @@ -747,11 +743,11 @@ pub const LinkerContext = struct { const source_indices_for_contents = source_id_map.keys(); if (source_indices_for_contents.len > 0) { j.pushStatic("\n "); - j.pushStatic(quoted_source_map_contents[source_indices_for_contents[0]] orelse ""); + j.pushStatic(quoted_source_map_contents[source_indices_for_contents[0]]); for (source_indices_for_contents[1..]) |index| { j.pushStatic(",\n "); - j.pushStatic(quoted_source_map_contents[index] orelse ""); + j.pushStatic(quoted_source_map_contents[index]); } } j.pushStatic( @@ -2421,11 +2417,7 @@ pub const LinkerContext = struct { // 4. 
externals return .{ .joiner = j.* }; - var pieces = brk: { - errdefer j.deinit(); - break :brk try std.ArrayList(OutputPiece).initCapacity(allocator, count); - }; - errdefer pieces.deinit(); + var pieces = try std.ArrayList(OutputPiece).initCapacity(allocator, count); const complete_output = try j.done(allocator); var output = complete_output; diff --git a/src/bundler/LinkerGraph.zig b/src/bundler/LinkerGraph.zig index 1fabfea6d0..45330f3d93 100644 --- a/src/bundler/LinkerGraph.zig +++ b/src/bundler/LinkerGraph.zig @@ -429,7 +429,7 @@ pub const File = struct { entry_point_chunk_index: u32 = std.math.maxInt(u32), line_offset_table: bun.sourcemap.LineOffsetTable.List = .empty, - quoted_source_contents: ?[]u8 = null, + quoted_source_contents: string = "", pub fn isEntryPoint(this: *const File) bool { return this.entry_point_kind.isEntryPoint(); diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig index da5dacd792..195ad49cda 100644 --- a/src/bundler/bundle_v2.zig +++ b/src/bundler/bundle_v2.zig @@ -3879,21 +3879,6 @@ pub const CompileResult = union(enum) { javascript: struct { source_index: Index.Int, result: js_printer.PrintResult, - - pub fn code(this: @This()) []const u8 { - return switch (this.result) { - .result => |result| result.code, - else => "", - }; - } - - pub fn allocator(this: @This()) std.mem.Allocator { - return switch (this.result) { - .result => |result| result.code_allocator, - // empty slice can be freed by any allocator - else => bun.default_allocator, - }; - } }, css: struct { result: bun.Maybe([]const u8, anyerror), @@ -3913,7 +3898,6 @@ pub const CompileResult = union(enum) { .result = js_printer.PrintResult{ .result = .{ .code = "", - .code_allocator = bun.default_allocator, }, }, }, @@ -3921,7 +3905,10 @@ pub const CompileResult = union(enum) { pub fn code(this: *const CompileResult) []const u8 { return switch (this.*) { - .javascript => |r| r.code(), + .javascript => |r| switch (r.result) { + .result => |r2| r2.code, + else => 
"", + }, .css => |*c| switch (c.result) { .result => |v| v, .err => "", @@ -3930,13 +3917,6 @@ pub const CompileResult = union(enum) { }; } - pub fn allocator(this: *const CompileResult) ?std.mem.Allocator { - return switch (this.*) { - .javascript => |js| js.allocator(), - else => null, - }; - } - pub fn sourceMapChunk(this: *const CompileResult) ?sourcemap.Chunk { return switch (this.*) { .javascript => |r| switch (r.result) { diff --git a/src/bundler/linker_context/generateChunksInParallel.zig b/src/bundler/linker_context/generateChunksInParallel.zig index d4a33f371c..e3d868f846 100644 --- a/src/bundler/linker_context/generateChunksInParallel.zig +++ b/src/bundler/linker_context/generateChunksInParallel.zig @@ -71,7 +71,7 @@ pub fn generateChunksInParallel( } { - const chunk_contexts = c.allocator.alloc(GenerateChunkCtx, chunks.len) catch bun.outOfMemory(); + const chunk_contexts = c.allocator.alloc(GenerateChunkCtx, chunks.len) catch unreachable; defer c.allocator.free(chunk_contexts); { @@ -102,7 +102,7 @@ pub fn generateChunksInParallel( debug(" START {d} compiling part ranges", .{total_count}); defer debug(" DONE {d} compiling part ranges", .{total_count}); - const combined_part_ranges = c.allocator.alloc(PendingPartRange, total_count) catch bun.outOfMemory(); + const combined_part_ranges = c.allocator.alloc(PendingPartRange, total_count) catch unreachable; defer c.allocator.free(combined_part_ranges); var remaining_part_ranges = combined_part_ranges; var batch = ThreadPoolLib.Batch{}; @@ -315,7 +315,7 @@ pub fn generateChunksInParallel( } const bundler = @as(*bun.bundle_v2.BundleV2, @fieldParentPtr("linker", c)); - var static_route_visitor = StaticRouteVisitor{ .c = c, .visited = bun.bit_set.AutoBitSet.initEmpty(bun.default_allocator, c.graph.files.len) catch bun.outOfMemory() }; + var static_route_visitor = StaticRouteVisitor{ .c = c, .visited = bun.bit_set.AutoBitSet.initEmpty(bun.default_allocator, c.graph.files.len) catch unreachable }; defer 
static_route_visitor.deinit(); if (root_path.len > 0) { @@ -354,7 +354,7 @@ pub fn generateChunksInParallel( switch (chunk.content.sourcemap(c.options.source_maps)) { .external, .linked => |tag| { const output_source_map = chunk.output_source_map.finalize(bun.default_allocator, code_result.shifts) catch @panic("Failed to allocate memory for external source map"); - var source_map_final_rel_path = bun.default_allocator.alloc(u8, chunk.final_rel_path.len + ".map".len) catch bun.outOfMemory(); + var source_map_final_rel_path = bun.default_allocator.alloc(u8, chunk.final_rel_path.len + ".map".len) catch unreachable; bun.copy(u8, source_map_final_rel_path, chunk.final_rel_path); bun.copy(u8, source_map_final_rel_path[chunk.final_rel_path.len..], ".map"); @@ -443,8 +443,8 @@ pub fn generateChunksInParallel( fdpath[chunk.final_rel_path.len..][0..bun.bytecode_extension.len].* = bun.bytecode_extension.*; break :brk options.OutputFile.init(.{ - .output_path = bun.default_allocator.dupe(u8, source_provider_url_str.slice()) catch bun.outOfMemory(), - .input_path = std.fmt.allocPrint(bun.default_allocator, "{s}" ++ bun.bytecode_extension, .{chunk.final_rel_path}) catch bun.outOfMemory(), + .output_path = bun.default_allocator.dupe(u8, source_provider_url_str.slice()) catch unreachable, + .input_path = std.fmt.allocPrint(bun.default_allocator, "{s}" ++ bun.bytecode_extension, .{chunk.final_rel_path}) catch unreachable, .input_loader = .js, .hash = if (chunk.template.placeholder.hash != null) bun.hash(bytecode) else null, .output_kind = .bytecode, @@ -462,7 +462,7 @@ pub fn generateChunksInParallel( // an error c.log.addErrorFmt(null, Logger.Loc.Empty, bun.default_allocator, "Failed to generate bytecode for {s}", .{ chunk.final_rel_path, - }) catch bun.outOfMemory(); + }) catch unreachable; } } } diff --git a/src/bundler/linker_context/generateCodeForFileInChunkJS.zig b/src/bundler/linker_context/generateCodeForFileInChunkJS.zig index fd03de0e42..daa8a316dd 100644 --- 
a/src/bundler/linker_context/generateCodeForFileInChunkJS.zig +++ b/src/bundler/linker_context/generateCodeForFileInChunkJS.zig @@ -603,8 +603,7 @@ pub fn generateCodeForFileInChunkJS( if (out_stmts.len == 0) { return .{ .result = .{ - .code = "", - .code_allocator = bun.default_allocator, + .code = &[_]u8{}, .source_map = null, }, }; diff --git a/src/bundler/linker_context/generateCompileResultForJSChunk.zig b/src/bundler/linker_context/generateCompileResultForJSChunk.zig index cd0b13c8fc..3996ab0ce8 100644 --- a/src/bundler/linker_context/generateCompileResultForJSChunk.zig +++ b/src/bundler/linker_context/generateCompileResultForJSChunk.zig @@ -64,8 +64,8 @@ fn generateCompileResultForJSChunkImpl(worker: *ThreadPool.Worker, c: *LinkerCon return .{ .javascript = .{ - .source_index = part_range.source_index.get(), .result = result, + .source_index = part_range.source_index.get(), }, }; } diff --git a/src/bundler/linker_context/postProcessJSChunk.zig b/src/bundler/linker_context/postProcessJSChunk.zig index 1d4b99a431..c8b696ceae 100644 --- a/src/bundler/linker_context/postProcessJSChunk.zig +++ b/src/bundler/linker_context/postProcessJSChunk.zig @@ -16,8 +16,8 @@ pub fn postProcessJSChunk(ctx: GenerateChunkCtx, worker: *ThreadPool.Worker, chu defer arena.deinit(); // Also generate the cross-chunk binding code - var cross_chunk_prefix: js_printer.PrintResult = undefined; - var cross_chunk_suffix: js_printer.PrintResult = undefined; + var cross_chunk_prefix: []u8 = &.{}; + var cross_chunk_suffix: []u8 = &.{}; var runtime_scope: *Scope = &c.graph.ast.items(.module_scope)[c.graph.files.items(.input_file)[Index.runtime.value].get()]; var runtime_members = &runtime_scope.members; @@ -68,7 +68,7 @@ pub fn postProcessJSChunk(ctx: GenerateChunkCtx, worker: *ThreadPool.Worker, chu }, chunk.renamer, false, - ); + ).result.code; cross_chunk_suffix = js_printer.print( worker.allocator, c.resolver.opts.target, @@ -81,7 +81,7 @@ pub fn postProcessJSChunk(ctx: GenerateChunkCtx, 
worker: *ThreadPool.Worker, chu }, chunk.renamer, false, - ); + ).result.code; } // Generate the exports for the entry point, if there are any @@ -107,7 +107,6 @@ pub fn postProcessJSChunk(ctx: GenerateChunkCtx, worker: *ThreadPool.Worker, chu .input = chunk.unique_key, }, }; - errdefer j.deinit(); const output_format = c.options.output_format; var line_offset: bun.sourcemap.LineColumnOffset.Optional = if (c.options.source_maps != .none) .{ .value = .{} } else .{ .null = {} }; @@ -120,7 +119,7 @@ pub fn postProcessJSChunk(ctx: GenerateChunkCtx, worker: *ThreadPool.Worker, chu // Start with the hashbang if there is one. This must be done before the // banner because it only works if it's literally the first character. if (chunk.isEntryPoint()) { - const is_bun = c.graph.ast.items(.target)[chunk.entry_point.source_index].isBun(); + const is_bun = ctx.c.graph.ast.items(.target)[chunk.entry_point.source_index].isBun(); const hashbang = c.graph.ast.items(.hashbang)[chunk.entry_point.source_index]; if (hashbang.len > 0) { @@ -200,10 +199,10 @@ pub fn postProcessJSChunk(ctx: GenerateChunkCtx, worker: *ThreadPool.Worker, chu else => {}, // no wrapper } - if (cross_chunk_prefix.result.code.len > 0) { + if (cross_chunk_prefix.len > 0) { newline_before_comment = true; - line_offset.advance(cross_chunk_prefix.result.code); - j.push(cross_chunk_prefix.result.code, cross_chunk_prefix.result.code_allocator); + line_offset.advance(cross_chunk_prefix); + j.push(cross_chunk_prefix, bun.default_allocator); } // Concatenate the generated JavaScript chunks together @@ -323,16 +322,16 @@ pub fn postProcessJSChunk(ctx: GenerateChunkCtx, worker: *ThreadPool.Worker, chu // Stick the entry point tail at the end of the file. Deliberately don't // include any source mapping information for this because it's automatically // generated and doesn't correspond to a location in the input file. 
- j.push(tail_code, entry_point_tail.allocator()); + j.push(tail_code, bun.default_allocator); } // Put the cross-chunk suffix inside the IIFE - if (cross_chunk_suffix.result.code.len > 0) { + if (cross_chunk_suffix.len > 0) { if (newline_before_comment) { j.pushStatic("\n"); } - j.push(cross_chunk_suffix.result.code, cross_chunk_suffix.result.code_allocator); + j.push(cross_chunk_suffix, bun.default_allocator); } switch (output_format) { @@ -355,7 +354,7 @@ pub fn postProcessJSChunk(ctx: GenerateChunkCtx, worker: *ThreadPool.Worker, chu { const input = c.parse_graph.input_files.items(.source)[chunk.entry_point.source_index].path; var buf = MutableString.initEmpty(worker.allocator); - js_printer.quoteForJSON(input.pretty, &buf, true) catch bun.outOfMemory(); + js_printer.quoteForJSONBuffer(input.pretty, &buf, true) catch bun.outOfMemory(); const str = buf.slice(); // worker.allocator is an arena j.pushStatic(str); line_offset.advance(str); @@ -816,7 +815,6 @@ pub fn generateEntryPointTailJS( .source_index = source_index, .result = .{ .result = .{ .code = "", - .code_allocator = bun.default_allocator, } }, }, }; diff --git a/src/http/HTTPThread.zig b/src/http/HTTPThread.zig index deb4187602..354cf93483 100644 --- a/src/http/HTTPThread.zig +++ b/src/http/HTTPThread.zig @@ -196,7 +196,7 @@ pub fn init(opts: *const InitOpts) void { pub fn onStart(opts: InitOpts) void { Output.Source.configureNamedThread("HTTP Client"); bun.http.default_arena = Arena.init() catch unreachable; - bun.http.default_allocator = bun.default_allocator; + bun.http.default_allocator = bun.http.default_arena.allocator(); const loop = bun.jsc.MiniEventLoop.initGlobal(null); diff --git a/src/js_printer.zig b/src/js_printer.zig index 60c453a573..5d304f8def 100644 --- a/src/js_printer.zig +++ b/src/js_printer.zig @@ -145,6 +145,12 @@ pub fn estimateLengthForUTF8(input: []const u8, comptime ascii_only: bool, compt return len; } +pub fn quoteForJSON(text: []const u8, output_: MutableString, comptime 
ascii_only: bool) !MutableString { + var bytes = output_; + try quoteForJSONBuffer(text, &bytes, ascii_only); + return bytes; +} + pub fn writePreQuotedString(text_in: []const u8, comptime Writer: type, writer: Writer, comptime quote_char: u8, comptime ascii_only: bool, comptime json: bool, comptime encoding: strings.Encoding) !void { const text = if (comptime encoding == .utf16) @as([]const u16, @alignCast(std.mem.bytesAsSlice(u16, text_in))) else text_in; if (comptime json and quote_char != '"') @compileError("for json, quote_char must be '\"'"); @@ -341,7 +347,7 @@ pub fn writePreQuotedString(text_in: []const u8, comptime Writer: type, writer: } } } -pub fn quoteForJSON(text: []const u8, bytes: *MutableString, comptime ascii_only: bool) !void { +pub fn quoteForJSONBuffer(text: []const u8, bytes: *MutableString, comptime ascii_only: bool) !void { const writer = bytes.writer(); try bytes.growIfNeeded(estimateLengthForUTF8(text, ascii_only, '"')); @@ -483,14 +489,28 @@ pub const RequireOrImportMeta = struct { }; pub const PrintResult = union(enum) { - result: Success, + result: struct { + code: []u8, + source_map: ?SourceMap.Chunk = null, + }, err: anyerror, - pub const Success = struct { - code: []u8, - code_allocator: std.mem.Allocator, - source_map: ?SourceMap.Chunk = null, - }; + pub fn clone( + this: PrintResult, + allocator: std.mem.Allocator, + ) !PrintResult { + return switch (this) { + .result => PrintResult{ + .result = .{ + .code = try allocator.dupe(u8, this.result.code), + .source_map = this.result.source_map, + }, + }, + .err => PrintResult{ + .err = this.err, + }, + }; + } }; // do not make this a packed struct @@ -5380,10 +5400,6 @@ pub fn NewWriter( return this.ctx.getMutableBuffer(); } - pub fn takeBuffer(this: *Self) MutableString { - return this.ctx.takeBuffer(); - } - pub fn slice(this: *Self) string { return this.ctx.slice(); } @@ -5488,11 +5504,6 @@ pub const BufferWriter = struct { return &this.buffer; } - pub fn takeBuffer(this: 
*BufferWriter) MutableString { - defer this.buffer = .initEmpty(this.buffer.allocator); - return this.buffer; - } - pub fn getWritten(this: *BufferWriter) []u8 { return this.buffer.list.items; } @@ -5797,13 +5808,11 @@ pub fn printAst( if (comptime FeatureFlags.runtime_transpiler_cache and generate_source_map) { if (opts.source_map_handler) |handler| { - var source_maps_chunk = printer.source_map_builder.generateChunk(printer.writer.ctx.getWritten()); + const source_maps_chunk = printer.source_map_builder.generateChunk(printer.writer.ctx.getWritten()); if (opts.runtime_transpiler_cache) |cache| { cache.put(printer.writer.ctx.getWritten(), source_maps_chunk.buffer.list.items); } - defer source_maps_chunk.deinit(); - try handler.onSourceMapChunk(source_maps_chunk, source); } else { if (opts.runtime_transpiler_cache) |cache| { @@ -5812,9 +5821,7 @@ pub fn printAst( } } else if (comptime generate_source_map) { if (opts.source_map_handler) |handler| { - var chunk = printer.source_map_builder.generateChunk(printer.writer.ctx.getWritten()); - defer chunk.deinit(); - try handler.onSourceMapChunk(chunk, source); + try handler.onSourceMapChunk(printer.source_map_builder.generateChunk(printer.writer.ctx.getWritten()), source); } } @@ -6004,12 +6011,9 @@ pub fn printWithWriterAndPlatform( break :brk chunk; } else null; - var buffer = printer.writer.takeBuffer(); - return .{ .result = .{ - .code = buffer.toOwnedSlice(), - .code_allocator = buffer.allocator, + .code = written, .source_map = source_map, }, }; @@ -6058,9 +6062,7 @@ pub fn printCommonJS( if (comptime generate_source_map) { if (opts.source_map_handler) |handler| { - var chunk = printer.source_map_builder.generateChunk(printer.writer.ctx.getWritten()); - defer chunk.deinit(); - try handler.onSourceMapChunk(chunk, source); + try handler.onSourceMapChunk(printer.source_map_builder.generateChunk(printer.writer.ctx.getWritten()), source); } } diff --git a/src/sourcemap/sourcemap.zig b/src/sourcemap/sourcemap.zig index 
606db47050..20e3096910 100644 --- a/src/sourcemap/sourcemap.zig +++ b/src/sourcemap/sourcemap.zig @@ -1522,9 +1522,15 @@ pub fn appendSourceMapChunk( start_state.original_line += original_line.value; start_state.original_column += original_column.value; - var str = MutableString.initEmpty(allocator); - appendMappingToBuffer(&str, j.lastByte(), prev_end_state, start_state); - j.push(str.slice(), allocator); + j.push( + appendMappingToBuffer( + MutableString.initEmpty(allocator), + j.lastByte(), + prev_end_state, + start_state, + ).list.items, + allocator, + ); // Then append everything after that without modification. j.pushStatic(source_map); @@ -1549,7 +1555,8 @@ pub fn appendSourceMappingURLRemote( } /// This function is extremely hot. -pub fn appendMappingToBuffer(buffer: *MutableString, last_byte: u8, prev_state: SourceMapState, current_state: SourceMapState) void { +pub fn appendMappingToBuffer(buffer_: MutableString, last_byte: u8, prev_state: SourceMapState, current_state: SourceMapState) MutableString { + var buffer = buffer_; const needs_comma = last_byte != 0 and last_byte != ';' and last_byte != '"'; const vlqs = [_]VLQ{ @@ -1582,6 +1589,8 @@ pub fn appendMappingToBuffer(buffer: *MutableString, last_byte: u8, prev_state: @memcpy(writable[0..item.len], item.slice()); writable = writable[item.len..]; } + + return buffer; } pub const Chunk = struct { @@ -1601,28 +1610,22 @@ pub const Chunk = struct { /// ignore empty chunks should_ignore: bool = true, - pub fn initEmpty() Chunk { - return .{ - .buffer = MutableString.initEmpty(bun.default_allocator), - .mappings_count = 0, - .end_state = .{}, - .final_generated_column = 0, - .should_ignore = true, - }; - } - - pub fn deinit(this: *Chunk) void { - this.buffer.deinit(); - } + pub const empty: Chunk = .{ + .buffer = MutableString.initEmpty(bun.default_allocator), + .mappings_count = 0, + .end_state = .{}, + .final_generated_column = 0, + .should_ignore = true, + }; pub fn printSourceMapContents( chunk: Chunk, 
source: *const Logger.Source, - mutable: *MutableString, + mutable: MutableString, include_sources_contents: bool, comptime ascii_only: bool, - ) !void { - try printSourceMapContentsAtOffset( + ) !MutableString { + return printSourceMapContentsAtOffset( chunk, source, mutable, @@ -1635,11 +1638,13 @@ pub const Chunk = struct { pub fn printSourceMapContentsAtOffset( chunk: Chunk, source: *const Logger.Source, - mutable: *MutableString, + mutable: MutableString, include_sources_contents: bool, offset: usize, comptime ascii_only: bool, - ) !void { + ) !MutableString { + var output = mutable; + // attempt to pre-allocate var filename_buf: bun.PathBuffer = undefined; @@ -1652,21 +1657,23 @@ pub const Chunk = struct { filename = filename_buf[0 .. filename.len + 1]; } - mutable.growIfNeeded( + output.growIfNeeded( filename.len + 2 + (source.contents.len * @as(usize, @intFromBool(include_sources_contents))) + (chunk.buffer.list.items.len - offset) + 32 + 39 + 29 + 22 + 20, ) catch unreachable; - try mutable.append("{\n \"version\":3,\n \"sources\": ["); + try output.append("{\n \"version\":3,\n \"sources\": ["); - try JSPrinter.quoteForJSON(filename, mutable, ascii_only); + output = try JSPrinter.quoteForJSON(filename, output, ascii_only); if (include_sources_contents) { - try mutable.append("],\n \"sourcesContent\": ["); - try JSPrinter.quoteForJSON(source.contents, mutable, ascii_only); + try output.append("],\n \"sourcesContent\": ["); + output = try JSPrinter.quoteForJSON(source.contents, output, ascii_only); } - try mutable.append("],\n \"mappings\": "); - try JSPrinter.quoteForJSON(chunk.buffer.list.items[offset..], mutable, ascii_only); - try mutable.append(", \"names\": []\n}"); + try output.append("],\n \"mappings\": "); + output = try JSPrinter.quoteForJSON(chunk.buffer.list.items[offset..], output, ascii_only); + try output.append(", \"names\": []\n}"); + + return output; } // TODO: remove the indirection by having generic functions for SourceMapFormat and 
NewBuilder. Source maps are always VLQ @@ -1695,10 +1702,6 @@ pub const Chunk = struct { return this.ctx.getBuffer(); } - pub inline fn takeBuffer(this: *Format) MutableString { - return this.ctx.takeBuffer(); - } - pub inline fn getCount(this: Format) usize { return this.ctx.getCount(); } @@ -1711,6 +1714,8 @@ pub const Chunk = struct { offset: usize = 0, approximate_input_line_count: usize = 0, + pub const Format = SourceMapFormat(VLQSourceMap); + pub fn init(allocator: std.mem.Allocator, prepend_count: bool) VLQSourceMap { var map = VLQSourceMap{ .data = MutableString.initEmpty(allocator), @@ -1735,7 +1740,7 @@ pub const Chunk = struct { else 0; - appendMappingToBuffer(&this.data, last_byte, prev_state, current_state); + this.data = appendMappingToBuffer(this.data, last_byte, prev_state, current_state); this.count += 1; } @@ -1747,11 +1752,6 @@ pub const Chunk = struct { return this.data; } - pub fn takeBuffer(this: *VLQSourceMap) MutableString { - defer this.data = .initEmpty(this.data.allocator); - return this.data; - } - pub fn getCount(this: VLQSourceMap) usize { return this.count; } @@ -1760,6 +1760,7 @@ pub const Chunk = struct { pub fn NewBuilder(comptime SourceMapFormatType: type) type { return struct { const ThisBuilder = @This(); + input_source_map: ?*SourceMap = null, source_map: SourceMapper, line_offset_tables: LineOffsetTable.List = .{}, prev_state: SourceMapState = SourceMapState{}, @@ -1790,14 +1791,13 @@ pub const Chunk = struct { pub noinline fn generateChunk(b: *ThisBuilder, output: []const u8) Chunk { b.updateGeneratedLineAndColumn(output); - var buffer = b.source_map.getBuffer(); if (b.prepend_count) { - buffer.list.items[0..8].* = @as([8]u8, @bitCast(buffer.list.items.len)); - buffer.list.items[8..16].* = @as([8]u8, @bitCast(b.source_map.getCount())); - buffer.list.items[16..24].* = @as([8]u8, @bitCast(b.approximate_input_line_count)); + b.source_map.getBuffer().list.items[0..8].* = @as([8]u8, 
@bitCast(b.source_map.getBuffer().list.items.len)); + b.source_map.getBuffer().list.items[8..16].* = @as([8]u8, @bitCast(b.source_map.getCount())); + b.source_map.getBuffer().list.items[16..24].* = @as([8]u8, @bitCast(b.approximate_input_line_count)); } return Chunk{ - .buffer = b.source_map.takeBuffer(), + .buffer = b.source_map.getBuffer(), .mappings_count = b.source_map.getCount(), .end_state = b.prev_state, .final_generated_column = b.generated_column, @@ -1873,7 +1873,17 @@ pub const Chunk = struct { b.last_generated_update = @as(u32, @truncate(output.len)); } - pub fn appendMapping(b: *ThisBuilder, current_state: SourceMapState) void { + pub fn appendMapping(b: *ThisBuilder, current_state_: SourceMapState) void { + var current_state = current_state_; + // If the input file had a source map, map all the way back to the original + if (b.input_source_map) |input| { + if (input.find(current_state.original_line, current_state.original_column)) |mapping| { + current_state.source_index = mapping.sourceIndex(); + current_state.original_line = mapping.originalLine(); + current_state.original_column = mapping.originalColumn(); + } + } + b.appendMappingWithoutRemapping(current_state); } diff --git a/src/string.zig b/src/string.zig index 37b15dcce5..9b924e9bdf 100644 --- a/src/string.zig +++ b/src/string.zig @@ -757,13 +757,13 @@ pub const String = extern struct { pub fn toThreadSafeSlice(this: *const String, allocator: std.mem.Allocator) SliceWithUnderlyingString { if (this.tag == .WTFStringImpl) { if (!this.value.WTFStringImpl.isThreadSafe()) { - const slice = this.value.WTFStringImpl.toUTF8(allocator); + const slice = this.value.WTFStringImpl.toUTF8WithoutRef(allocator); bun.debugAssert(!slice.allocator.isNull()); if (comptime bun.Environment.allow_assert) { - // bun.assert(!isWTFAllocator(slice.allocator.get().?)); // toUTF8WithoutRef() should never return a WTF allocator - // bun.assert(slice.allocator.get().?.vtable == allocator.vtable); // assert that the 
allocator is the same + bun.assert(!isWTFAllocator(slice.allocator.get().?)); // toUTF8WithoutRef() should never return a WTF allocator + bun.assert(slice.allocator.get().?.vtable == allocator.vtable); // assert that the allocator is the same } // We've already cloned the string, so let's just return the slice. diff --git a/src/string/MutableString.zig b/src/string/MutableString.zig index 42e22b2b3d..643e51ca39 100644 --- a/src/string/MutableString.zig +++ b/src/string/MutableString.zig @@ -240,7 +240,7 @@ pub inline fn lenI(self: *MutableString) i32 { return @as(i32, @intCast(self.list.items.len)); } -pub fn toOwnedSlice(self: *MutableString) []u8 { +pub fn toOwnedSlice(self: *MutableString) string { return self.list.toOwnedSlice(self.allocator) catch bun.outOfMemory(); // TODO } diff --git a/src/string/StringJoiner.zig b/src/string/StringJoiner.zig index bb2083d053..c18a7a54e0 100644 --- a/src/string/StringJoiner.zig +++ b/src/string/StringJoiner.zig @@ -104,20 +104,6 @@ pub fn done(this: *StringJoiner, allocator: Allocator) ![]u8 { return slice; } -pub fn deinit(this: *StringJoiner) void { - var current: ?*Node = this.head orelse { - assert(this.tail == null); - assert(this.len == 0); - return; - }; - - while (current) |node| { - const prev = node; - current = node.next; - prev.deinit(this.allocator); - } -} - /// Same as `.done`, but appends extra slice `end` pub fn doneWithEnd(this: *StringJoiner, allocator: Allocator, end: []const u8) ![]u8 { var current: ?*Node = this.head orelse { diff --git a/src/threading/ThreadPool.zig b/src/threading/ThreadPool.zig index 5862ccb281..610d7440b7 100644 --- a/src/threading/ThreadPool.zig +++ b/src/threading/ThreadPool.zig @@ -550,8 +550,6 @@ pub const Thread = struct { /// Thread entry point which runs a worker for the ThreadPool fn run(thread_pool: *ThreadPool) void { - bun.mimalloc.mi_thread_set_in_threadpool(); - { var counter_buf: [100]u8 = undefined; const int = counter.fetchAdd(1, .seq_cst); diff --git 
a/test/bake/dev/ecosystem.test.ts b/test/bake/dev/ecosystem.test.ts index 0f2aece4fa..e93a39ff6d 100644 --- a/test/bake/dev/ecosystem.test.ts +++ b/test/bake/dev/ecosystem.test.ts @@ -12,7 +12,6 @@ import { devTest } from "../bake-harness"; devTest("svelte component islands example", { fixture: "svelte-component-islands", timeoutMultiplier: 2, - skip: ["win32"], async test(dev) { const html = await dev.fetch("/").text(); if (html.includes("Bun__renderFallbackError")) throw new Error("failed"); diff --git a/test/internal/ban-limits.json b/test/internal/ban-limits.json index 9671650d8f..19df06ac7c 100644 --- a/test/internal/ban-limits.json +++ b/test/internal/ban-limits.json @@ -7,7 +7,7 @@ ".stdDir()": 40, ".stdFile()": 18, "// autofix": 168, - ": [a-zA-Z0-9_\\.\\*\\?\\[\\]\\(\\)]+ = undefined,": 229, + ": [a-zA-Z0-9_\\.\\*\\?\\[\\]\\(\\)]+ = undefined,": 230, "== alloc.ptr": 0, "== allocator.ptr": 0, "@import(\"bun\").": 0, diff --git a/test/js/bun/http/body-leak-test-fixture.ts b/test/js/bun/http/body-leak-test-fixture.ts index a8713fa094..7c50ad8848 100644 --- a/test/js/bun/http/body-leak-test-fixture.ts +++ b/test/js/bun/http/body-leak-test-fixture.ts @@ -39,7 +39,9 @@ const server = Bun.serve({ } } else if (url.endsWith("/incomplete-streaming")) { const reader = req.body?.getReader(); - await reader?.read(); + if (!reader) { + reader?.read(); + } } else if (url.endsWith("/streaming-echo")) { return new Response(req.body, { headers: { diff --git a/test/js/bun/perf/static-initializers.test.ts b/test/js/bun/perf/static-initializers.test.ts index 7648aa8862..1977f495f6 100644 --- a/test/js/bun/perf/static-initializers.test.ts +++ b/test/js/bun/perf/static-initializers.test.ts @@ -64,6 +64,6 @@ describe("static initializers", () => { expect( bunInitializers.length, `Do not add static initializers to Bun. Static initializers are called when Bun starts up, regardless of whether you use the variables or not. This makes Bun slower.`, - ).toBe(process.arch == "arm64" ? 
3 : 4); + ).toBe(process.arch == "arm64" ? 1 : 2); }); });