diff --git a/src/StandaloneModuleGraph.zig b/src/StandaloneModuleGraph.zig index 85d283d9e8..a87827a156 100644 --- a/src/StandaloneModuleGraph.zig +++ b/src/StandaloneModuleGraph.zig @@ -9,6 +9,8 @@ const Output = bun.Output; const Global = bun.Global; const Environment = bun.Environment; const Syscall = bun.sys; +const SourceMap = bun.sourcemap; +const StringPointer = bun.StringPointer; const w = std.os.windows; @@ -106,7 +108,7 @@ pub const StandaloneModuleGraph = struct { pub fn blob(this: *File, globalObject: *bun.JSC.JSGlobalObject) *bun.JSC.WebCore.Blob { if (this.cached_blob == null) { - var store = bun.JSC.WebCore.Blob.Store.init(@constCast(this.contents), bun.default_allocator); + const store = bun.JSC.WebCore.Blob.Store.init(@constCast(this.contents), bun.default_allocator); // make it never free store.ref(); @@ -130,24 +132,61 @@ pub const StandaloneModuleGraph = struct { }; pub const LazySourceMap = union(enum) { - compressed: []const u8, - decompressed: bun.sourcemap, + serialized: SerializedSourceMap, + parsed: *SourceMap.ParsedSourceMap, + none, - pub fn load(this: *LazySourceMap, log: *bun.logger.Log, allocator: std.mem.Allocator) !*bun.sourcemap { - if (this.* == .decompressed) return &this.decompressed; + /// It should not be possible for two decoding jobs to run on the same file at once, but take the lock to be safe + var init_lock: bun.Lock = .{}; - var decompressed = try allocator.alloc(u8, bun.zstd.getDecompressedSize(this.compressed)); - const result = bun.zstd.decompress(decompressed, this.compressed); - if (result == .err) { - allocator.free(decompressed); - log.addError(null, bun.logger.Loc.Empty, bun.span(result.err)) catch unreachable; - return error.@"Failed to decompress sourcemap"; - } - errdefer allocator.free(decompressed); - const bytes = decompressed[0..result.success]; + pub fn load(this: *LazySourceMap) ?*SourceMap.ParsedSourceMap { + init_lock.lock(); + defer init_lock.unlock(); - this.* = .{ .decompressed = try bun.sourcemap.parse(allocator, &bun.logger.Source.initPathString("sourcemap.json", bytes), log) }; - return &this.decompressed; + return switch (this.*) { + .none => null, + .parsed => |map| map, + .serialized => |serialized| { + var stored = switch (SourceMap.Mapping.parse( + bun.default_allocator, + serialized.mappingVLQ(), + null, + std.math.maxInt(i32), + std.math.maxInt(i32), + )) { + .success => |x| x, + .fail => { + this.* = .none; + return null; + }, + }; + + const source_files = serialized.sourceFileNames(); + const slices = bun.default_allocator.alloc(?[]u8, source_files.len * 2) catch bun.outOfMemory(); + + const file_names: [][]const u8 = @ptrCast(slices[0..source_files.len]); + const decompressed_contents_slice = slices[source_files.len..][0..source_files.len]; + for (file_names, source_files) |*dest, src| { + dest.* = src.slice(serialized.bytes); + } + + @memset(decompressed_contents_slice, null); + + const data = bun.new(SerializedSourceMap.Loaded, .{ + .map = serialized, + .decompressed_files = decompressed_contents_slice, + }); + + stored.external_source_names = file_names; + stored.underlying_provider = .{ .data = @truncate(@intFromPtr(data)) }; + stored.is_standalone_module_graph = true; + + const parsed = stored.new(); // allocate this on the heap + parsed.ref(); // never free + this.* = .{ .parsed = parsed }; + return parsed; + }, + }; } }; @@ -159,7 +198,7 @@ pub const StandaloneModuleGraph = struct { const trailer = "\n---- Bun! 
----\n"; - pub fn fromBytes(allocator: std.mem.Allocator, raw_bytes: []const u8, offsets: Offsets) !StandaloneModuleGraph { + pub fn fromBytes(allocator: std.mem.Allocator, raw_bytes: []u8, offsets: Offsets) !StandaloneModuleGraph { if (raw_bytes.len == 0) return StandaloneModuleGraph{ .files = bun.StringArrayHashMap(File).init(allocator), }; @@ -180,13 +219,18 @@ pub const StandaloneModuleGraph = struct { .name = sliceToZ(raw_bytes, module.name), .loader = module.loader, .contents = sliceToZ(raw_bytes, module.contents), - .sourcemap = LazySourceMap{ - .compressed = sliceTo(raw_bytes, module.sourcemap), - }, + .sourcemap = if (module.sourcemap.length > 0) + .{ .serialized = .{ + .bytes = @alignCast(sliceTo(raw_bytes, module.sourcemap)), + } } + else + .none, }, ); } + modules.lockPointers(); // make the pointers stable forever + return StandaloneModuleGraph{ .bytes = raw_bytes[0..offsets.byte_count], .files = modules, @@ -209,6 +253,7 @@ pub const StandaloneModuleGraph = struct { pub fn toBytes(allocator: std.mem.Allocator, prefix: []const u8, output_files: []const bun.options.OutputFile) ![]u8 { var serialize_trace = bun.tracy.traceNamed(@src(), "StandaloneModuleGraph.serialize"); defer serialize_trace.end(); + var entry_point_id: ?usize = null; var string_builder = bun.StringBuilder{}; var module_count: usize = 0; @@ -217,7 +262,11 @@ pub const StandaloneModuleGraph = struct { string_builder.countZ(prefix); if (output_file.value == .buffer) { if (output_file.output_kind == .sourcemap) { - string_builder.cap += bun.zstd.compressBound(output_file.value.buffer.bytes.len); + // This is an over-estimation to ensure that we allocate + // enough memory for the source-map contents. Calculating + // the exact amount is not possible without allocating as it + // involves a JSON parser. 
+ string_builder.cap += output_file.value.buffer.bytes.len * 2; } else { if (entry_point_id == null) { if (output_file.output_kind == .@"entry-point") { @@ -236,16 +285,19 @@ pub const StandaloneModuleGraph = struct { string_builder.cap += @sizeOf(CompiledModuleGraphFile) * output_files.len; string_builder.cap += trailer.len; string_builder.cap += 16; - - { - var offsets_ = Offsets{}; - string_builder.cap += std.mem.asBytes(&offsets_).len; - } + string_builder.cap += @sizeOf(Offsets); try string_builder.allocate(allocator); var modules = try std.ArrayList(CompiledModuleGraphFile).initCapacity(allocator, module_count); + var source_map_header_list = std.ArrayList(u8).init(allocator); + defer source_map_header_list.deinit(); + var source_map_string_list = std.ArrayList(u8).init(allocator); + defer source_map_string_list.deinit(); + var source_map_arena = bun.ArenaAllocator.init(allocator); + defer source_map_arena.deinit(); + for (output_files) |output_file| { if (output_file.output_kind == .sourcemap) { continue; @@ -270,12 +322,19 @@ pub const StandaloneModuleGraph = struct { }, }; if (output_file.source_map_index != std.math.maxInt(u32)) { - const remaining_slice = string_builder.allocatedSlice()[string_builder.len..]; - const compressed_result = bun.zstd.compress(remaining_slice, output_files[output_file.source_map_index].value.buffer.bytes, 1); - if (compressed_result == .err) { - bun.Output.panic("Unexpected error compressing sourcemap: {s}", .{bun.span(compressed_result.err)}); - } - module.sourcemap = string_builder.add(compressed_result.success); + defer source_map_header_list.clearRetainingCapacity(); + defer source_map_string_list.clearRetainingCapacity(); + _ = source_map_arena.reset(.retain_capacity); + try serializeJsonSourceMapForStandalone( + &source_map_header_list, + &source_map_string_list, + source_map_arena.allocator(), + output_files[output_file.source_map_index].value.buffer.bytes, + ); + module.sourcemap = string_builder.addConcat(&.{ + source_map_header_list.items, + source_map_string_list.items, + }); } modules.appendAssumeCapacity(module); } @@ -293,8 +352,12 @@ pub const StandaloneModuleGraph = struct { if (comptime Environment.isDebug) { // An expensive sanity check: - var graph = try fromBytes(allocator, output_bytes, offsets); - defer graph.files.deinit(); + var graph = try fromBytes(allocator, @alignCast(output_bytes), offsets); + defer { + graph.files.unlockPointers(); + graph.files.deinit(); + } + bun.assert_eql(graph.files.count(), modules.items.len); } @@ -824,4 +887,172 @@ pub const StandaloneModuleGraph = struct { else => @compileError("TODO"), } } + + /// Source map serialization in the bundler is specially designed to be + /// loaded in memory as is. Source contents are compressed with ZSTD to + /// reduce the file size, and mappings are stored as uncompressed VLQ. 
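+ /// An illustrative sketch of the serialized layout, as implied by the accessors below (every StringPointer offset is relative to the start of `bytes`): + /// + /// [Header][N x StringPointer: file names][N x StringPointer: zstd contents][mappings VLQ][string payload: names, then compressed contents]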
+ pub const SerializedSourceMap = struct { + bytes: []const u8, + + /// Following the header bytes: + /// - source_files_count number of StringPointer, file names + /// - source_files_count number of StringPointer, zstd compressed contents + /// - the mapping data, `map_bytes_length` bytes + /// - all the StringPointer contents + pub const Header = extern struct { + source_files_count: u32, + map_bytes_length: u32, + }; + + pub fn header(map: SerializedSourceMap) *align(1) const Header { + return @ptrCast(map.bytes.ptr); + } + + pub fn mappingVLQ(map: SerializedSourceMap) []const u8 { + const head = map.header(); + const start = @sizeOf(Header) + head.source_files_count * @sizeOf(StringPointer) * 2; + return map.bytes[start..][0..head.map_bytes_length]; + } + + pub fn sourceFileNames(map: SerializedSourceMap) []align(1) const StringPointer { + const head = map.header(); + return @as([*]align(1) const StringPointer, @ptrCast(map.bytes[@sizeOf(Header)..]))[0..head.source_files_count]; + } + + fn compressedSourceFiles(map: SerializedSourceMap) []align(1) const StringPointer { + const head = map.header(); + return @as([*]align(1) const StringPointer, @ptrCast(map.bytes[@sizeOf(Header)..]))[head.source_files_count..][0..head.source_files_count]; + } + + /// Once loaded, this map stores additional data for keeping track of source code. + pub const Loaded = struct { + map: SerializedSourceMap, + + /// Only decompress source code once! Once a file is decompressed, + /// it is stored here. Decompression failures are stored as an empty + /// string, which will be treated as "no contents". + decompressed_files: []?[]u8, + + pub fn sourceFileContents(this: Loaded, index: usize) ?[]const u8 { + if (this.decompressed_files[index]) |decompressed| { + return if (decompressed.len == 0) null else decompressed; + } + + const compressed_codes = this.map.compressedSourceFiles(); + const compressed_file = compressed_codes[@intCast(index)].slice(this.map.bytes); + const size = bun.zstd.getDecompressedSize(compressed_file); + + const bytes = bun.default_allocator.alloc(u8, size) catch bun.outOfMemory(); + const result = bun.zstd.decompress(bytes, compressed_file); + + if (result == .err) { + bun.Output.warn("Source map decompression error: {s}", .{result.err}); + bun.default_allocator.free(bytes); + this.decompressed_files[index] = ""; + return null; + } + + const data = bytes[0..result.success]; + this.decompressed_files[index] = data; + return data; + } + }; + }; + + pub fn serializeJsonSourceMapForStandalone( + header_list: *std.ArrayList(u8), + string_payload: *std.ArrayList(u8), + arena: std.mem.Allocator, + json_source: []const u8, + ) !void { + const out = header_list.writer(); + const json_src = bun.logger.Source.initPathString("sourcemap.json", json_source); + var log = bun.logger.Log.init(arena); + defer log.deinit(); + + // the allocator given to the JS parser is not respected for all parts + // of the parse, so we need to remember to reset the ast store + bun.JSAst.Expr.Data.Store.reset(); + bun.JSAst.Stmt.Data.Store.reset(); + defer { + bun.JSAst.Expr.Data.Store.reset(); + bun.JSAst.Stmt.Data.Store.reset(); + } + var json = bun.JSON.ParseJSON(&json_src, &log, arena) catch + return error.InvalidSourceMap; + + const mappings_str = json.get("mappings") orelse + return error.InvalidSourceMap; + if (mappings_str.data != .e_string) + return error.InvalidSourceMap; + const sources_content = switch ((json.get("sourcesContent") orelse return error.InvalidSourceMap).data) { + .e_array => |arr| arr, + else => 
return error.InvalidSourceMap, + }; + const sources_paths = switch ((json.get("sources") orelse return error.InvalidSourceMap).data) { + .e_array => |arr| arr, + else => return error.InvalidSourceMap, + }; + if (sources_content.items.len != sources_paths.items.len) { + return error.InvalidSourceMap; + } + + const map_vlq: []const u8 = mappings_str.data.e_string.slice(arena); + + try out.writeInt(u32, sources_paths.items.len, .little); + try out.writeInt(u32, @intCast(map_vlq.len), .little); + + const string_payload_start_location = @sizeOf(u32) + + @sizeOf(u32) + + @sizeOf(bun.StringPointer) * sources_content.items.len * 2 + // path + source + map_vlq.len; + + for (sources_paths.items.slice()) |item| { + if (item.data != .e_string) + return error.InvalidSourceMap; + + const decoded = try item.data.e_string.stringDecodedUTF8(arena); + + const offset = string_payload.items.len; + try string_payload.appendSlice(decoded); + + const slice = bun.StringPointer{ + .offset = @intCast(offset + string_payload_start_location), + .length = @intCast(string_payload.items.len - offset), + }; + try out.writeInt(u32, slice.offset, .little); + try out.writeInt(u32, slice.length, .little); + } + + for (sources_content.items.slice()) |item| { + if (item.data != .e_string) + return error.InvalidSourceMap; + + const utf8 = try item.data.e_string.stringDecodedUTF8(arena); + defer arena.free(utf8); + + const offset = string_payload.items.len; + + const bound = bun.zstd.compressBound(utf8.len); + try string_payload.ensureUnusedCapacity(bound); + + const unused = string_payload.unusedCapacitySlice(); + const compressed_result = bun.zstd.compress(unused, utf8, 1); + if (compressed_result == .err) { + bun.Output.panic("Unexpected error compressing sourcemap: {s}", .{bun.span(compressed_result.err)}); + } + string_payload.items.len += compressed_result.success; + + const slice = bun.StringPointer{ + .offset = @intCast(offset + string_payload_start_location), + .length = @intCast(string_payload.items.len - offset), + }; + try out.writeInt(u32, slice.offset, .little); + try out.writeInt(u32, slice.length, .little); + } + + try out.writeAll(map_vlq); + + bun.assert(header_list.items.len == string_payload_start_location); + } }; diff --git a/src/allocators.zig b/src/allocators.zig index 31bc747fa6..bd8217163e 100644 --- a/src/allocators.zig +++ b/src/allocators.zig @@ -199,7 +199,7 @@ pub fn BSSList(comptime ValueType: type, comptime _count: anytype) type { const Self = @This(); allocator: Allocator, - mutex: Mutex = Mutex.init(), + mutex: Mutex = .{}, head: *OverflowBlock = undefined, tail: OverflowBlock = OverflowBlock{}, backing_buf: [count]ValueType = undefined, @@ -288,7 +288,7 @@ pub fn BSSStringList(comptime _count: usize, comptime _item_length: usize) type allocator: Allocator, slice_buf: [count][]const u8 = undefined, slice_buf_used: u16 = 0, - mutex: Mutex = Mutex.init(), + mutex: Mutex = .{}, pub var instance: Self = undefined; var loaded: bool = false; // only need the mutex on append @@ -465,7 +465,7 @@ pub fn BSSMap(comptime ValueType: type, comptime count: anytype, comptime store_ index: IndexMap, overflow_list: Overflow = Overflow{}, allocator: Allocator, - mutex: Mutex = Mutex.init(), + mutex: Mutex = .{}, backing_buf: [count]ValueType = undefined, backing_buf_used: u16 = 0, diff --git a/src/api/schema.zig b/src/api/schema.zig index 1207251c48..10e25bb561 100644 --- a/src/api/schema.zig +++ b/src/api/schema.zig @@ -824,13 +824,20 @@ pub const Api = struct { } }; - pub const StringPointer = packed struct { 
+ /// Represents a slice stored within an externally stored buffer. Safe to serialize. + /// Must be an extern struct to match with `headers-handwritten.h`. + pub const StringPointer = extern struct { /// offset offset: u32 = 0, /// length length: u32 = 0, + comptime { + bun.assert(@alignOf(StringPointer) == @alignOf(u32)); + bun.assert(@sizeOf(StringPointer) == @sizeOf(u64)); + } + pub fn decode(reader: anytype) anyerror!StringPointer { var this = std.mem.zeroes(StringPointer); @@ -844,7 +851,7 @@ pub const Api = struct { try writer.writeInt(this.length); } - pub fn slice(this: *const @This(), bytes: []const u8) []const u8 { + pub fn slice(this: @This(), bytes: []const u8) []const u8 { return bytes[this.offset .. this.offset + this.length]; } }; diff --git a/src/bun.js/ConsoleObject.zig b/src/bun.js/ConsoleObject.zig index b93b5eab15..3acc602c38 100644 --- a/src/bun.js/ConsoleObject.zig +++ b/src/bun.js/ConsoleObject.zig @@ -71,8 +71,8 @@ pub const MessageType = enum(u32) { _, }; -var stderr_mutex: bun.Lock = bun.Lock.init(); -var stdout_mutex: bun.Lock = bun.Lock.init(); +var stderr_mutex: bun.Lock = .{}; +var stdout_mutex: bun.Lock = .{}; threadlocal var stderr_lock_count: u16 = 0; threadlocal var stdout_lock_count: u16 = 0; diff --git a/src/bun.js/api/bun/dns_resolver.zig b/src/bun.js/api/bun/dns_resolver.zig index 19c342da20..16b4f8cb84 100644 --- a/src/bun.js/api/bun/dns_resolver.zig +++ b/src/bun.js/api/bun/dns_resolver.zig @@ -1239,7 +1239,7 @@ pub const InternalDNS = struct { const GlobalCache = struct { const MAX_ENTRIES = 256; - lock: bun.Lock = bun.Lock.init(), + lock: bun.Lock = .{}, cache: [MAX_ENTRIES]*Request = undefined, len: usize = 0, diff --git a/src/bun.js/api/js_brotli.zig b/src/bun.js/api/js_brotli.zig index 1464cee9f7..58243e4303 100644 --- a/src/bun.js/api/js_brotli.zig +++ b/src/bun.js/api/js_brotli.zig @@ -7,7 +7,7 @@ const Queue = std.fifo.LinearFifo(JSC.Node.BlobOrStringOrBuffer, .Dynamic); // We cannot free outside the JavaScript thread. 
const FreeList = struct { - write_lock: bun.Lock = bun.Lock.init(), + write_lock: bun.Lock = .{}, list: std.ArrayListUnmanaged(JSC.Node.BlobOrStringOrBuffer) = .{}, pub fn append(this: *FreeList, slice: []const JSC.Node.BlobOrStringOrBuffer) void { @@ -43,13 +43,13 @@ pub const BrotliEncoder = struct { globalThis: *JSC.JSGlobalObject, input: Queue = Queue.init(bun.default_allocator), - input_lock: bun.Lock = bun.Lock.init(), + input_lock: bun.Lock = .{}, has_called_end: bool = false, callback_value: JSC.Strong = .{}, output: std.ArrayListUnmanaged(u8) = .{}, - output_lock: bun.Lock = bun.Lock.init(), + output_lock: bun.Lock = .{}, has_pending_activity: std.atomic.Value(u32) = std.atomic.Value(u32).init(0), pending_encode_job_count: std.atomic.Value(u32) = std.atomic.Value(u32).init(0), @@ -358,10 +358,10 @@ pub const BrotliDecoder = struct { pending_decode_job_count: std.atomic.Value(u32) = std.atomic.Value(u32).init(0), input: Queue = Queue.init(bun.default_allocator), - input_lock: bun.Lock = bun.Lock.init(), + input_lock: bun.Lock = .{}, output: std.ArrayListUnmanaged(u8) = .{}, - output_lock: bun.Lock = bun.Lock.init(), + output_lock: bun.Lock = .{}, freelist: FreeList = .{}, diff --git a/src/bun.js/javascript.zig b/src/bun.js/javascript.zig index a5fa1f6479..6807ef1490 100644 --- a/src/bun.js/javascript.zig +++ b/src/bun.js/javascript.zig @@ -114,7 +114,7 @@ pub const bun_file_import_path = "/node_modules.server.bun"; export var has_bun_garbage_collector_flag_enabled = false; const SourceMap = @import("../sourcemap/sourcemap.zig"); -const ParsedSourceMap = SourceMap.Mapping.ParsedSourceMap; +const ParsedSourceMap = SourceMap.ParsedSourceMap; const MappingList = SourceMap.Mapping.List; const SourceProviderMap = SourceMap.SourceProviderMap; @@ -123,14 +123,14 @@ const uv = bun.windows.libuv; pub const SavedSourceMap = struct { /// This is a pointer to the map located on the VirtualMachine struct map: *HashTable, - mutex: bun.Lock = bun.Lock.init(), + mutex: bun.Lock = .{}, pub const vlq_offset = 24; pub fn init(this: *SavedSourceMap, map: *HashTable) void { this.* = .{ .map = map, - .mutex = bun.Lock.init(), + .mutex = .{}, }; this.map.lockPointers(); @@ -340,7 +340,7 @@ pub const SavedSourceMap = struct { return .{ .map = result }; }, Value.Tag.SourceProviderMap => { - var ptr = Value.from(mapping.value_ptr.*).as(SourceProviderMap); + const ptr: *SourceProviderMap = Value.from(mapping.value_ptr.*).as(SourceProviderMap); this.unlock(); // Do not lock the mutex while we're parsing JSON! 
@@ -369,6 +369,7 @@ pub const SavedSourceMap = struct { if (Environment.allow_assert) { @panic("Corrupt pointer tag"); } + this.unlock(); return .{}; }, } @@ -1529,7 +1530,7 @@ pub const VirtualMachine = struct { .origin_timer = std.time.Timer.start() catch @panic("Timers are not supported on this system."), .origin_timestamp = getOriginTimestamp(), .ref_strings = JSC.RefString.Map.init(allocator), - .ref_strings_mutex = Lock.init(), + .ref_strings_mutex = .{}, .standalone_module_graph = opts.graph.?, .debug_thread_id = if (Environment.allow_assert) std.Thread.getCurrentId() else {}, }; @@ -1638,7 +1639,7 @@ pub const VirtualMachine = struct { .origin_timer = std.time.Timer.start() catch @panic("Please don't mess with timers."), .origin_timestamp = getOriginTimestamp(), .ref_strings = JSC.RefString.Map.init(allocator), - .ref_strings_mutex = Lock.init(), + .ref_strings_mutex = .{}, .debug_thread_id = if (Environment.allow_assert) std.Thread.getCurrentId() else {}, }; vm.source_mappings.init(&vm.saved_source_map_table); @@ -1779,7 +1780,7 @@ pub const VirtualMachine = struct { .origin_timer = std.time.Timer.start() catch @panic("Please don't mess with timers."), .origin_timestamp = getOriginTimestamp(), .ref_strings = JSC.RefString.Map.init(allocator), - .ref_strings_mutex = Lock.init(), + .ref_strings_mutex = .{}, .standalone_module_graph = worker.parent.standalone_module_graph, .worker = worker, .debug_thread_id = if (Environment.allow_assert) std.Thread.getCurrentId() else {}, @@ -3042,7 +3043,7 @@ pub const VirtualMachine = struct { var sourceURL = frame.source_url.toUTF8(bun.default_allocator); defer sourceURL.deinit(); - if (this.source_mappings.resolveMapping( + if (this.resolveSourceMapping( sourceURL.slice(), @max(frame.position.line.zeroBased(), 0), @max(frame.position.column.zeroBased(), 0), @@ -3165,7 +3166,7 @@ pub const VirtualMachine = struct { .prefetched_source_code = null, } else - this.source_mappings.resolveMapping( + this.resolveSourceMapping( top_source_url.slice(), @max(top.position.line.zeroBased(), 0), @max(top.position.column.zeroBased(), 0), @@ -3237,7 +3238,7 @@ pub const VirtualMachine = struct { if (frame == top or frame.position.isInvalid()) continue; const source_url = frame.source_url.toUTF8(bun.default_allocator); defer source_url.deinit(); - if (this.source_mappings.resolveMapping( + if (this.resolveSourceMapping( source_url.slice(), @max(frame.position.line.zeroBased(), 0), @max(frame.position.column.zeroBased(), 0), @@ -3687,6 +3688,37 @@ pub const VirtualMachine = struct { writer.print("\n", .{}) catch {}; } + pub fn resolveSourceMapping( + this: *VirtualMachine, + path: []const u8, + line: i32, + column: i32, + source_handling: SourceMap.SourceContentHandling, + ) ?SourceMap.Mapping.Lookup { + return this.source_mappings.resolveMapping(path, line, column, source_handling) orelse { + if (this.standalone_module_graph) |graph| { + const file = graph.find(path) orelse return null; + const map = file.sourcemap.load() orelse return null; + + map.ref(); + + this.source_mappings.putValue(path, SavedSourceMap.Value.init(map)) catch + bun.outOfMemory(); + + const mapping = SourceMap.Mapping.find(map.mappings, line, column) orelse + return null; + + return .{ + .mapping = mapping, + .source_map = map, + .prefetched_source_code = null, + }; + } + + return null; + }; + } + extern fn Process__emitMessageEvent(global: *JSGlobalObject, value: JSValue) void; extern fn Process__emitDisconnectEvent(global: *JSGlobalObject) void; diff --git a/src/bun.js/module_loader.zig 
b/src/bun.js/module_loader.zig index b8cd068110..60509b106c 100644 --- a/src/bun.js/module_loader.zig +++ b/src/bun.js/module_loader.zig @@ -170,7 +170,7 @@ fn dumpSourceStringFailiable(vm: *VirtualMachine, specifier: string, written: [] const BunDebugHolder = struct { pub var dir: ?std.fs.Dir = null; - pub var lock: bun.Lock = bun.Lock.init(); + pub var lock: bun.Lock = .{}; }; BunDebugHolder.lock.lock(); diff --git a/src/bun.js/node/fs_events.zig b/src/bun.js/node/fs_events.zig index ed1e6100a0..fd019882fb 100644 --- a/src/bun.js/node/fs_events.zig +++ b/src/bun.js/node/fs_events.zig @@ -107,8 +107,8 @@ pub const kFSEventsSystem: c_int = kFSEventStreamEventFlagUnmount | kFSEventStreamEventFlagRootChanged; -var fsevents_mutex: Mutex = Mutex.init(); -var fsevents_default_loop_mutex: Mutex = Mutex.init(); +var fsevents_mutex: Mutex = .{}; +var fsevents_default_loop_mutex: Mutex = .{}; var fsevents_default_loop: ?*FSEventsLoop = null; fn dlsym(handle: ?*anyopaque, comptime Type: type, comptime symbol: [:0]const u8) ?Type { @@ -331,7 +331,7 @@ pub const FSEventsLoop = struct { return error.FailedToCreateCoreFoudationSourceLoop; } - const fs_loop = FSEventsLoop{ .sem = Semaphore.init(0), .mutex = Mutex.init(), .signal_source = signal_source }; + const fs_loop = FSEventsLoop{ .sem = Semaphore.init(0), .mutex = .{}, .signal_source = signal_source }; this.* = fs_loop; this.thread = try std.Thread.spawn(.{}, FSEventsLoop.CFThreadLoop, .{this}); diff --git a/src/bun.js/node/node_fs.zig b/src/bun.js/node/node_fs.zig index a4a009b943..6fd3bc559c 100644 --- a/src/bun.js/node/node_fs.zig +++ b/src/bun.js/node/node_fs.zig @@ -757,7 +757,7 @@ pub const AsyncReaddirRecursiveTask = struct { root_path: PathString = PathString.empty, pending_err: ?Syscall.Error = null, - pending_err_mutex: bun.Lock = bun.Lock.init(), + pending_err_mutex: bun.Lock = .{}, pub usingnamespace bun.New(@This()); diff --git a/src/bun.js/node/node_fs_watcher.zig b/src/bun.js/node/node_fs_watcher.zig index 894b67d332..f207ab2f1d 100644 --- a/src/bun.js/node/node_fs_watcher.zig +++ b/src/bun.js/node/node_fs_watcher.zig @@ -741,7 +741,7 @@ pub const FSWatcher = struct { .ctx = undefined, .count = 0, }, - .mutex = Mutex.init(), + .mutex = .{}, .signal = if (args.signal) |s| s.ref() else null, .persistent = args.persistent, .path_watcher = null, diff --git a/src/bun.js/node/path_watcher.zig b/src/bun.js/node/path_watcher.zig index f3fda8461b..3203d7365d 100644 --- a/src/bun.js/node/path_watcher.zig +++ b/src/bun.js/node/path_watcher.zig @@ -18,7 +18,7 @@ const GenericWatcher = @import("../../watcher.zig"); const sync = @import("../../sync.zig"); const Semaphore = sync.Semaphore; -var default_manager_mutex: Mutex = Mutex.init(); +var default_manager_mutex: Mutex = .{}; var default_manager: ?*PathWatcherManager = null; const FSWatcher = bun.JSC.Node.FSWatcher; @@ -154,7 +154,7 @@ pub const PathWatcherManager = struct { ), .vm = vm, .watcher_count = 0, - .mutex = Mutex.init(), + .mutex = .{}, }; this.* = manager; @@ -795,7 +795,7 @@ pub const PathWatcher = struct { .flushCallback = updateEndCallback, .file_paths = .{}, .ctx = ctx, - .mutex = Mutex.init(), + .mutex = .{}, }; errdefer this.deinit(); @@ -815,7 +815,7 @@ pub const PathWatcher = struct { .recursive = recursive, .flushCallback = updateEndCallback, .ctx = ctx, - .mutex = Mutex.init(), + .mutex = .{}, .file_paths = bun.BabyList([:0]const u8).initCapacity(bun.default_allocator, 1) catch |err| { bun.default_allocator.destroy(this); return err; diff --git a/src/bun.js/rare_data.zig 
b/src/bun.js/rare_data.zig index b0c929ea36..6451180184 100644 --- a/src/bun.js/rare_data.zig +++ b/src/bun.js/rare_data.zig @@ -45,7 +45,7 @@ mime_types: ?bun.http.MimeType.Map = null, node_fs_stat_watcher_scheduler: ?*StatWatcherScheduler = null, listening_sockets_for_watch_mode: std.ArrayListUnmanaged(bun.FileDescriptor) = .{}, -listening_sockets_for_watch_mode_lock: bun.Lock = bun.Lock.init(), +listening_sockets_for_watch_mode_lock: bun.Lock = .{}, temp_pipe_read_buffer: ?*PipeReadBuffer = null, diff --git a/src/bun.js/webcore/ObjectURLRegistry.zig b/src/bun.js/webcore/ObjectURLRegistry.zig index 90df76d5f9..10a30fdab8 100644 --- a/src/bun.js/webcore/ObjectURLRegistry.zig +++ b/src/bun.js/webcore/ObjectURLRegistry.zig @@ -5,7 +5,7 @@ const UUID = bun.UUID; const assert = bun.assert; const ObjectURLRegistry = @This(); -lock: bun.Lock = bun.Lock.init(), +lock: bun.Lock = .{}, map: std.AutoHashMap(UUID, *RegistryEntry) = std.AutoHashMap(UUID, *RegistryEntry).init(bun.default_allocator), pub const RegistryEntry = struct { diff --git a/src/bun.js/webcore/response.zig b/src/bun.js/webcore/response.zig index 572a42eabd..46b4b15d0d 100644 --- a/src/bun.js/webcore/response.zig +++ b/src/bun.js/webcore/response.zig @@ -1559,7 +1559,7 @@ pub const Fetch = struct { var fetch_tasklet = try allocator.create(FetchTasklet); fetch_tasklet.* = .{ - .mutex = Mutex.init(), + .mutex = .{}, .scheduled_response_buffer = .{ .allocator = fetch_options.memory_reporter.allocator(), .list = .{ diff --git a/src/bun.zig b/src/bun.zig index b4c731408b..b2286e9b24 100644 --- a/src/bun.zig +++ b/src/bun.zig @@ -1530,10 +1530,8 @@ pub const StringJoiner = @import("./StringJoiner.zig"); pub const NullableAllocator = @import("./NullableAllocator.zig"); pub const renamer = @import("./renamer.zig"); -pub const sourcemap = struct { - pub usingnamespace @import("./sourcemap/sourcemap.zig"); - pub usingnamespace @import("./sourcemap/CodeCoverage.zig"); -}; +// TODO: Rename to SourceMap as this is a struct. +pub const sourcemap = @import("./sourcemap/sourcemap.zig"); pub fn asByteSlice(buffer: anytype) []const u8 { return switch (@TypeOf(buffer)) { @@ -3263,7 +3261,7 @@ pub fn selfExePath() ![:0]u8 { 4096 + 1 // + 1 for the null terminator ]u8 = undefined; var len: usize = 0; - var lock = Lock.init(); + var lock: Lock = .{}; pub fn load() ![:0]u8 { const init = try std.fs.selfExePath(&value); diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig index dca549ac2f..81671e0cc4 100644 --- a/src/bundler/bundle_v2.zig +++ b/src/bundler/bundle_v2.zig @@ -135,7 +135,7 @@ fn tracer(comptime src: std.builtin.SourceLocation, comptime name: [:0]const u8) pub const ThreadPool = struct { pool: *ThreadPoolLib = undefined, workers_assignments: std.AutoArrayHashMap(std.Thread.Id, *Worker) = std.AutoArrayHashMap(std.Thread.Id, *Worker).init(bun.default_allocator), - workers_assignments_lock: bun.Lock = bun.Lock.init(), + workers_assignments_lock: bun.Lock = .{}, v2: *BundleV2 = undefined, diff --git a/src/cli.zig b/src/cli.zig index 31946329c6..93c699b2fc 100644 --- a/src/cli.zig +++ b/src/cli.zig @@ -848,6 +848,13 @@ pub const Arguments = struct { Output.prettyErrorln("error: Invalid sourcemap setting: \"{s}\"", .{setting}); Global.crash(); } + + // when using --compile, only `external` works, as we do not + // look at the source map comment. 
so after we validate the + // user's choice was in the list, we secretly override it + if (ctx.bundler_options.compile) { + opts.source_map = .external; + } } } diff --git a/src/fs.zig b/src/fs.zig index 0649fb4c66..5791d116da 100644 --- a/src/fs.zig +++ b/src/fs.zig @@ -204,7 +204,7 @@ pub const FileSystem = struct { .base_ = name, .base_lowercase_ = name_lowercased, .dir = dir.dir, - .mutex = Mutex.init(), + .mutex = .{}, // Call "stat" lazily for performance. The "@material-ui/icons" package // contains a directory with over 11,000 entries in it and running "stat" // for each entry was a big performance issue for that package. @@ -533,7 +533,7 @@ pub const FileSystem = struct { } pub const RealFS = struct { - entries_mutex: Mutex = Mutex.init(), + entries_mutex: Mutex = .{}, entries: *EntriesOption.Map, cwd: string, parent_fs: *FileSystem = undefined, diff --git a/src/http.zig b/src/http.zig index 2c0f19f558..62d4fb50ae 100644 --- a/src/http.zig +++ b/src/http.zig @@ -772,7 +772,7 @@ pub const HTTPThread = struct { queued_tasks: Queue = Queue{}, queued_shutdowns: std.ArrayListUnmanaged(ShutdownMessage) = std.ArrayListUnmanaged(ShutdownMessage){}, - queued_shutdowns_lock: bun.Lock = bun.Lock.init(), + queued_shutdowns_lock: bun.Lock = .{}, has_awoken: std.atomic.Value(bool) = std.atomic.Value(bool).init(false), timer: std.time.Timer, diff --git a/src/js_ast.zig b/src/js_ast.zig index 0ebc3d0e83..4b8f598677 100644 --- a/src/js_ast.zig +++ b/src/js_ast.zig @@ -2494,6 +2494,12 @@ pub const E = struct { } } + pub fn stringDecodedUTF8(s: *const String, allocator: std.mem.Allocator) !bun.string { + const utf16_decode = try bun.js_lexer.decodeStringLiteralEscapeSequencesToUTF16(try s.string(allocator), allocator); + defer allocator.free(utf16_decode); + return try bun.strings.toUTF8Alloc(allocator, utf16_decode); + } + pub fn hash(s: *const String) u64 { if (s.isBlank()) return 0; diff --git a/src/lock.zig b/src/lock.zig index 5cef40b87d..43e9763bb3 100644 --- a/src/lock.zig +++ b/src/lock.zig @@ -103,11 +103,7 @@ pub const Mutex = struct { }; pub const Lock = struct { - mutex: Mutex, - - pub fn init() Lock { - return Lock{ .mutex = Mutex{} }; - } + mutex: Mutex = .{}, pub inline fn lock(this: *Lock) void { this.mutex.acquire(); diff --git a/src/napi/napi.zig b/src/napi/napi.zig index 4b37c56322..30d97267a3 100644 --- a/src/napi/napi.zig +++ b/src/napi/napi.zig @@ -1389,7 +1389,7 @@ pub const ThreadSafeFunction = struct { poll_ref: Async.KeepAlive, thread_count: usize = 0, - owning_thread_lock: Lock = Lock.init(), + owning_thread_lock: Lock = .{}, event_loop: *JSC.EventLoop, tracker: JSC.AsyncTaskTracker, diff --git a/src/resolver/resolver.zig b/src/resolver/resolver.zig index 9e1a2bdec5..7c5901a7f5 100644 --- a/src/resolver/resolver.zig +++ b/src/resolver/resolver.zig @@ -453,7 +453,7 @@ var resolver_Mutex_loaded: bool = false; const BinFolderArray = std.BoundedArray(string, 128); var bin_folders: BinFolderArray = undefined; -var bin_folders_lock: Mutex = Mutex.init(); +var bin_folders_lock: Mutex = .{}; var bin_folders_loaded: bool = false; const Timer = @import("../system_timer.zig").Timer; @@ -606,7 +606,7 @@ pub const Resolver = struct { opts: options.BundleOptions, ) ThisResolver { if (!resolver_Mutex_loaded) { - resolver_Mutex = Mutex.init(); + resolver_Mutex = .{}; resolver_Mutex_loaded = true; } diff --git a/src/shell/interpreter.zig b/src/shell/interpreter.zig index 9c8fe9b615..931aa5799b 100644 --- a/src/shell/interpreter.zig +++ b/src/shell/interpreter.zig @@ -9241,7 +9241,7 @@ pub 
const Interpreter = struct { root_is_absolute: bool, error_signal: *std.atomic.Value(bool), - err_mutex: bun.Lock = bun.Lock.init(), + err_mutex: bun.Lock = .{}, err: ?Syscall.Error = null, event_loop: JSC.EventLoopHandle, diff --git a/src/sourcemap/sourcemap.zig b/src/sourcemap/sourcemap.zig index cbc44e3582..8277ae948e 100644 --- a/src/sourcemap/sourcemap.zig +++ b/src/sourcemap/sourcemap.zig @@ -53,7 +53,7 @@ pub const ParseUrlResultHint = union(enum) { pub const ParseUrl = struct { /// Populated when `mappings_only` or `all`. - map: ?*Mapping.ParsedSourceMap = null, + map: ?*ParsedSourceMap = null, /// Populated when `all` /// May be `null` even when requested. mapping: ?Mapping = null, @@ -199,7 +199,7 @@ pub fn parseJSON( .fail => |fail| return fail.err, }; - const ptr = Mapping.ParsedSourceMap.new(map_data); + const ptr = ParsedSourceMap.new(map_data); ptr.external_source_names = source_paths_slice.?; break :map ptr; } else null; @@ -248,6 +248,8 @@ pub const Mapping = struct { original: LineColumnOffset, source_index: i32, + pub const List = bun.MultiArrayList(Mapping); + pub const Lookup = struct { mapping: Mapping, source_map: ?*ParsedSourceMap = null, @@ -256,7 +258,10 @@ pub const Mapping = struct { prefetched_source_code: ?[]const u8, /// This creates a bun.String if the source remap *changes* the source url, - /// a case that happens only when the source map points to another file. + /// which is only possible if the executed file differs from the source file: + /// + /// - `bun build --sourcemap`, it is another file on disk + /// - `bun build --compile --sourcemap`, it is an embedded file. pub fn displaySourceURLIfNeeded(lookup: Lookup, base_filename: []const u8) ?bun.String { const source_map = lookup.source_map orelse return null; // See doc comment on `external_source_names` @@ -267,6 +272,10 @@ pub const Mapping = struct { const name = source_map.external_source_names[@intCast(lookup.mapping.source_index)]; + if (source_map.is_standalone_module_graph) { + return bun.String.createUTF8(name); + } + if (std.fs.path.isAbsolute(base_filename)) { const dir = bun.path.dirname(base_filename, .auto); return bun.String.createUTF8(bun.path.joinAbs(dir, .auto, name)); @@ -277,6 +286,9 @@ pub const Mapping = struct { /// Only valid if `lookup.source_map.isExternal()` /// This has the possibility of invoking a call to the filesystem. + /// + /// This data is freed after being printed, on the assumption that printing + /// errors to the console is rare (this isn't used for error.stack) pub fn getSourceCode(lookup: Lookup, base_filename: []const u8) ?bun.JSC.ZigString.Slice { const bytes = bytes: { if (lookup.prefetched_source_code) |code| { @@ -291,6 +303,18 @@ pub const Mapping = struct { const index = lookup.mapping.source_index; + // Standalone module graph source maps are stored (in memory) compressed. + // They are decompressed on demand. 
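+ // Each file is decompressed at most once: `SerializedSourceMap.Loaded.sourceFileContents` caches the result, and a failed decompression is cached as an empty string meaning "no contents".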
+ if (source_map.is_standalone_module_graph) { + const serialized = source_map.standaloneModuleGraphData(); + if (index >= source_map.external_source_names.len) + return null; + + const code = serialized.sourceFileContents(@intCast(index)); + + return bun.JSC.ZigString.Slice.fromUTF8NeverFree(code orelse return null); + } + if (provider.getSourceMap( base_filename, source_map.underlying_provider.load_hint, @@ -325,8 +349,6 @@ pub const Mapping = struct { } }; - pub const List = std.MultiArrayList(Mapping); - pub inline fn generatedLine(mapping: Mapping) i32 { return mapping.generated.lines; } @@ -578,122 +600,129 @@ pub const Mapping = struct { }, }; } +}; - pub const ParseResult = union(enum) { - fail: struct { - loc: Logger.Loc, - err: anyerror, - value: i32 = 0, - msg: []const u8 = "", + pub const ParseResult = union(enum) { + fail: struct { + loc: Logger.Loc, + err: anyerror, + value: i32 = 0, + msg: []const u8 = "", - pub fn toData(this: @This(), path: []const u8) Logger.Data { - return Logger.Data{ - .location = Logger.Location{ - .file = path, - .offset = this.loc.toUsize(), - }, - .text = this.msg, - }; - } - }, - success: ParsedSourceMap, - }; + pub fn toData(this: @This(), path: []const u8) Logger.Data { + return Logger.Data{ + .location = Logger.Location{ + .file = path, + .offset = this.loc.toUsize(), + }, + .text = this.msg, + }; + } + }, + success: ParsedSourceMap, +}; - pub const ParsedSourceMap = struct { - input_line_count: usize = 0, - mappings: Mapping.List = .{}, - /// If this is empty, this implies that the source code is a single file - /// transpiled on-demand. If there are items, then it means this is a file - /// loaded without transpilation but with external sources. This array - /// maps `source_index` to the correct filename. - external_source_names: []const []const u8 = &.{}, - /// In order to load source contents from a source-map after the fact, - // / a handle to the underlying source provider is stored. Within this pointer, - /// a flag is stored if it is known to be an inline or external source map. - /// - /// Source contents are large, we don't preserve them in memory. This has - /// the downside of repeatedly re-decoding sourcemaps if multiple errors - /// are emitted (specifically with Bun.inspect / unhandled; the ones that - /// rely on source contents) - underlying_provider: SourceContentPtr = .{ .data = 0 }, + pub const ParsedSourceMap = struct { + input_line_count: usize = 0, + mappings: Mapping.List = .{}, + /// If this is empty, this implies that the source code is a single file + /// transpiled on-demand. If there are items, then it means this is a file + /// loaded without transpilation but with external sources. This array + /// maps `source_index` to the correct filename. + external_source_names: []const []const u8 = &.{}, + /// In order to load source contents from a source-map after the fact, + /// a handle to the underlying source provider is stored. Within this pointer, + /// a flag is stored if it is known to be an inline or external source map. + /// + /// Source contents are large, we don't preserve them in memory. 
This has + /// the downside of repeatedly re-decoding sourcemaps if multiple errors + /// are emitted (specifically with Bun.inspect / unhandled; the ones that + /// rely on source contents) + underlying_provider: SourceContentPtr = .{ .data = 0 }, - ref_count: std.atomic.Value(u32) = std.atomic.Value(u32).init(1), + ref_count: std.atomic.Value(u32) = std.atomic.Value(u32).init(1), - pub usingnamespace bun.NewThreadSafeRefCounted(ParsedSourceMap, deinitFn); + is_standalone_module_graph: bool = false, - const SourceContentPtr = packed struct(u64) { - load_hint: SourceMapLoadHint = .none, - data: u62, + pub usingnamespace bun.NewThreadSafeRefCounted(ParsedSourceMap, deinitFn); - fn fromProvider(p: *SourceProviderMap) SourceContentPtr { - return .{ .data = @intCast(@intFromPtr(p)) }; - } + const SourceContentPtr = packed struct(u64) { + load_hint: SourceMapLoadHint = .none, + data: u62, - pub fn provider(sc: SourceContentPtr) ?*SourceProviderMap { - return @ptrFromInt(sc.data); - } - }; - - pub fn isExternal(psm: *ParsedSourceMap) bool { - return psm.external_source_names.len != 0; + fn fromProvider(p: *SourceProviderMap) SourceContentPtr { + return .{ .data = @intCast(@intFromPtr(p)) }; } - fn deinitFn(this: *ParsedSourceMap) void { - this.deinitWithAllocator(bun.default_allocator); - } - - fn deinitWithAllocator(this: *ParsedSourceMap, allocator: std.mem.Allocator) void { - this.mappings.deinit(allocator); - - if (this.external_source_names.len > 0) { - for (this.external_source_names) |name| - allocator.free(name); - allocator.free(this.external_source_names); - } - - this.destroy(); - } - - pub fn writeVLQs(map: ParsedSourceMap, writer: anytype) !void { - var last_col: i32 = 0; - var last_src: i32 = 0; - var last_ol: i32 = 0; - var last_oc: i32 = 0; - var current_line: i32 = 0; - for ( - map.mappings.items(.generated), - map.mappings.items(.original), - map.mappings.items(.source_index), - 0.., - ) |gen, orig, source_index, i| { - if (current_line != gen.lines) { - assert(gen.lines > current_line); - const inc = gen.lines - current_line; - try writer.writeByteNTimes(';', @intCast(inc)); - current_line = gen.lines; - last_col = 0; - } else if (i != 0) { - try writer.writeByte(','); - } - try encodeVLQ(gen.columns - last_col).writeTo(writer); - last_col = gen.columns; - try encodeVLQ(source_index - last_src).writeTo(writer); - last_src = source_index; - try encodeVLQ(orig.lines - last_ol).writeTo(writer); - last_ol = orig.lines; - try encodeVLQ(orig.columns - last_oc).writeTo(writer); - last_oc = orig.columns; - } - } - - pub fn formatVLQs(map: *const ParsedSourceMap) std.fmt.Formatter(formatVLQsImpl) { - return .{ .data = map }; - } - - fn formatVLQsImpl(map: *const ParsedSourceMap, comptime _: []const u8, _: std.fmt.FormatOptions, w: anytype) !void { - try map.writeVLQs(w); + pub fn provider(sc: SourceContentPtr) ?*SourceProviderMap { + return @ptrFromInt(sc.data); } }; + + pub fn isExternal(psm: *ParsedSourceMap) bool { + return psm.external_source_names.len != 0; + } + + fn deinitFn(this: *ParsedSourceMap) void { + this.deinitWithAllocator(bun.default_allocator); + } + + fn deinitWithAllocator(this: *ParsedSourceMap, allocator: std.mem.Allocator) void { + this.mappings.deinit(allocator); + + if (this.external_source_names.len > 0) { + for (this.external_source_names) |name| + allocator.free(name); + allocator.free(this.external_source_names); + } + + this.destroy(); + } + + fn standaloneModuleGraphData(this: *ParsedSourceMap) *bun.StandaloneModuleGraph.SerializedSourceMap.Loaded { + 
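+ // `underlying_provider.data` holds the 62-bit truncated pointer to the `SerializedSourceMap.Loaded` that `LazySourceMap.load` packed in; recovering it this way assumes real pointers fit in 62 bits.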
bun.assert(this.is_standalone_module_graph); + return @ptrFromInt(this.underlying_provider.data); + } + + pub fn writeVLQs(map: ParsedSourceMap, writer: anytype) !void { + var last_col: i32 = 0; + var last_src: i32 = 0; + var last_ol: i32 = 0; + var last_oc: i32 = 0; + var current_line: i32 = 0; + for ( + map.mappings.items(.generated), + map.mappings.items(.original), + map.mappings.items(.source_index), + 0.., + ) |gen, orig, source_index, i| { + if (current_line != gen.lines) { + assert(gen.lines > current_line); + const inc = gen.lines - current_line; + try writer.writeByteNTimes(';', @intCast(inc)); + current_line = gen.lines; + last_col = 0; + } else if (i != 0) { + try writer.writeByte(','); + } + try encodeVLQ(gen.columns - last_col).writeTo(writer); + last_col = gen.columns; + try encodeVLQ(source_index - last_src).writeTo(writer); + last_src = source_index; + try encodeVLQ(orig.lines - last_ol).writeTo(writer); + last_ol = orig.lines; + try encodeVLQ(orig.columns - last_oc).writeTo(writer); + last_oc = orig.columns; + } + } + + pub fn formatVLQs(map: *const ParsedSourceMap) std.fmt.Formatter(formatVLQsImpl) { + return .{ .data = map }; + } + + fn formatVLQsImpl(map: *const ParsedSourceMap, comptime _: []const u8, _: std.fmt.FormatOptions, w: anytype) !void { + try map.writeVLQs(w); + } }; /// For some sourcemap loading code, this enum is used as a hint if it should @@ -826,7 +855,7 @@ pub const SourceProviderMap = opaque { return null; }; if (parsed.map) |ptr| { - ptr.underlying_provider = Mapping.ParsedSourceMap.SourceContentPtr.fromProvider(provider); + ptr.underlying_provider = ParsedSourceMap.SourceContentPtr.fromProvider(provider); ptr.underlying_provider.load_hint = new_load_hint; } return parsed; @@ -1930,3 +1959,5 @@ pub const DebugIDFormatter = struct { }; const assert = bun.assert; + +pub usingnamespace @import("./CodeCoverage.zig"); diff --git a/src/string_builder.zig b/src/string_builder.zig index 64bbc005c6..4dd4be99df 100644 --- a/src/string_builder.zig +++ b/src/string_builder.zig @@ -104,6 +104,17 @@ pub fn append(this: *StringBuilder, slice: string) string { return result; } +pub fn addConcat(this: *StringBuilder, slices: []const string) bun.StringPointer { + var remain = this.allocatedSlice()[this.len..]; + var len: usize = 0; + for (slices) |slice| { + @memcpy(remain[0..slice.len], slice); + remain = remain[slice.len..]; + len += slice.len; + } + return this.add(len); +} + pub fn add(this: *StringBuilder, len: usize) bun.StringPointer { if (comptime Environment.allow_assert) { assert(this.len <= this.cap); // didn't count everything diff --git a/src/watcher.zig b/src/watcher.zig index 45095a1125..aad3b5c4f9 100644 --- a/src/watcher.zig +++ b/src/watcher.zig @@ -236,7 +236,7 @@ const DarwinWatcher = struct { }; const WindowsWatcher = struct { - mutex: Mutex = Mutex.init(), + mutex: Mutex = .{}, iocp: w.HANDLE = undefined, watcher: DirWatcher = undefined, @@ -573,7 +573,7 @@ pub fn NewWatcher(comptime ContextType: type) type { .watched_count = 0, .ctx = ctx, .watchlist = WatchList{}, - .mutex = Mutex.init(), + .mutex = .{}, .cwd = fs.top_level_dir, }; diff --git a/test/bundler/bundler_compile.test.ts b/test/bundler/bundler_compile.test.ts index b07c72552c..e69caaea4a 100644 --- a/test/bundler/bundler_compile.test.ts +++ b/test/bundler/bundler_compile.test.ts @@ -1,6 +1,8 @@ import { itBundled } from "./expectBundled"; import { Database } from "bun:sqlite"; +import { expect } from "bun:test"; import { describe } from "bun:test"; +import { rmSync } from "fs"; 
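+// The compile/SourceMap tests below delete the original source files after bundling, so a correct stack trace can only come from the source map embedded in the executable.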
describe("bundler", () => { itBundled("compile/HelloWorld", { @@ -312,4 +314,78 @@ describe("bundler", () => { }, run: { stdout: new Array(7).fill("true").join("\n") }, }); + itBundled("compile/SourceMap", { + target: "bun", + compile: true, + files: { + "/entry.ts": /* js */ ` + // this file has comments and weird whitespace, intentionally + // to make it obvious if sourcemaps were generated and mapped properly + if (true) code(); + function code() { + // hello world + throw new + Error("Hello World"); + } + `, + }, + sourceMap: "external", + onAfterBundle(api) { + rmSync(api.join("entry.ts"), {}); // Hide the source files for errors + }, + run: { + exitCode: 1, + validate({ stderr }) { + expect(stderr).toStartWith( + `1 | // this file has comments and weird whitespace, intentionally +2 | // to make it obvious if sourcemaps were generated and mapped properly +3 | if (true) code(); +4 | function code() { +5 | // hello world +6 | throw new + ^ +error: Hello World`, + ); + expect(stderr).toInclude("entry.ts:6:19"); + }, + }, + }); + itBundled("compile/SourceMapBigFile", { + target: "bun", + compile: true, + files: { + "/entry.ts": /* js */ `import * as ReactDom from ${JSON.stringify(require.resolve("react-dom/server"))}; + +// this file has comments and weird whitespace, intentionally +// to make it obvious if sourcemaps were generated and mapped properly +if (true) code(); +function code() { + // hello world + throw new + Error("Hello World"); +} + +console.log(ReactDom);`, + }, + sourceMap: "external", + onAfterBundle(api) { + rmSync(api.join("entry.ts"), {}); // Hide the source files for errors + }, + run: { + exitCode: 1, + validate({ stderr }) { + expect(stderr).toStartWith( + `3 | // this file has comments and weird whitespace, intentionally +4 | // to make it obvious if sourcemaps were generated and mapped properly +5 | if (true) code(); +6 | function code() { +7 | // hello world +8 | throw new + ^ +error: Hello World`, + ); + expect(stderr).toInclude("entry.ts:8:19"); + }, + }, + }); }); diff --git a/test/bundler/expectBundled.ts b/test/bundler/expectBundled.ts index 55fe538c01..7cf1c5c2c2 100644 --- a/test/bundler/expectBundled.ts +++ b/test/bundler/expectBundled.ts @@ -285,7 +285,7 @@ export interface SourceMapTests { mappingsExactMatch?: string; } -/** Keep in mind this is an array/tuple, NOT AN OBJECT. This keeps things more consise */ +/** Keep in mind this is an array/tuple, NOT AN OBJECT. This keeps things more concise */ export type MappingSnapshot = [ // format a string like "file:line:col", for example // "index.ts:5:2" @@ -302,6 +302,7 @@ export interface BundlerTestBundleAPI { outfile: string; outdir: string; + join(subPath: string): string; readFile(file: string): string; writeFile(file: string, contents: string): void; prependFile(file: string, contents: string): void; @@ -1100,6 +1101,7 @@ for (const [key, blob] of build.outputs) { root, outfile: outfile!, outdir: outdir!, + join: (...paths: string[]) => path.join(root, ...paths), readFile, writeFile, expectFile: file => expect(readFile(file)),