From 498186764af3f56236c348cf72eb7b1be7b30ffb Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Sun, 8 Jun 2025 05:07:09 -0700 Subject: [PATCH] Remove a memcpy (#20261) Co-authored-by: Jarred-Sumner <709451+Jarred-Sumner@users.noreply.github.com> --- src/bun.js/ModuleLoader.zig | 24 +++++----- src/bun.js/SavedSourceMap.zig | 4 +- src/bun.js/VirtualMachine.zig | 2 +- src/bun.js/api/JSTranspiler.zig | 10 ++--- src/bun.js/api/TOMLObject.zig | 6 +-- src/bun.js/api/server.zig | 4 +- src/bun.js/test/snapshot.zig | 34 +++++++------- src/bundler/LinkerContext.zig | 4 +- src/bundler/ParseTask.zig | 54 +++++++++++------------ src/bundler/bundle_v2.zig | 18 ++++---- src/bunfig.zig | 12 ++--- src/cache.zig | 12 ++--- src/cli.zig | 2 +- src/cli/audit_command.zig | 8 ++-- src/cli/bunx_command.zig | 4 +- src/cli/create_command.zig | 14 +++--- src/cli/init_command.zig | 4 +- src/cli/pack_command.zig | 8 ++-- src/cli/pm_view_command.zig | 12 ++--- src/cli/publish_command.zig | 8 ++-- src/cli/upgrade_command.zig | 4 +- src/defines.zig | 4 +- src/env_loader.zig | 16 +++---- src/ini.zig | 10 ++--- src/install/install.zig | 50 ++++++++++----------- src/install/lockfile.zig | 12 ++--- src/install/lockfile/OverrideMap.zig | 42 +++++++++--------- src/install/lockfile/Package.zig | 38 ++++++++-------- src/install/npm.zig | 16 +++---- src/install/resolvers/folder_resolver.zig | 4 +- src/js/node/net.ts | 4 +- src/js_ast.zig | 4 +- src/js_lexer.zig | 26 +++-------- src/js_parser.zig | 4 +- src/js_printer.zig | 14 +++--- src/json_parser.zig | 28 ++++++------ src/resolver/package_json.zig | 2 +- src/resolver/resolver.zig | 2 +- src/resolver/tsconfig_json.zig | 22 ++++----- src/sourcemap/sourcemap.zig | 6 +-- src/transpiler.zig | 28 ++++++------ 41 files changed, 283 insertions(+), 297 deletions(-) diff --git a/src/bun.js/ModuleLoader.zig b/src/bun.js/ModuleLoader.zig index 4df6cf1263..d190e053ea 100644 --- a/src/bun.js/ModuleLoader.zig +++ b/src/bun.js/ModuleLoader.zig @@ -1026,6 +1026,8 @@ pub fn transpileSourceCode( }, }; + const source = &parse_result.source; + if (parse_result.loader == .wasm) { return transpileSourceCode( jsc_vm, @@ -1071,7 +1073,7 @@ pub fn transpileSourceCode( if (loader == .json) { return ResolvedSource{ .allocator = null, - .source_code = bun.String.createUTF8(parse_result.source.contents), + .source_code = bun.String.createUTF8(source.contents), .specifier = input_specifier, .source_url = input_specifier.createIfDifferent(path.text), .tag = ResolvedSource.Tag.json_for_object_loader, @@ -1082,8 +1084,8 @@ pub fn transpileSourceCode( return ResolvedSource{ .allocator = null, .source_code = switch (comptime flags) { - .print_source_and_clone => bun.String.init(jsc_vm.allocator.dupe(u8, parse_result.source.contents) catch unreachable), - .print_source => bun.String.init(parse_result.source.contents), + .print_source_and_clone => bun.String.init(jsc_vm.allocator.dupe(u8, source.contents) catch unreachable), + .print_source => bun.String.init(source.contents), else => @compileError("unreachable"), }, .specifier = input_specifier, @@ -1115,7 +1117,7 @@ pub fn transpileSourceCode( const bytecode_slice = parse_result.already_bundled.bytecodeSlice(); return ResolvedSource{ .allocator = null, - .source_code = bun.String.createLatin1(parse_result.source.contents), + .source_code = bun.String.createLatin1(source.contents), .specifier = input_specifier, .source_url = input_specifier.createIfDifferent(path.text), .already_bundled = true, @@ -1127,7 +1129,7 @@ pub fn transpileSourceCode( if 
(parse_result.empty) { const was_cjs = (loader == .js or loader == .ts) and brk: { - const ext = std.fs.path.extension(parse_result.source.path.text); + const ext = std.fs.path.extension(source.path.text); break :brk strings.eqlComptime(ext, ".cjs") or strings.eqlComptime(ext, ".cts"); }; if (was_cjs) { @@ -1143,7 +1145,7 @@ pub fn transpileSourceCode( } if (cache.entry) |*entry| { - jsc_vm.source_mappings.putMappings(parse_result.source, .{ + jsc_vm.source_mappings.putMappings(source, .{ .list = .{ .items = @constCast(entry.sourcemap), .capacity = entry.sourcemap.len }, .allocator = bun.default_allocator, }) catch {}; @@ -1167,10 +1169,10 @@ pub fn transpileSourceCode( .source_url = input_specifier.createIfDifferent(path.text), .is_commonjs_module = entry.metadata.module_type == .cjs, .tag = brk: { - if (entry.metadata.module_type == .cjs and parse_result.source.path.isFile()) { + if (entry.metadata.module_type == .cjs and source.path.isFile()) { const actual_package_json: *PackageJSON = package_json orelse brk2: { // this should already be cached virtually always so it's fine to do this - const dir_info = (jsc_vm.transpiler.resolver.readDirInfo(parse_result.source.path.name.dir) catch null) orelse + const dir_info = (jsc_vm.transpiler.resolver.readDirInfo(source.path.name.dir) catch null) orelse break :brk .javascript; break :brk2 dir_info.package_json orelse dir_info.enclosing_package_json; @@ -1204,7 +1206,7 @@ pub fn transpileSourceCode( return error.UnexpectedPendingResolution; } - if (parse_result.source.contents_is_recycled) { + if (source.contents_is_recycled) { // this shared buffer is about to become owned by the AsyncModule struct jsc_vm.transpiler.resolver.caches.fs.resetSharedBuffer( jsc_vm.transpiler.resolver.caches.fs.sharedBuffer(), @@ -1334,7 +1336,7 @@ pub fn transpileSourceCode( // return ResolvedSource{ // .allocator = if (jsc_vm.has_loaded) &jsc_vm.allocator else null, - // .source_code = ZigString.init(jsc_vm.allocator.dupe(u8, parse_result.source.contents) catch unreachable), + // .source_code = ZigString.init(jsc_vm.allocator.dupe(u8, source.contents) catch unreachable), // .specifier = ZigString.init(specifier), // .source_url = input_specifier.createIfDifferent(path.text), // .tag = ResolvedSource.Tag.wasm, @@ -2482,7 +2484,7 @@ pub const RuntimeTranspilerStore = struct { } if (cache.entry) |*entry| { - vm.source_mappings.putMappings(parse_result.source, .{ + vm.source_mappings.putMappings(&parse_result.source, .{ .list = .{ .items = @constCast(entry.sourcemap), .capacity = entry.sourcemap.len }, .allocator = bun.default_allocator, }) catch {}; diff --git a/src/bun.js/SavedSourceMap.zig b/src/bun.js/SavedSourceMap.zig index b68eda6e5a..d909f854f1 100644 --- a/src/bun.js/SavedSourceMap.zig +++ b/src/bun.js/SavedSourceMap.zig @@ -130,7 +130,7 @@ pub fn removeZigSourceProvider(this: *SavedSourceMap, opaque_source_provider: *a pub const HashTable = std.HashMap(u64, *anyopaque, bun.IdentityContext(u64), 80); -pub fn onSourceMapChunk(this: *SavedSourceMap, chunk: SourceMap.Chunk, source: logger.Source) anyerror!void { +pub fn onSourceMapChunk(this: *SavedSourceMap, chunk: SourceMap.Chunk, source: *const logger.Source) anyerror!void { try this.putMappings(source, chunk.buffer); } @@ -159,7 +159,7 @@ pub fn deinit(this: *SavedSourceMap) void { this.map.deinit(); } -pub fn putMappings(this: *SavedSourceMap, source: logger.Source, mappings: MutableString) !void { +pub fn putMappings(this: *SavedSourceMap, source: *const logger.Source, mappings: MutableString) !void { try 
this.putValue(source.path.text, Value.init(bun.cast(*SavedMappings, mappings.list.items.ptr))); } diff --git a/src/bun.js/VirtualMachine.zig b/src/bun.js/VirtualMachine.zig index 575c706ea0..82c1badd9c 100644 --- a/src/bun.js/VirtualMachine.zig +++ b/src/bun.js/VirtualMachine.zig @@ -349,7 +349,7 @@ const SourceMapHandlerGetter = struct { /// When the inspector is enabled, we want to generate an inline sourcemap. /// And, for now, we also store it in source_mappings like normal /// This is hideously expensive memory-wise... - pub fn onChunk(this: *SourceMapHandlerGetter, chunk: SourceMap.Chunk, source: logger.Source) anyerror!void { + pub fn onChunk(this: *SourceMapHandlerGetter, chunk: SourceMap.Chunk, source: *const logger.Source) anyerror!void { var temp_json_buffer = bun.MutableString.initEmpty(bun.default_allocator); defer temp_json_buffer.deinit(); temp_json_buffer = try chunk.printSourceMapContentsAtOffset(source, temp_json_buffer, true, SavedSourceMap.vlq_offset, true); diff --git a/src/bun.js/api/JSTranspiler.zig b/src/bun.js/api/JSTranspiler.zig index d7f2ac4cb4..b9ec44b4d4 100644 --- a/src/bun.js/api/JSTranspiler.zig +++ b/src/bun.js/api/JSTranspiler.zig @@ -419,7 +419,7 @@ fn transformOptionsFromJSC(globalObject: *JSC.JSGlobalObject, temp_allocator: st if (TSConfigJSON.parse( allocator, &transpiler.log, - logger.Source.initPathString("tsconfig.json", transpiler.tsconfig_buf), + &logger.Source.initPathString("tsconfig.json", transpiler.tsconfig_buf), &JSC.VirtualMachine.get().transpiler.resolver.caches.json, ) catch null) |parsed_tsconfig| { transpiler.tsconfig = parsed_tsconfig; @@ -453,7 +453,7 @@ fn transformOptionsFromJSC(globalObject: *JSC.JSGlobalObject, temp_allocator: st if (out.isEmpty()) break :macros; transpiler.macros_buf = out.toOwnedSlice(allocator) catch bun.outOfMemory(); - const source = logger.Source.initPathString("macros.json", transpiler.macros_buf); + const source = &logger.Source.initPathString("macros.json", transpiler.macros_buf); const json = (JSC.VirtualMachine.get().transpiler.resolver.caches.json.parseJSON( &transpiler.log, source, @@ -461,7 +461,7 @@ fn transformOptionsFromJSC(globalObject: *JSC.JSGlobalObject, temp_allocator: st .json, false, ) catch null) orelse break :macros; - transpiler.macro_map = PackageJSON.parseMacrosJSON(allocator, json, &transpiler.log, &source); + transpiler.macro_map = PackageJSON.parseMacrosJSON(allocator, json, &transpiler.log, source); } } @@ -769,7 +769,7 @@ pub fn finalize(this: *JSTranspiler) void { fn getParseResult(this: *JSTranspiler, allocator: std.mem.Allocator, code: []const u8, loader: ?Loader, macro_js_ctx: Transpiler.MacroJSValueType) ?Transpiler.ParseResult { const name = this.transpiler_options.default_loader.stdinName(); - const source = logger.Source.initPathString(name, code); + const source = &logger.Source.initPathString(name, code); const jsx = if (this.transpiler_options.tsconfig != null) this.transpiler_options.tsconfig.?.mergeJSX(this.transpiler.options.jsx) @@ -784,7 +784,7 @@ fn getParseResult(this: *JSTranspiler, allocator: std.mem.Allocator, code: []con .loader = loader orelse this.transpiler_options.default_loader, .jsx = jsx, .path = source.path, - .virtual_source = &source, + .virtual_source = source, .replace_exports = this.transpiler_options.runtime.replace_exports, .macro_js_ctx = macro_js_ctx, // .allocator = this. 
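The same transformation repeats across the whole patch: functions that previously took `logger.Source` by value now take `*const logger.Source`, and call sites take the address of the freshly initialized struct (e.g. `&logger.Source.initPathString(...)`). Below is a minimal sketch of the idea; the `Source` struct and `initPathString` helper here are illustrative stand-ins, not Bun's actual `logger.Source` definition.

```zig
// Illustrative stand-in for logger.Source; the real struct carries
// more fields, which is why copying it by value is not free.
const Source = struct {
    path: []const u8,
    contents: []const u8,
    index: u32 = 0,
};

fn initPathString(path: []const u8, contents: []const u8) Source {
    return .{ .path = path, .contents = contents };
}

// Before: the argument is a fresh copy of the struct.
fn byValue(source: Source) usize {
    return source.contents.len;
}

// After: only a pointer crosses the call boundary, and the callee
// promises not to mutate through it.
fn byConstPtr(source: *const Source) usize {
    return source.contents.len;
}

pub fn main() void {
    const on_stack = Source{ .path = "input.toml", .contents = "a = 1" };
    _ = byValue(on_stack); // can copy the whole struct into the callee
    _ = byConstPtr(&on_stack); // passes a single pointer-sized value

    // Zig allows taking the address of a call's result; the temporary
    // lives to the end of the enclosing block, which is what rewritten
    // call sites like `&logger.Source.initPathString(...)` rely on.
    const source = &initPathString("macros.json", "{}");
    _ = byConstPtr(source);
}
```

Because the temporary created by `&initPathString(...)` is valid for the rest of the enclosing block, most call sites in the patch need no named local at all, which keeps the diff mechanical: `var source = ...; f(&source)` becomes `const source = &...; f(source)`.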
diff --git a/src/bun.js/api/TOMLObject.zig b/src/bun.js/api/TOMLObject.zig index 7cbba1b64c..5a1bb0103c 100644 --- a/src/bun.js/api/TOMLObject.zig +++ b/src/bun.js/api/TOMLObject.zig @@ -29,8 +29,8 @@ pub fn parse( var input_slice = try arguments[0].toSlice(globalThis, bun.default_allocator); defer input_slice.deinit(); - var source = logger.Source.initPathString("input.toml", input_slice.slice()); - const parse_result = TOMLParser.parse(&source, &log, allocator, false) catch { + const source = &logger.Source.initPathString("input.toml", input_slice.slice()); + const parse_result = TOMLParser.parse(source, &log, allocator, false) catch { return globalThis.throwValue(try log.toJS(globalThis, default_allocator, "Failed to parse toml")); }; @@ -41,7 +41,7 @@ pub fn parse( *js_printer.BufferPrinter, &writer, parse_result, - &source, + source, .{ .mangled_props = null, }, diff --git a/src/bun.js/api/server.zig b/src/bun.js/api/server.zig index 028fa4c1e2..3ca6c5a891 100644 --- a/src/bun.js/api/server.zig +++ b/src/bun.js/api/server.zig @@ -1744,12 +1744,12 @@ pub fn NewServer(protocol_enum: enum { http, https }, development_kind: enum { d const buffer_writer = js_printer.BufferWriter.init(allocator); var writer = js_printer.BufferPrinter.init(buffer_writer); defer writer.ctx.buffer.deinit(); - var source = logger.Source.initEmptyFile("info.json"); + const source = &logger.Source.initEmptyFile("info.json"); _ = js_printer.printJSON( *js_printer.BufferPrinter, &writer, bun.Global.BunInfo.generate(*Transpiler, &JSC.VirtualMachine.get().transpiler, allocator) catch unreachable, - &source, + source, .{ .mangled_props = null }, ) catch unreachable; diff --git a/src/bun.js/test/snapshot.zig b/src/bun.js/test/snapshot.zig index 779b00524f..897db792bd 100644 --- a/src/bun.js/test/snapshot.zig +++ b/src/bun.js/test/snapshot.zig @@ -134,12 +134,12 @@ pub const Snapshots = struct { remain[0] = 0; const snapshot_file_path = snapshot_file_path_buf[0 .. 
snapshot_file_path_buf.len - remain.len :0]; - const source = logger.Source.initPathString(snapshot_file_path, this.file_buf.items); + const source = &logger.Source.initPathString(snapshot_file_path, this.file_buf.items); var parser = try js_parser.Parser.init( opts, &temp_log, - &source, + source, vm.transpiler.options.define, this.allocator, ); @@ -256,7 +256,7 @@ pub const Snapshots = struct { const file_text = try file.file.readToEndAlloc(arena, std.math.maxInt(usize)); - var source = bun.logger.Source.initPathString(test_filename, file_text); + const source = &bun.logger.Source.initPathString(test_filename, file_text); var result_text = std.ArrayList(u8).init(arena); @@ -268,14 +268,14 @@ pub const Snapshots = struct { var last_col: c_ulong = 1; for (ils_info.items) |ils| { if (ils.line == last_line and ils.col == last_col) { - try log.addErrorFmt(&source, .{ .start = @intCast(uncommitted_segment_end) }, arena, "Failed to update inline snapshot: Multiple inline snapshots for the same call are not supported", .{}); + try log.addErrorFmt(source, .{ .start = @intCast(uncommitted_segment_end) }, arena, "Failed to update inline snapshot: Multiple inline snapshots for the same call are not supported", .{}); continue; } inline_snapshot_dbg("Finding byte for {}/{}", .{ ils.line, ils.col }); const byte_offset_add = logger.Source.lineColToByteOffset(file_text[last_byte..], last_line, last_col, ils.line, ils.col) orelse { inline_snapshot_dbg("-> Could not find byte", .{}); - try log.addErrorFmt(&source, .{ .start = @intCast(uncommitted_segment_end) }, arena, "Failed to update inline snapshot: Ln {d}, Col {d} not found", .{ ils.line, ils.col }); + try log.addErrorFmt(source, .{ .start = @intCast(uncommitted_segment_end) }, arena, "Failed to update inline snapshot: Ln {d}, Col {d} not found", .{ ils.line, ils.col }); continue; }; @@ -297,7 +297,7 @@ pub const Snapshots = struct { }; const fn_name = ils.kind; if (!bun.strings.startsWith(file_text[next_start..], fn_name)) { - try log.addErrorFmt(&source, .{ .start = @intCast(next_start) }, arena, "Failed to update inline snapshot: Could not find '{s}' here", .{fn_name}); + try log.addErrorFmt(source, .{ .start = @intCast(next_start) }, arena, "Failed to update inline snapshot: Could not find '{s}' here", .{fn_name}); continue; } next_start += fn_name.len; @@ -310,14 +310,14 @@ pub const Snapshots = struct { } try lexer.next(); var parser: bun.js_parser.TSXParser = undefined; - try bun.js_parser.TSXParser.init(arena, &log, &source, vm.transpiler.options.define, lexer, opts, &parser); + try bun.js_parser.TSXParser.init(arena, &log, source, vm.transpiler.options.define, lexer, opts, &parser); try parser.lexer.expect(.t_open_paren); const after_open_paren_loc = parser.lexer.loc().start; if (parser.lexer.token == .t_close_paren) { // zero args if (ils.has_matchers) { - try log.addErrorFmt(&source, parser.lexer.loc(), arena, "Failed to update inline snapshot: Snapshot has matchers and yet has no arguments", .{}); + try log.addErrorFmt(source, parser.lexer.loc(), arena, "Failed to update inline snapshot: Snapshot has matchers and yet has no arguments", .{}); continue; } const close_paren_loc = parser.lexer.loc().start; @@ -325,7 +325,7 @@ pub const Snapshots = struct { break :blk .{ after_open_paren_loc, close_paren_loc, false }; } if (parser.lexer.token == .t_dot_dot_dot) { - try log.addErrorFmt(&source, parser.lexer.loc(), arena, "Failed to update inline snapshot: Spread is not allowed", .{}); + try log.addErrorFmt(source, parser.lexer.loc(), arena, 
"Failed to update inline snapshot: Spread is not allowed", .{}); continue; } @@ -346,7 +346,7 @@ pub const Snapshots = struct { break :blk .{ after_expr_loc, after_comma_loc, true }; } else { if (expr_1.data != .e_string) { - try log.addErrorFmt(&source, expr_1.loc, arena, "Failed to update inline snapshot: Argument must be a string literal", .{}); + try log.addErrorFmt(source, expr_1.loc, arena, "Failed to update inline snapshot: Argument must be a string literal", .{}); continue; } break :blk .{ before_expr_loc, after_expr_loc, false }; @@ -354,7 +354,7 @@ pub const Snapshots = struct { } if (parser.lexer.token == .t_dot_dot_dot) { - try log.addErrorFmt(&source, parser.lexer.loc(), arena, "Failed to update inline snapshot: Spread is not allowed", .{}); + try log.addErrorFmt(source, parser.lexer.loc(), arena, "Failed to update inline snapshot: Spread is not allowed", .{}); continue; } @@ -363,11 +363,11 @@ pub const Snapshots = struct { const after_expr_2_loc = parser.lexer.loc().start; if (!ils.has_matchers) { - try log.addErrorFmt(&source, parser.lexer.loc(), arena, "Failed to update inline snapshot: Snapshot does not have matchers and yet has two arguments", .{}); + try log.addErrorFmt(source, parser.lexer.loc(), arena, "Failed to update inline snapshot: Snapshot does not have matchers and yet has two arguments", .{}); continue; } if (expr_2.data != .e_string) { - try log.addErrorFmt(&source, expr_2.loc, arena, "Failed to update inline snapshot: Argument must be a string literal", .{}); + try log.addErrorFmt(source, expr_2.loc, arena, "Failed to update inline snapshot: Argument must be a string literal", .{}); continue; } @@ -375,7 +375,7 @@ pub const Snapshots = struct { try parser.lexer.expect(.t_comma); } if (parser.lexer.token != .t_close_paren) { - try log.addErrorFmt(&source, parser.lexer.loc(), arena, "Failed to update inline snapshot: Snapshot expects at most two arguments", .{}); + try log.addErrorFmt(source, parser.lexer.loc(), arena, "Failed to update inline snapshot: Snapshot expects at most two arguments", .{}); continue; } try parser.lexer.expect(.t_close_paren); @@ -387,7 +387,7 @@ pub const Snapshots = struct { inline_snapshot_dbg(" -> Found update range {}-{}", .{ final_start_usize, final_end_usize }); if (final_end_usize < final_start_usize or final_start_usize < uncommitted_segment_end) { - try log.addErrorFmt(&source, .{ .start = final_start }, arena, "Failed to update inline snapshot: Did not advance.", .{}); + try log.addErrorFmt(source, .{ .start = final_start }, arena, "Failed to update inline snapshot: Did not advance.", .{}); continue; } @@ -452,12 +452,12 @@ pub const Snapshots = struct { // 4. 
write out result_text to the file file.file.seekTo(0) catch |e| { - try log.addErrorFmt(&source, .{ .start = 0 }, arena, "Failed to update inline snapshot: Seek file error: {s}", .{@errorName(e)}); + try log.addErrorFmt(source, .{ .start = 0 }, arena, "Failed to update inline snapshot: Seek file error: {s}", .{@errorName(e)}); continue; }; file.file.writeAll(result_text.items) catch |e| { - try log.addErrorFmt(&source, .{ .start = 0 }, arena, "Failed to update inline snapshot: Write file error: {s}", .{@errorName(e)}); + try log.addErrorFmt(source, .{ .start = 0 }, arena, "Failed to update inline snapshot: Write file error: {s}", .{@errorName(e)}); continue; }; if (result_text.items.len < file_text.len) { diff --git a/src/bundler/LinkerContext.zig b/src/bundler/LinkerContext.zig index 8f0ec14cb5..1a2d4f3246 100644 --- a/src/bundler/LinkerContext.zig +++ b/src/bundler/LinkerContext.zig @@ -888,11 +888,11 @@ pub const LinkerContext = struct { const parent_source_index = other_source_index; if (parent_result_tla_keyword.len > 0) { - const source = input_files[other_source_index]; + const source = &input_files[other_source_index]; tla_pretty_path = source.path.pretty; notes.append(Logger.Data{ .text = std.fmt.allocPrint(c.allocator, "The top-level await in {s} is here:", .{tla_pretty_path}) catch bun.outOfMemory(), - .location = .initOrNull(&source, parent_result_tla_keyword), + .location = .initOrNull(source, parent_result_tla_keyword), }) catch bun.outOfMemory(); break; } diff --git a/src/bundler/ParseTask.zig b/src/bundler/ParseTask.zig index 37419eae0e..9f1acebf52 100644 --- a/src/bundler/ParseTask.zig +++ b/src/bundler/ParseTask.zig @@ -283,17 +283,17 @@ fn getEmptyCSSAST( transpiler: *Transpiler, opts: js_parser.Parser.Options, allocator: std.mem.Allocator, - source: Logger.Source, + source: *const Logger.Source, ) !JSAst { const root = Expr.init(E.Object, E.Object{}, Logger.Loc{ .start = 0 }); - var ast = JSAst.init((try js_parser.newLazyExportAST(allocator, transpiler.options.define, opts, log, root, &source, "")).?); + var ast = JSAst.init((try js_parser.newLazyExportAST(allocator, transpiler.options.define, opts, log, root, source, "")).?); ast.css = bun.create(allocator, bun.css.BundlerStyleSheet, bun.css.BundlerStyleSheet.empty(allocator)); return ast; } -fn getEmptyAST(log: *Logger.Log, transpiler: *Transpiler, opts: js_parser.Parser.Options, allocator: std.mem.Allocator, source: Logger.Source, comptime RootType: type) !JSAst { +fn getEmptyAST(log: *Logger.Log, transpiler: *Transpiler, opts: js_parser.Parser.Options, allocator: std.mem.Allocator, source: *const Logger.Source, comptime RootType: type) !JSAst { const root = Expr.init(RootType, RootType{}, Logger.Loc.Empty); - return JSAst.init((try js_parser.newLazyExportAST(allocator, transpiler.options.define, opts, log, root, &source, "")).?); + return JSAst.init((try js_parser.newLazyExportAST(allocator, transpiler.options.define, opts, log, root, source, "")).?); } const FileLoaderHash = struct { @@ -307,7 +307,7 @@ fn getAST( opts: js_parser.Parser.Options, allocator: std.mem.Allocator, resolver: *Resolver, - source: Logger.Source, + source: *const Logger.Source, loader: Loader, unique_key_prefix: u64, unique_key_for_additional_file: *FileLoaderHash, @@ -322,7 +322,7 @@ fn getAST( opts, transpiler.options.define, log, - &source, + source, )) |res| JSAst.init(res.ast) else switch (opts.module_type == .esm) { @@ -340,7 +340,7 @@ fn getAST( const trace = bun.perf.trace("Bundler.ParseJSON"); defer trace.end(); const root = (try 
resolver.caches.json.parseJSON(log, source, allocator, if (v == .jsonc) .jsonc else .json, true)) orelse Expr.init(E.Object, E.Object{}, Logger.Loc.Empty); - return JSAst.init((try js_parser.newLazyExportAST(allocator, transpiler.options.define, opts, log, root, &source, "")).?); + return JSAst.init((try js_parser.newLazyExportAST(allocator, transpiler.options.define, opts, log, root, source, "")).?); }, .toml => { const trace = bun.perf.trace("Bundler.ParseTOML"); @@ -350,22 +350,22 @@ fn getAST( temp_log.cloneToWithRecycled(log, true) catch bun.outOfMemory(); temp_log.msgs.clearAndFree(); } - const root = try TOML.parse(&source, &temp_log, allocator, false); - return JSAst.init((try js_parser.newLazyExportAST(allocator, transpiler.options.define, opts, &temp_log, root, &source, "")).?); + const root = try TOML.parse(source, &temp_log, allocator, false); + return JSAst.init((try js_parser.newLazyExportAST(allocator, transpiler.options.define, opts, &temp_log, root, source, "")).?); }, .text => { const root = Expr.init(E.String, E.String{ .data = source.contents, }, Logger.Loc{ .start = 0 }); - var ast = JSAst.init((try js_parser.newLazyExportAST(allocator, transpiler.options.define, opts, log, root, &source, "")).?); - ast.addUrlForCss(allocator, &source, "text/plain", null); + var ast = JSAst.init((try js_parser.newLazyExportAST(allocator, transpiler.options.define, opts, log, root, source, "")).?); + ast.addUrlForCss(allocator, source, "text/plain", null); return ast; }, .sqlite_embedded, .sqlite => { if (!transpiler.options.target.isBun()) { log.addError( - &source, + source, Logger.Loc.Empty, "To use the \"sqlite\" loader, set target to \"bun\"", ) catch bun.outOfMemory(); @@ -426,13 +426,13 @@ fn getAST( .name = "db", }, Logger.Loc{ .start = 0 }); - return JSAst.init((try js_parser.newLazyExportAST(allocator, transpiler.options.define, opts, log, root, &source, "")).?); + return JSAst.init((try js_parser.newLazyExportAST(allocator, transpiler.options.define, opts, log, root, source, "")).?); }, .napi => { // (dap-eval-cb "source.contents.ptr") if (transpiler.options.target == .browser) { log.addError( - &source, + source, Logger.Loc.Empty, "Loading .node files won't work in the browser. 
Make sure to set target to \"bun\" or \"node\"", ) catch bun.outOfMemory(); @@ -460,10 +460,10 @@ fn getAST( .key = unique_key, .content_hash = ContentHasher.run(source.contents), }; - return JSAst.init((try js_parser.newLazyExportAST(allocator, transpiler.options.define, opts, log, root, &source, "")).?); + return JSAst.init((try js_parser.newLazyExportAST(allocator, transpiler.options.define, opts, log, root, source, "")).?); }, .html => { - var scanner = HTMLScanner.init(allocator, log, &source); + var scanner = HTMLScanner.init(allocator, log, source); try scanner.scan(source.contents); // Reuse existing code for creating the AST @@ -475,7 +475,7 @@ fn getAST( opts, log, Expr.init(E.Missing, E.Missing{}, Logger.Loc.Empty), - &source, + source, "", )).?; ast.import_records = scanner.import_records; @@ -518,7 +518,7 @@ fn getAST( const source_code = source.contents; var temp_log = bun.logger.Log.init(allocator); defer { - temp_log.appendToMaybeRecycled(log, &source) catch bun.outOfMemory(); + temp_log.appendToMaybeRecycled(log, source) catch bun.outOfMemory(); } const css_module_suffix = ".module.css"; @@ -540,7 +540,7 @@ fn getAST( )) { .result => |v| v, .err => |e| { - try e.addToLogger(&temp_log, &source, allocator); + try e.addToLogger(&temp_log, source, allocator); return error.SyntaxError; }, }; @@ -557,7 +557,7 @@ fn getAST( .targets = bun.css.Targets.forBundlerTarget(transpiler.options.target), .unused_symbols = .{}, }, &extra).asErr()) |e| { - try e.addToLogger(&temp_log, &source, allocator); + try e.addToLogger(&temp_log, source, allocator); return error.MinifyError; } if (css_ast.local_scope.count() > 0) { @@ -566,7 +566,7 @@ fn getAST( // If this is a css module, the final exports object wil be set in `generateCodeForLazyExport`. const root = Expr.init(E.Object, E.Object{}, Logger.Loc{ .start = 0 }); const css_ast_heap = bun.create(allocator, bun.css.BundlerStyleSheet, css_ast); - var ast = JSAst.init((try js_parser.newLazyExportASTImpl(allocator, transpiler.options.define, opts, &temp_log, root, &source, "", extra.symbols)).?); + var ast = JSAst.init((try js_parser.newLazyExportASTImpl(allocator, transpiler.options.define, opts, &temp_log, root, source, "", extra.symbols)).?); ast.css = css_ast_heap; ast.import_records = import_records; return ast; @@ -608,8 +608,8 @@ fn getAST( .key = unique_key, .content_hash = content_hash, }; - var ast = JSAst.init((try js_parser.newLazyExportAST(allocator, transpiler.options.define, opts, log, root, &source, "")).?); - ast.addUrlForCss(allocator, &source, null, unique_key); + var ast = JSAst.init((try js_parser.newLazyExportAST(allocator, transpiler.options.define, opts, log, root, source, "")).?); + ast.addUrlForCss(allocator, source, null, unique_key); return ast; }, } @@ -1080,7 +1080,7 @@ fn runWithSourceCode( var transpiler = &data.transpiler; errdefer transpiler.resetStore(); var resolver: *Resolver = &transpiler.resolver; - var file_path = task.path; + const file_path = &task.path; var loader = task.loader orelse file_path.loader(&transpiler.options.loaders) orelse options.Loader.file; // Do not process files as HTML if any of the following are true: @@ -1159,8 +1159,8 @@ fn runWithSourceCode( bun.assert(transpiler.options.target == .browser); } - var source = Logger.Source{ - .path = file_path, + const source = &Logger.Source{ + .path = file_path.*, .index = task.source_index, .contents = entry.contents, .contents_is_recycled = false, @@ -1262,7 +1262,7 @@ fn runWithSourceCode( return .{ .ast = ast, - .source = source, + .source 
= source.*, .log = log.*, .use_directive = use_directive, .unique_key_for_additional_file = unique_key_for_additional_file.key, diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig index ff200c2dd4..60e9fe2199 100644 --- a/src/bundler/bundle_v2.zig +++ b/src/bundler/bundle_v2.zig @@ -566,7 +566,7 @@ pub const BundleV2 = struct { }).disableHTML(); const idx = this.enqueueParseTask( &resolve_result, - .{ + &.{ .path = path.*, .contents = "", }, @@ -1128,7 +1128,7 @@ pub const BundleV2 = struct { pub fn enqueueParseTask( this: *BundleV2, resolve_result: *const _resolver.Result, - source: Logger.Source, + source: *const Logger.Source, loader_: Loader, known_target: options.Target, ) OOM!Index.Int { @@ -1138,7 +1138,7 @@ pub const BundleV2 = struct { const loader = loader_.disableHTML(); this.graph.input_files.append(bun.default_allocator, .{ - .source = source, + .source = source.*, .loader = loader, .side_effects = loader.sideEffects(), }) catch bun.outOfMemory(); @@ -1170,7 +1170,7 @@ pub const BundleV2 = struct { pub fn enqueueParseTask2( this: *BundleV2, - source: Logger.Source, + source: *const Logger.Source, loader: Loader, known_target: options.Target, ) OOM!Index.Int { @@ -1178,7 +1178,7 @@ pub const BundleV2 = struct { this.graph.ast.append(bun.default_allocator, JSAst.empty) catch unreachable; this.graph.input_files.append(bun.default_allocator, .{ - .source = source, + .source = source.*, .loader = loader, .side_effects = loader.sideEffects(), }) catch bun.outOfMemory(); @@ -3301,24 +3301,24 @@ pub const BundleV2 = struct { result.source, ) catch bun.outOfMemory(); - var ssr_source = result.source; + const ssr_source = &result.source; ssr_source.path.pretty = ssr_source.path.text; ssr_source.path = this.pathWithPrettyInitialized(ssr_source.path, .bake_server_components_ssr) catch bun.outOfMemory(); const ssr_index = this.enqueueParseTask2( ssr_source, - this.graph.input_files.items(.loader)[result.source.index.get()], + graph.input_files.items(.loader)[result.source.index.get()], .bake_server_components_ssr, ) catch bun.outOfMemory(); break :brk .{ reference_source_index, ssr_index }; } else brk: { // Enqueue only one file - var server_source = result.source; + const server_source = &result.source; server_source.path.pretty = server_source.path.text; server_source.path = this.pathWithPrettyInitialized(server_source.path, this.transpiler.options.target) catch bun.outOfMemory(); const server_index = this.enqueueParseTask2( server_source, - this.graph.input_files.items(.loader)[result.source.index.get()], + graph.input_files.items(.loader)[result.source.index.get()], .browser, ) catch bun.outOfMemory(); diff --git a/src/bunfig.zig b/src/bunfig.zig index 6be7cc5d6f..09f62008db 100644 --- a/src/bunfig.zig +++ b/src/bunfig.zig @@ -975,21 +975,21 @@ pub const Bunfig = struct { } }; - pub fn parse(allocator: std.mem.Allocator, source: logger.Source, ctx: Command.Context, comptime cmd: Command.Tag) !void { + pub fn parse(allocator: std.mem.Allocator, source: *const logger.Source, ctx: Command.Context, comptime cmd: Command.Tag) !void { const log_count = ctx.log.errors + ctx.log.warnings; - const expr = if (strings.eqlComptime(source.path.name.ext[1..], "toml")) TOML.parse(&source, ctx.log, allocator, true) catch |err| { + const expr = if (strings.eqlComptime(source.path.name.ext[1..], "toml")) TOML.parse(source, ctx.log, allocator, true) catch |err| { if (ctx.log.errors + ctx.log.warnings == log_count) { try ctx.log.addErrorOpts("Failed to parse", .{ - .source = &source, + 
.source = source, .redact_sensitive_information = true, }); } return err; - } else JSONParser.parseTSConfig(&source, ctx.log, allocator, true) catch |err| { + } else JSONParser.parseTSConfig(source, ctx.log, allocator, true) catch |err| { if (ctx.log.errors + ctx.log.warnings == log_count) { try ctx.log.addErrorOpts("Failed to parse", .{ - .source = &source, + .source = source, .redact_sensitive_information = true, }); } @@ -1000,7 +1000,7 @@ pub const Bunfig = struct { .json = expr, .log = ctx.log, .allocator = allocator, - .source = &source, + .source = source, .bunfig = &ctx.args, .ctx = ctx, }; diff --git a/src/cache.zig b/src/cache.zig index 1587c65753..0321856e6c 100644 --- a/src/cache.zig +++ b/src/cache.zig @@ -303,16 +303,16 @@ pub const Json = struct { pub fn init(_: std.mem.Allocator) Json { return Json{}; } - fn parse(_: *@This(), log: *logger.Log, source: logger.Source, allocator: std.mem.Allocator, comptime func: anytype, comptime force_utf8: bool) anyerror!?js_ast.Expr { + fn parse(_: *@This(), log: *logger.Log, source: *const logger.Source, allocator: std.mem.Allocator, comptime func: anytype, comptime force_utf8: bool) anyerror!?js_ast.Expr { var temp_log = logger.Log.init(allocator); defer { - temp_log.appendToMaybeRecycled(log, &source) catch {}; + temp_log.appendToMaybeRecycled(log, source) catch {}; } - return func(&source, &temp_log, allocator, force_utf8) catch handler: { + return func(source, &temp_log, allocator, force_utf8) catch handler: { break :handler null; }; } - pub fn parseJSON(cache: *@This(), log: *logger.Log, source: logger.Source, allocator: std.mem.Allocator, mode: enum { json, jsonc }, comptime force_utf8: bool) anyerror!?js_ast.Expr { + pub fn parseJSON(cache: *@This(), log: *logger.Log, source: *const logger.Source, allocator: std.mem.Allocator, mode: enum { json, jsonc }, comptime force_utf8: bool) anyerror!?js_ast.Expr { // tsconfig.* and jsconfig.* files are JSON files, but they are not valid JSON files. // They are JSON files with comments and trailing commas. // Sometimes tooling expects this to work. 
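Where a copy of the `Source` is genuinely needed (storing it into `input_files`, `custom_files_loaded`, or a returned struct), the patch dereferences explicitly with `source.*`, so the one remaining memcpy is visible at the single point of ownership transfer rather than hidden at every call boundary. A hedged sketch of that shape, again with an illustrative `Source` rather than Bun's real type:

```zig
const std = @import("std");

const Source = struct {
    path: []const u8,
    contents: []const u8,
};

// Every intermediate frame sees only a *const pointer; the single
// deliberate copy happens where the value is actually stored.
fn store(list: *std.ArrayList(Source), source: *const Source) !void {
    try list.append(source.*); // explicit dereference-copy
}

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();
    var list = std.ArrayList(Source).init(gpa.allocator());
    defer list.deinit();

    const source = Source{ .path = "package.json", .contents = "{}" };
    try store(&list, &source);
}
```

This matches the hunks above and below, where `.source = source.*` and `@field(this, base) = source.*` mark exactly the places that still copy.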
@@ -323,11 +323,11 @@ pub const Json = struct { return try parse(cache, log, source, allocator, json_parser.parse, force_utf8); } - pub fn parsePackageJSON(cache: *@This(), log: *logger.Log, source: logger.Source, allocator: std.mem.Allocator, comptime force_utf8: bool) anyerror!?js_ast.Expr { + pub fn parsePackageJSON(cache: *@This(), log: *logger.Log, source: *const logger.Source, allocator: std.mem.Allocator, comptime force_utf8: bool) anyerror!?js_ast.Expr { return try parse(cache, log, source, allocator, json_parser.parseTSConfig, force_utf8); } - pub fn parseTSConfig(cache: *@This(), log: *logger.Log, source: logger.Source, allocator: std.mem.Allocator) anyerror!?js_ast.Expr { + pub fn parseTSConfig(cache: *@This(), log: *logger.Log, source: *const logger.Source, allocator: std.mem.Allocator) anyerror!?js_ast.Expr { return try parse(cache, log, source, allocator, json_parser.parseTSConfig, true); } }; diff --git a/src/cli.zig b/src/cli.zig index f27a0eee79..562a34fac2 100644 --- a/src/cli.zig +++ b/src/cli.zig @@ -357,7 +357,7 @@ pub const Arguments = struct { ctx.log.level = original_level; } ctx.log.level = logger.Log.Level.warn; - try Bunfig.parse(allocator, logger.Source.initPathString(bun.asByteSlice(config_path), contents), ctx, cmd); + try Bunfig.parse(allocator, &logger.Source.initPathString(bun.asByteSlice(config_path), contents), ctx, cmd); } fn getHomeConfigPath(buf: *bun.PathBuffer) ?[:0]const u8 { diff --git a/src/cli/audit_command.zig b/src/cli/audit_command.zig index 2b47fb208c..ce796ef9a6 100644 --- a/src/cli/audit_command.zig +++ b/src/cli/audit_command.zig @@ -113,11 +113,11 @@ pub const AuditCommand = struct { Output.writer().writeByte('\n') catch {}; if (response_text.len > 0) { - const source = logger.Source.initPathString("audit-response.json", response_text); + const source = &logger.Source.initPathString("audit-response.json", response_text); var log = logger.Log.init(ctx.allocator); defer log.deinit(); - const expr = @import("../json_parser.zig").parse(&source, &log, ctx.allocator, true) catch { + const expr = @import("../json_parser.zig").parse(source, &log, ctx.allocator, true) catch { Output.prettyErrorln("error: audit request failed to parse json. 
Is the registry down?", .{}); return 1; // If we can't parse then safe to assume a similar failure }; @@ -543,11 +543,11 @@ fn printEnhancedAuditReport( pm: *PackageManager, dependency_tree: *const bun.StringHashMap(std.ArrayList([]const u8)), ) bun.OOM!u32 { - const source = logger.Source.initPathString("audit-response.json", response_text); + const source = &logger.Source.initPathString("audit-response.json", response_text); var log = logger.Log.init(allocator); defer log.deinit(); - const expr = @import("../json_parser.zig").parse(&source, &log, allocator, true) catch { + const expr = @import("../json_parser.zig").parse(source, &log, allocator, true) catch { Output.writer().writeAll(response_text) catch {}; Output.writer().writeByte('\n') catch {}; return 1; diff --git a/src/cli/bunx_command.zig b/src/cli/bunx_command.zig index ac76cbb01e..08faf854f9 100644 --- a/src/cli/bunx_command.zig +++ b/src/cli/bunx_command.zig @@ -160,12 +160,12 @@ pub const BunxCommand = struct { } const package_json_contents = package_json_read.bytes.items; - const source = bun.logger.Source.initPathString(bun.span(subpath_z), package_json_contents); + const source = &bun.logger.Source.initPathString(bun.span(subpath_z), package_json_contents); bun.JSAst.Expr.Data.Store.create(); bun.JSAst.Stmt.Data.Store.create(); - const expr = try bun.JSON.parsePackageJSONUTF8(&source, transpiler.log, transpiler.allocator); + const expr = try bun.JSON.parsePackageJSONUTF8(source, transpiler.log, transpiler.allocator); // choose the first package that fits if (expr.get("bin")) |bin_expr| { diff --git a/src/cli/create_command.zig b/src/cli/create_command.zig index 2ddaf436f0..119fa520b2 100644 --- a/src/cli/create_command.zig +++ b/src/cli/create_command.zig @@ -688,9 +688,9 @@ pub const CreateCommand = struct { if (package_json_file != null) { initializeStore(); - var source = logger.Source.initPathString("package.json", package_json_contents.list.items); + const source = &logger.Source.initPathString("package.json", package_json_contents.list.items); - var package_json_expr = JSON.parseUTF8(&source, ctx.log, ctx.allocator) catch { + var package_json_expr = JSON.parseUTF8(source, ctx.log, ctx.allocator) catch { package_json_file = null; break :process_package_json; }; @@ -1429,7 +1429,7 @@ pub const CreateCommand = struct { @TypeOf(&package_json_writer), &package_json_writer, package_json_expr, - &source, + source, .{ .mangled_props = null }, ) catch |err| { Output.prettyErrorln("package.json failed to write due to error {s}", .{@errorName(err)}); @@ -2134,8 +2134,8 @@ pub const Example = struct { progress.name = "Parsing package.json"; refresher.refresh(); initializeStore(); - var source = logger.Source.initPathString("package.json", mutable.list.items); - var expr = JSON.parseUTF8(&source, ctx.log, ctx.allocator) catch |err| { + const source = &logger.Source.initPathString("package.json", mutable.list.items); + var expr = JSON.parseUTF8(source, ctx.log, ctx.allocator) catch |err| { progress.end(); refresher.refresh(); @@ -2265,8 +2265,8 @@ pub const Example = struct { } initializeStore(); - var source = logger.Source.initPathString("examples.json", mutable.list.items); - const examples_object = JSON.parseUTF8(&source, ctx.log, ctx.allocator) catch |err| { + const source = &logger.Source.initPathString("examples.json", mutable.list.items); + const examples_object = JSON.parseUTF8(source, ctx.log, ctx.allocator) catch |err| { if (ctx.log.errors > 0) { try ctx.log.print(Output.errorWriter()); Global.exit(1); diff --git 
a/src/cli/init_command.zig b/src/cli/init_command.zig index d73d145294..28b73b8f98 100644 --- a/src/cli/init_command.zig +++ b/src/cli/init_command.zig @@ -473,9 +473,9 @@ pub const InitCommand = struct { var did_load_package_json = false; if (package_json_contents.list.items.len > 0) { process_package_json: { - var source = logger.Source.initPathString("package.json", package_json_contents.list.items); + const source = &logger.Source.initPathString("package.json", package_json_contents.list.items); var log = logger.Log.init(alloc); - var package_json_expr = JSON.parsePackageJSONUTF8(&source, &log, alloc) catch { + var package_json_expr = JSON.parsePackageJSONUTF8(source, &log, alloc) catch { package_json_file = null; break :process_package_json; }; diff --git a/src/cli/pack_command.zig b/src/cli/pack_command.zig index 89c2403d35..3d699e4565 100644 --- a/src/cli/pack_command.zig +++ b/src/cli/pack_command.zig @@ -700,12 +700,12 @@ pub const PackCommand = struct { if (strings.eqlComptime(entry_name, "package.json")) { if (entry.kind != .file) break :root_depth; // find more dependencies to bundle - const source = File.toSourceAt(dir, entryNameZ(entry_name, entry_subpath), ctx.allocator, .{}).unwrap() catch |err| { + const source = &(File.toSourceAt(dir, entryNameZ(entry_name, entry_subpath), ctx.allocator, .{}).unwrap() catch |err| { Output.err(err, "failed to read package.json: \"{s}\"", .{entry_subpath}); Global.crash(); - }; + }); - const json = JSON.parsePackageJSONUTF8(&source, ctx.manager.log, ctx.allocator) catch + const json = JSON.parsePackageJSONUTF8(source, ctx.manager.log, ctx.allocator) catch break :root_depth; // for each dependency in `dependencies` find the closest node_modules folder @@ -1761,7 +1761,7 @@ pub const PackCommand = struct { package_name, package_version, &json.root, - json.source, + &json.source, shasum, integrity, ); diff --git a/src/cli/pm_view_command.zig b/src/cli/pm_view_command.zig index 741285241d..1d3a6cf8e7 100644 --- a/src/cli/pm_view_command.zig +++ b/src/cli/pm_view_command.zig @@ -27,9 +27,9 @@ pub fn view(allocator: std.mem.Allocator, manager: *PackageManager, spec_: strin switch (bun.sys.File.readFrom(manager.root_dir.fd, "package.json", allocator)) { .err => {}, .result => |str| { - const source = logger.Source.initPathString("package.json", str); + const source = &logger.Source.initPathString("package.json", str); var log = logger.Log.init(allocator); - const json = JSON.parse(&source, &log, allocator, false) catch break :from_package_json; + const json = JSON.parse(source, &log, allocator, false) catch break :from_package_json; if (json.getStringCloned(allocator, "name") catch null) |name| { if (name.len > 0) { break :brk name; @@ -99,8 +99,8 @@ pub fn view(allocator: std.mem.Allocator, manager: *PackageManager, spec_: strin } var log = logger.Log.init(allocator); - const source = logger.Source.initPathString("view.json", response_buf.list.items); - var json = JSON.parseUTF8(&source, &log, allocator) catch |err| { + const source = &logger.Source.initPathString("view.json", response_buf.list.items); + var json = JSON.parseUTF8(source, &log, allocator) catch |err| { Output.err(err, "failed to parse response body as JSON", .{}); Global.crash(); }; @@ -241,7 +241,7 @@ pub fn view(allocator: std.mem.Allocator, manager: *PackageManager, spec_: strin @TypeOf(&package_json_writer), &package_json_writer, value, - &source, + source, .{ .mangled_props = null, }, @@ -277,7 +277,7 @@ pub fn view(allocator: std.mem.Allocator, manager: *PackageManager, 
spec_: strin @TypeOf(&package_json_writer), &package_json_writer, manifest, - &source, + source, .{ .mangled_props = null, .indent = .{ diff --git a/src/cli/publish_command.zig b/src/cli/publish_command.zig index 93ee225641..8aea179caf 100644 --- a/src/cli/publish_command.zig +++ b/src/cli/publish_command.zig @@ -167,8 +167,8 @@ pub const PublishCommand = struct { const package_json_contents = maybe_package_json_contents orelse return error.MissingPackageJSON; const package_name, const package_version, var json, const json_source = package_info: { - const source = logger.Source.initPathString("package.json", package_json_contents); - const json = JSON.parsePackageJSONUTF8(&source, manager.log, ctx.allocator) catch |err| { + const source = &logger.Source.initPathString("package.json", package_json_contents); + const json = JSON.parsePackageJSONUTF8(source, manager.log, ctx.allocator) catch |err| { return switch (err) { error.OutOfMemory => |oom| return oom, else => error.InvalidPackageJSON, @@ -868,7 +868,7 @@ pub const PublishCommand = struct { package_name: string, package_version: string, json: *Expr, - json_source: logger.Source, + json_source: *const logger.Source, shasum: sha.SHA1.Digest, integrity: sha.SHA512.Digest, ) OOM!string { @@ -966,7 +966,7 @@ pub const PublishCommand = struct { @TypeOf(&writer), &writer, json.*, - &json_source, + json_source, .{ .minify_whitespace = true, .mangled_props = null, diff --git a/src/cli/upgrade_command.zig b/src/cli/upgrade_command.zig index 12cde61c88..426d482384 100644 --- a/src/cli/upgrade_command.zig +++ b/src/cli/upgrade_command.zig @@ -209,9 +209,9 @@ pub const UpgradeCommand = struct { var log = logger.Log.init(allocator); defer if (comptime silent) log.deinit(); - var source = logger.Source.initPathString("releases.json", metadata_body.list.items); + const source = &logger.Source.initPathString("releases.json", metadata_body.list.items); initializeStore(); - var expr = JSON.parseUTF8(&source, &log, allocator) catch |err| { + var expr = JSON.parseUTF8(source, &log, allocator) catch |err| { if (!silent) { progress.?.end(); refresher.?.refresh(); diff --git a/src/defines.zig b/src/defines.zig index b2cf30ed48..566fe23c65 100644 --- a/src/defines.zig +++ b/src/defines.zig @@ -140,11 +140,11 @@ pub const DefineData = struct { }; } const _log = log; - var source = logger.Source{ + const source = &logger.Source{ .contents = value_str, .path = defines_path, }; - const expr = try json_parser.parseEnvJSON(&source, _log, allocator); + const expr = try json_parser.parseEnvJSON(source, _log, allocator); const cloned = try expr.data.deepClone(allocator); return .{ .value = cloned, diff --git a/src/env_loader.zig b/src/env_loader.zig index 9d13255d45..dd6cfbd78a 100644 --- a/src/env_loader.zig +++ b/src/env_loader.zig @@ -518,8 +518,8 @@ pub const Loader = struct { // mostly for tests pub fn loadFromString(this: *Loader, str: string, comptime overwrite: bool, comptime expand: bool) void { - var source = logger.Source.initPathString("test", str); - Parser.parse(&source, this.allocator, this.map, overwrite, false, expand); + const source = &logger.Source.initPathString("test", str); + Parser.parse(source, this.allocator, this.map, overwrite, false, expand); std.mem.doNotOptimizeAway(&source); } @@ -774,10 +774,10 @@ pub const Loader = struct { // The null byte here is mostly for debugging purposes. 
buf[end] = 0; - const source = logger.Source.initPathString(base, buf[0..amount_read]); + const source = &logger.Source.initPathString(base, buf[0..amount_read]); Parser.parse( - &source, + source, this.allocator, this.map, override, @@ -785,7 +785,7 @@ pub const Loader = struct { true, ); - @field(this, base) = source; + @field(this, base) = source.*; } pub fn loadEnvFileDynamic( @@ -845,10 +845,10 @@ pub const Loader = struct { // The null byte here is mostly for debugging purposes. buf[end] = 0; - const source = logger.Source.initPathString(file_path, buf[0..amount_read]); + const source = &logger.Source.initPathString(file_path, buf[0..amount_read]); Parser.parse( - &source, + source, this.allocator, this.map, override, @@ -856,7 +856,7 @@ pub const Loader = struct { true, ); - try this.custom_files_loaded.put(file_path, source); + try this.custom_files_loaded.put(file_path, source.*); } }; diff --git a/src/ini.zig b/src/ini.zig index e3f8f2b46a..06b7bdfb6e 100644 --- a/src/ini.zig +++ b/src/ini.zig @@ -515,7 +515,7 @@ pub const IniTestingAPIs = struct { defer npmrc_contents.deref(); const npmrc_utf8 = npmrc_contents.toUTF8(bun.default_allocator); defer npmrc_utf8.deinit(); - const source = bun.logger.Source.initPathString("", npmrc_utf8.slice()); + const source = &bun.logger.Source.initPathString("", npmrc_utf8.slice()); var log = bun.logger.Log.init(bun.default_allocator); defer log.deinit(); @@ -565,7 +565,7 @@ pub const IniTestingAPIs = struct { install.* = std.mem.zeroes(bun.Schema.Api.BunInstall); var configs = std.ArrayList(ConfigIterator.Item).init(allocator); defer configs.deinit(); - loadNpmrc(allocator, install, env, ".npmrc", &log, &source, &configs) catch { + loadNpmrc(allocator, install, env, ".npmrc", &log, source, &configs) catch { return log.toJS(globalThis, allocator, "error"); }; @@ -875,14 +875,14 @@ pub fn loadNpmrcConfig( } for (npmrc_paths) |npmrc_path| { - const source = bun.sys.File.toSource(npmrc_path, allocator, .{ .convert_bom = true }).unwrap() catch |err| { + const source = &(bun.sys.File.toSource(npmrc_path, allocator, .{ .convert_bom = true }).unwrap() catch |err| { if (auto_loaded) continue; Output.err(err, "failed to read .npmrc: \"{s}\"", .{npmrc_path}); Global.crash(); - }; + }); defer allocator.free(source.contents); - loadNpmrc(allocator, install, env, npmrc_path, &log, &source, &configs) catch |err| { + loadNpmrc(allocator, install, env, npmrc_path, &log, source, &configs) catch |err| { switch (err) { error.OutOfMemory => bun.outOfMemory(), } diff --git a/src/install/install.zig b/src/install/install.zig index 129ae35f75..6b201685f4 100644 --- a/src/install/install.zig +++ b/src/install/install.zig @@ -1245,7 +1245,7 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type { // Don't keep it open while we're parsing the JSON. // The longer the file stays open, the more likely it causes issues for // other processes on Windows. 
- const source = this.getInstalledPackageJsonSource(root_node_modules_dir, &mutable, resolution_tag) orelse return false; + const source = &(this.getInstalledPackageJsonSource(root_node_modules_dir, &mutable, resolution_tag) orelse return false); var log = logger.Log.init(this.allocator); defer log.deinit(); @@ -1254,7 +1254,7 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type { var package_json_checker = JSON.PackageJSONVersionChecker.init( this.allocator, - &source, + source, &log, ) catch return false; _ = package_json_checker.parseExpr() catch return false; @@ -2951,17 +2951,17 @@ pub const PackageManager = struct { const key = allocator.dupeZ(u8, path) catch bun.outOfMemory(); entry.key_ptr.* = key; - const source = bun.sys.File.toSource(key, allocator, .{}).unwrap() catch |err| { + const source = &(bun.sys.File.toSource(key, allocator, .{}).unwrap() catch |err| { _ = this.map.remove(key); allocator.free(key); return .{ .read_err = err }; - }; + }); if (comptime opts.init_reset_store) initializeStore(); const json = JSON.parsePackageJSONUTF8WithOpts( - &source, + source, log, allocator, .{ @@ -2980,7 +2980,7 @@ pub const PackageManager = struct { entry.value_ptr.* = .{ .root = json.root.deepClone(bun.default_allocator) catch bun.outOfMemory(), - .source = source, + .source = source.*, .indentation = json.indentation, }; @@ -2992,7 +2992,7 @@ pub const PackageManager = struct { this: *@This(), allocator: std.mem.Allocator, log: *logger.Log, - source: logger.Source, + source: *const logger.Source, comptime opts: GetJSONOptions, ) GetResult { bun.assertWithLocation(std.fs.path.isAbsolute(source.path.text), @src()); @@ -3015,7 +3015,7 @@ pub const PackageManager = struct { initializeStore(); const json_result = JSON.parsePackageJSONUTF8WithOpts( - &source, + source, log, allocator, .{ @@ -3033,7 +3033,7 @@ pub const PackageManager = struct { entry.value_ptr.* = .{ .root = json.root.deepClone(allocator) catch bun.outOfMemory(), - .source = source, + .source = source.*, .indentation = json.indentation, }; @@ -6149,7 +6149,7 @@ pub const PackageManager = struct { var pkg = Lockfile.Package{}; if (data.json) |json| { - const package_json_source = logger.Source.initPathString( + const package_json_source = &logger.Source.initPathString( json.path, json.buf, ); @@ -6227,7 +6227,7 @@ pub const PackageManager = struct { }, .local_tarball, .remote_tarball => { const json = data.json.?; - const package_json_source = logger.Source.initPathString( + const package_json_source = &logger.Source.initPathString( json.path, json.buf, ); @@ -6282,13 +6282,13 @@ pub const PackageManager = struct { }, else => if (data.json.?.buf.len > 0) { const json = data.json.?; - const package_json_source = logger.Source.initPathString( + const package_json_source = &logger.Source.initPathString( json.path, json.buf, ); initializeStore(); const json_root = JSON.parsePackageJSONUTF8( - &package_json_source, + package_json_source, manager.log, manager.allocator, ) catch |err| { @@ -7925,10 +7925,10 @@ pub const PackageManager = struct { // Step 1. 
parse the nearest package.json file { - const package_json_source = bun.sys.File.toSource(manager.original_package_json_path, ctx.allocator, .{}).unwrap() catch |err| { + const package_json_source = &(bun.sys.File.toSource(manager.original_package_json_path, ctx.allocator, .{}).unwrap() catch |err| { Output.errGeneric("failed to read \"{s}\" for linking: {s}", .{ manager.original_package_json_path, @errorName(err) }); Global.crash(); - }; + }); lockfile.initEmpty(ctx.allocator); var resolver: void = {}; @@ -8106,10 +8106,10 @@ pub const PackageManager = struct { // Step 1. parse the nearest package.json file { - const package_json_source = bun.sys.File.toSource(manager.original_package_json_path, ctx.allocator, .{}).unwrap() catch |err| { + const package_json_source = &(bun.sys.File.toSource(manager.original_package_json_path, ctx.allocator, .{}).unwrap() catch |err| { Output.errGeneric("failed to read \"{s}\" for unlinking: {s}", .{ manager.original_package_json_path, @errorName(err) }); Global.crash(); - }; + }); lockfile.initEmpty(ctx.allocator); var resolver: void = {}; @@ -8999,11 +8999,11 @@ pub const PackageManager = struct { } } - const source = logger.Source.initPathString("package.json", new_package_json_source); + const source = &logger.Source.initPathString("package.json", new_package_json_source); // Now, we _re_ parse our in-memory edited package.json // so we can commit the version we changed from the lockfile - var new_package_json = JSON.parsePackageJSONUTF8(&source, manager.log, manager.allocator) catch |err| { + var new_package_json = JSON.parsePackageJSONUTF8(source, manager.log, manager.allocator) catch |err| { Output.prettyErrorln("package.json failed to parse due to error {s}", .{@errorName(err)}); Global.crash(); }; @@ -9038,7 +9038,7 @@ pub const PackageManager = struct { @TypeOf(&package_json_writer_two), &package_json_writer_two, new_package_json, - &source, + source, .{ .indent = current_package_json_indent, .mangled_props = null, @@ -9353,7 +9353,7 @@ pub const PackageManager = struct { .path => brk: { var lockfile = manager.lockfile; - const package_json_source: logger.Source = src: { + const package_json_source: *const logger.Source = &src: { const package_json_path = bun.path.joinZ(&[_][]const u8{ argument, "package.json" }, .auto); switch (bun.sys.File.toSource(package_json_path, manager.allocator, .{})) { @@ -9367,7 +9367,7 @@ pub const PackageManager = struct { defer manager.allocator.free(package_json_source.contents); initializeStore(); - const json = JSON.parsePackageJSONUTF8(&package_json_source, manager.log, manager.allocator) catch |err| { + const json = JSON.parsePackageJSONUTF8(package_json_source, manager.log, manager.allocator) catch |err| { manager.log.print(Output.errorWriter()) catch {}; Output.prettyErrorln("{s} parsing package.json in \"{s}\"", .{ @errorName(err), package_json_source.path.prettyDir() }); Global.crash(); @@ -9764,7 +9764,7 @@ pub const PackageManager = struct { var resolution_buf: [1024]u8 = undefined; const _cache_dir: std.fs.Dir, const _cache_dir_subpath: stringZ, const _changes_dir: []const u8, const _pkg: Package = switch (arg_kind) { .path => result: { - const package_json_source: logger.Source = brk: { + const package_json_source: *const logger.Source = &brk: { const package_json_path = bun.path.joinZ(&[_][]const u8{ argument, "package.json" }, .auto); switch (bun.sys.File.toSource(package_json_path, manager.allocator, .{})) { @@ -9778,7 +9778,7 @@ pub const PackageManager = struct { defer 
manager.allocator.free(package_json_source.contents); initializeStore(); - const json = JSON.parsePackageJSONUTF8(&package_json_source, manager.log, manager.allocator) catch |err| { + const json = JSON.parsePackageJSONUTF8(package_json_source, manager.log, manager.allocator) catch |err| { manager.log.print(Output.errorWriter()) catch {}; Output.prettyErrorln("{s} parsing package.json in \"{s}\"", .{ @errorName(err), package_json_source.path.prettyDir() }); Global.crash(); @@ -12447,7 +12447,7 @@ pub const PackageManager = struct { manager.progress = .{}; // Step 2. Parse the package.json file - const root_package_json_source = logger.Source.initPathString(package_json_cwd, root_package_json_contents); + const root_package_json_source = &logger.Source.initPathString(package_json_cwd, root_package_json_contents); switch (load_result) { .err => |cause| { diff --git a/src/install/lockfile.zig b/src/install/lockfile.zig index c328b05263..1bb7834bc4 100644 --- a/src/install/lockfile.zig +++ b/src/install/lockfile.zig @@ -244,9 +244,9 @@ pub fn loadFromDir( }; if (lockfile_format == .text) { - const source = logger.Source.initPathString("bun.lock", buf); + const source = &logger.Source.initPathString("bun.lock", buf); initializeStore(); - const json = JSON.parsePackageJSONUTF8(&source, log, allocator) catch |err| { + const json = JSON.parsePackageJSONUTF8(source, log, allocator) catch |err| { return .{ .err = .{ .step = .parse_file, @@ -257,7 +257,7 @@ pub fn loadFromDir( }; }; - TextLockfile.parseIntoBinaryLockfile(this, allocator, json, &source, log, manager) catch |err| { + TextLockfile.parseIntoBinaryLockfile(this, allocator, json, source, log, manager) catch |err| { switch (err) { error.OutOfMemory => bun.outOfMemory(), else => { @@ -306,13 +306,13 @@ pub fn loadFromDir( const text_lockfile_bytes = writer_buf.list.items; - const source = logger.Source.initPathString("bun.lock", text_lockfile_bytes); + const source = &logger.Source.initPathString("bun.lock", text_lockfile_bytes); initializeStore(); - const json = JSON.parsePackageJSONUTF8(&source, log, allocator) catch |err| { + const json = JSON.parsePackageJSONUTF8(source, log, allocator) catch |err| { Output.panic("failed to print valid json from binary lockfile: {s}", .{@errorName(err)}); }; - TextLockfile.parseIntoBinaryLockfile(this, allocator, json, &source, log, manager) catch |err| { + TextLockfile.parseIntoBinaryLockfile(this, allocator, json, source, log, manager) catch |err| { Output.panic("failed to parse text lockfile converted from binary lockfile: {s}", .{@errorName(err)}); }; diff --git a/src/install/lockfile/OverrideMap.zig b/src/install/lockfile/OverrideMap.zig index 9dfeb92892..271486af76 100644 --- a/src/install/lockfile/OverrideMap.zig +++ b/src/install/lockfile/OverrideMap.zig @@ -113,7 +113,7 @@ pub fn parseAppend( lockfile: *Lockfile, root_package: *Lockfile.Package, log: *logger.Log, - json_source: logger.Source, + json_source: *const logger.Source, expr: Expr, builder: *Lockfile.StringBuilder, ) !void { @@ -134,13 +134,13 @@ pub fn parseFromOverrides( pm: *PackageManager, lockfile: *Lockfile, root_package: *Lockfile.Package, - source: logger.Source, + source: *const logger.Source, log: *logger.Log, expr: Expr, builder: *Lockfile.StringBuilder, ) !void { if (expr.data != .e_object) { - try log.addWarningFmt(&source, expr.loc, lockfile.allocator, "\"overrides\" must be an object", .{}); + try log.addWarningFmt(source, expr.loc, lockfile.allocator, "\"overrides\" must be an object", .{}); return error.Invalid; } @@ 
-150,7 +150,7 @@ pub fn parseFromOverrides( const key = prop.key.?; const k = key.asString(lockfile.allocator).?; if (k.len == 0) { - try log.addWarningFmt(&source, key.loc, lockfile.allocator, "Missing overridden package name", .{}); + try log.addWarningFmt(source, key.loc, lockfile.allocator, "Missing overridden package name", .{}); continue; } @@ -165,26 +165,26 @@ pub fn parseFromOverrides( if (value_expr.asProperty(".")) |dot| { if (dot.expr.data == .e_string) { if (value_expr.data.e_object.properties.len > 1) { - try log.addWarningFmt(&source, value_expr.loc, lockfile.allocator, "Bun currently does not support nested \"overrides\"", .{}); + try log.addWarningFmt(source, value_expr.loc, lockfile.allocator, "Bun currently does not support nested \"overrides\"", .{}); } break :value dot.expr; } else { - try log.addWarningFmt(&source, value_expr.loc, lockfile.allocator, "Invalid override value for \"{s}\"", .{k}); + try log.addWarningFmt(source, value_expr.loc, lockfile.allocator, "Invalid override value for \"{s}\"", .{k}); continue; } } else { - try log.addWarningFmt(&source, value_expr.loc, lockfile.allocator, "Bun currently does not support nested \"overrides\"", .{}); + try log.addWarningFmt(source, value_expr.loc, lockfile.allocator, "Bun currently does not support nested \"overrides\"", .{}); continue; } } - try log.addWarningFmt(&source, value_expr.loc, lockfile.allocator, "Invalid override value for \"{s}\"", .{k}); + try log.addWarningFmt(source, value_expr.loc, lockfile.allocator, "Invalid override value for \"{s}\"", .{k}); continue; }; const version_str = value.data.e_string.slice(lockfile.allocator); if (strings.hasPrefixComptime(version_str, "patch:")) { // TODO(dylan-conway): apply .patch files to packages - try log.addWarningFmt(&source, key.loc, lockfile.allocator, "Bun currently does not support patched package \"overrides\"", .{}); + try log.addWarningFmt(source, key.loc, lockfile.allocator, "Bun currently does not support patched package \"overrides\"", .{}); continue; } @@ -212,13 +212,13 @@ pub fn parseFromResolutions( pm: *PackageManager, lockfile: *Lockfile, root_package: *Lockfile.Package, - source: logger.Source, + source: *const logger.Source, log: *logger.Log, expr: Expr, builder: *Lockfile.StringBuilder, ) !void { if (expr.data != .e_object) { - try log.addWarningFmt(&source, expr.loc, lockfile.allocator, "\"resolutions\" must be an object with string values", .{}); + try log.addWarningFmt(source, expr.loc, lockfile.allocator, "\"resolutions\" must be an object with string values", .{}); return; } try this.map.ensureUnusedCapacity(lockfile.allocator, expr.data.e_object.properties.len); @@ -228,12 +228,12 @@ pub fn parseFromResolutions( if (strings.hasPrefixComptime(k, "**/")) k = k[3..]; if (k.len == 0) { - try log.addWarningFmt(&source, key.loc, lockfile.allocator, "Missing resolution package name", .{}); + try log.addWarningFmt(source, key.loc, lockfile.allocator, "Missing resolution package name", .{}); continue; } const value = prop.value.?; if (value.data != .e_string) { - try log.addWarningFmt(&source, key.loc, lockfile.allocator, "Expected string value for resolution \"{s}\"", .{k}); + try log.addWarningFmt(source, key.loc, lockfile.allocator, "Expected string value for resolution \"{s}\"", .{k}); continue; } // currently we only support one level deep, so we should error if there are more than one @@ -241,22 +241,22 @@ pub fn parseFromResolutions( // - "@namespace/hello/world" if (k[0] == '@') { const first_slash = strings.indexOfChar(k, '/') orelse 
{ - try log.addWarningFmt(&source, key.loc, lockfile.allocator, "Invalid package name \"{s}\"", .{k}); + try log.addWarningFmt(source, key.loc, lockfile.allocator, "Invalid package name \"{s}\"", .{k}); continue; }; if (strings.indexOfChar(k[first_slash + 1 ..], '/') != null) { - try log.addWarningFmt(&source, key.loc, lockfile.allocator, "Bun currently does not support nested \"resolutions\"", .{}); + try log.addWarningFmt(source, key.loc, lockfile.allocator, "Bun currently does not support nested \"resolutions\"", .{}); continue; } } else if (strings.indexOfChar(k, '/') != null) { - try log.addWarningFmt(&source, key.loc, lockfile.allocator, "Bun currently does not support nested \"resolutions\"", .{}); + try log.addWarningFmt(source, key.loc, lockfile.allocator, "Bun currently does not support nested \"resolutions\"", .{}); continue; } const version_str = value.data.e_string.data; if (strings.hasPrefixComptime(version_str, "patch:")) { // TODO(dylan-conway): apply .patch files to packages - try log.addWarningFmt(&source, key.loc, lockfile.allocator, "Bun currently does not support patched package \"resolutions\"", .{}); + try log.addWarningFmt(source, key.loc, lockfile.allocator, "Bun currently does not support patched package \"resolutions\"", .{}); continue; } @@ -283,7 +283,7 @@ pub fn parseOverrideValue( lockfile: *Lockfile, package_manager: *PackageManager, root_package: *Lockfile.Package, - source: logger.Source, + source: *const logger.Source, loc: logger.Loc, log: *logger.Log, key: []const u8, @@ -291,7 +291,7 @@ pub fn parseOverrideValue( builder: *Lockfile.StringBuilder, ) !?Dependency { if (value.len == 0) { - try log.addWarningFmt(&source, loc, lockfile.allocator, "Missing " ++ field ++ " value", .{}); + try log.addWarningFmt(source, loc, lockfile.allocator, "Missing " ++ field ++ " value", .{}); return null; } @@ -309,7 +309,7 @@ pub fn parseOverrideValue( return dep; } } - try log.addWarningFmt(&source, loc, lockfile.allocator, "Could not resolve " ++ field ++ " \"{s}\" (you need \"{s}\" in your dependencies)", .{ value, ref_name }); + try log.addWarningFmt(source, loc, lockfile.allocator, "Could not resolve " ++ field ++ " \"{s}\" (you need \"{s}\" in your dependencies)", .{ value, ref_name }); return null; } @@ -331,7 +331,7 @@ pub fn parseOverrideValue( log, package_manager, ) orelse { - try log.addWarningFmt(&source, loc, lockfile.allocator, "Invalid " ++ field ++ " value \"{s}\"", .{value}); + try log.addWarningFmt(source, loc, lockfile.allocator, "Invalid " ++ field ++ " value \"{s}\"", .{value}); return null; }, }; diff --git a/src/install/lockfile/Package.zig b/src/install/lockfile/Package.zig index e1ed1d1c93..7b48673e0e 100644 --- a/src/install/lockfile/Package.zig +++ b/src/install/lockfile/Package.zig @@ -853,10 +853,10 @@ pub const Package = extern struct { var local_buf: bun.PathBuffer = undefined; const package_json_path = Path.joinAbsStringBuf(FileSystem.instance.top_level_dir, &local_buf, &.{ path, "package.json" }, .auto); - const source = bun.sys.File.toSource(package_json_path, allocator, .{}).unwrap() catch { + const source = &(bun.sys.File.toSource(package_json_path, allocator, .{}).unwrap() catch { // Can't guarantee this workspace still exists break :brk false; - }; + }); var workspace = Package{}; @@ -952,13 +952,13 @@ pub const Package = extern struct { pm: *PackageManager, allocator: Allocator, log: *logger.Log, - source: logger.Source, + source: *const logger.Source, comptime ResolverContext: type, resolver: *ResolverContext, comptime features: 
Features, ) !void { initializeStore(); - const json = JSON.parsePackageJSONUTF8(&source, log, allocator) catch |err| { + const json = JSON.parsePackageJSONUTF8(source, log, allocator) catch |err| { log.print(Output.errorWriter()) catch {}; Output.prettyErrorln("{s} parsing package.json in \"{s}\"", .{ @errorName(err), source.path.prettyDir() }); Global.crash(); @@ -982,7 +982,7 @@ pub const Package = extern struct { pm: *PackageManager, allocator: Allocator, log: *logger.Log, - source: logger.Source, + source: *const logger.Source, comptime group: DependencyGroup, string_builder: *StringBuilder, comptime features: Features, @@ -1139,7 +1139,7 @@ pub const Package = extern struct { // workspace is not required to have a version, but if it does // and this version doesn't match it, fail to install try log.addErrorFmt( - &source, + source, logger.Loc.Empty, allocator, "No matching version for workspace dependency \"{s}\". Version: \"{s}\"", @@ -1245,11 +1245,11 @@ pub const Package = extern struct { notes[0] = .{ .text = try std.fmt.allocPrint(lockfile.allocator, "\"{s}\" originally specified here", .{external_alias.slice(buf)}), - .location = logger.Location.initOrNull(&source, source.rangeOfString(entry.value_ptr.*)), + .location = logger.Location.initOrNull(source, source.rangeOfString(entry.value_ptr.*)), }; try log.addRangeWarningFmtWithNotes( - &source, + source, source.rangeOfString(key_loc), lockfile.allocator, notes, @@ -1271,7 +1271,7 @@ pub const Package = extern struct { pm: *PackageManager, allocator: Allocator, log: *logger.Log, - source: logger.Source, + source: *const logger.Source, json: Expr, comptime ResolverContext: type, resolver: *ResolverContext, @@ -1437,7 +1437,7 @@ pub const Package = extern struct { switch (dependencies_q.expr.data) { .e_array => |arr| { if (!group.behavior.isWorkspace()) { - log.addErrorFmt(&source, dependencies_q.loc, allocator, + log.addErrorFmt(source, dependencies_q.loc, allocator, \\{0s} expects a map of specifiers, e.g. \\ "{0s}": {{ \\ "bun": "latest" @@ -1450,7 +1450,7 @@ pub const Package = extern struct { &pm.workspace_package_json_cache, log, arr, - &source, + source, dependencies_q.loc, &string_builder, ); @@ -1468,7 +1468,7 @@ pub const Package = extern struct { // if (obj.get("packages")) |packages_query| { if (packages_query.data != .e_array) { - log.addErrorFmt(&source, packages_query.loc, allocator, + log.addErrorFmt(source, packages_query.loc, allocator, // TODO: what if we could comptime call the syntax highlighter \\"workspaces.packages" expects an array of strings, e.g. \\ "workspaces": {{ @@ -1484,7 +1484,7 @@ pub const Package = extern struct { &pm.workspace_package_json_cache, log, packages_query.data.e_array, - &source, + source, packages_query.loc, &string_builder, ); @@ -1495,7 +1495,7 @@ pub const Package = extern struct { for (obj.properties.slice()) |item| { const key = item.key.?.asString(allocator).?; const value = item.value.?.asString(allocator) orelse { - log.addErrorFmt(&source, item.value.?.loc, allocator, + log.addErrorFmt(source, item.value.?.loc, allocator, // TODO: what if we could comptime call the syntax highlighter \\{0s} expects a map of specifiers, e.g. \\ "{0s}": {{ @@ -1518,7 +1518,7 @@ pub const Package = extern struct { }, else => { if (group.behavior.isWorkspace()) { - log.addErrorFmt(&source, dependencies_q.loc, allocator, + log.addErrorFmt(source, dependencies_q.loc, allocator, // TODO: what if we could comptime call the syntax highlighter \\"workspaces" expects an array of strings, e.g. 
\\ "workspaces": [ @@ -1526,7 +1526,7 @@ pub const Package = extern struct { \\ ] , .{}) catch {}; } else { - log.addErrorFmt(&source, dependencies_q.loc, allocator, + log.addErrorFmt(source, dependencies_q.loc, allocator, \\{0s} expects a map of specifiers, e.g. \\ "{0s}": {{ \\ "bun": "latest" @@ -1547,7 +1547,7 @@ pub const Package = extern struct { try lockfile.trusted_dependencies.?.ensureUnusedCapacity(allocator, arr.items.len); for (arr.slice()) |item| { const name = item.asString(allocator) orelse { - log.addErrorFmt(&source, q.loc, allocator, + log.addErrorFmt(source, q.loc, allocator, \\trustedDependencies expects an array of strings, e.g. \\ "trustedDependencies": [ \\ "package_name" @@ -1559,7 +1559,7 @@ pub const Package = extern struct { } }, else => { - log.addErrorFmt(&source, q.loc, allocator, + log.addErrorFmt(source, q.loc, allocator, \\trustedDependencies expects an array of strings, e.g. \\ "trustedDependencies": [ \\ "package_name" @@ -1940,7 +1940,7 @@ pub const Package = extern struct { if (comptime features.is_main) { try lockfile.overrides.parseAppend(pm, lockfile, package, log, source, json, &string_builder); if (json.get("workspaces")) |workspaces_expr| { - try lockfile.catalogs.parseAppend(pm, lockfile, log, &source, workspaces_expr, &string_builder); + try lockfile.catalogs.parseAppend(pm, lockfile, log, source, workspaces_expr, &string_builder); } } diff --git a/src/install/npm.zig b/src/install/npm.zig index 0b2adecb71..d333ea5c20 100644 --- a/src/install/npm.zig +++ b/src/install/npm.zig @@ -170,8 +170,8 @@ pub fn whoami(allocator: std.mem.Allocator, manager: *PackageManager) WhoamiErro } var log = logger.Log.init(allocator); - const source = logger.Source.initPathString("???", response_buf.list.items); - const json = JSON.parseUTF8(&source, &log, allocator) catch |err| { + const source = &logger.Source.initPathString("???", response_buf.list.items); + const json = JSON.parseUTF8(source, &log, allocator) catch |err| { switch (err) { error.OutOfMemory => |oom| return oom, else => { @@ -199,8 +199,8 @@ pub fn responseError( ) OOM!noreturn { const message = message: { var log = logger.Log.init(allocator); - const source = logger.Source.initPathString("???", response_body.list.items); - const json = JSON.parseUTF8(&source, &log, allocator) catch |err| { + const source = &logger.Source.initPathString("???", response_body.list.items); + const json = JSON.parseUTF8(source, &log, allocator) catch |err| { switch (err) { error.OutOfMemory => |oom| return oom, else => break :message null, @@ -1563,20 +1563,20 @@ pub const PackageManifest = struct { etag: []const u8, public_max_age: u32, ) !?PackageManifest { - const source = logger.Source.initPathString(expected_name, json_buffer); + const source = &logger.Source.initPathString(expected_name, json_buffer); initializeStore(); defer bun.JSAst.Stmt.Data.Store.memory_allocator.?.pop(); var arena = bun.ArenaAllocator.init(allocator); defer arena.deinit(); const json = JSON.parseUTF8( - &source, + source, log, arena.allocator(), ) catch return null; if (json.asProperty("error")) |error_q| { if (error_q.expr.asString(allocator)) |err| { - log.addErrorFmt(&source, logger.Loc.Empty, allocator, "npm error: {s}", .{err}) catch unreachable; + log.addErrorFmt(source, logger.Loc.Empty, allocator, "npm error: {s}", .{err}) catch unreachable; return null; } } @@ -1647,7 +1647,7 @@ pub const PackageManifest = struct { if (Environment.allow_assert) bun.assertWithLocation(parsed_version.valid, @src()); if (!parsed_version.valid) { - 
log.addErrorFmt(&source, prop.value.?.loc, allocator, "Failed to parse dependency {s}", .{version_name}) catch unreachable; + log.addErrorFmt(source, prop.value.?.loc, allocator, "Failed to parse dependency {s}", .{version_name}) catch unreachable; continue; } diff --git a/src/install/resolvers/folder_resolver.zig b/src/install/resolvers/folder_resolver.zig index 6e5bad2f3f..96662f4f2d 100644 --- a/src/install/resolvers/folder_resolver.zig +++ b/src/install/resolvers/folder_resolver.zig @@ -189,7 +189,7 @@ pub const FolderResolution = union(Tag) { manager, manager.allocator, manager.log, - json.source, + &json.source, json.root, ResolverType, resolver, @@ -199,7 +199,7 @@ pub const FolderResolution = union(Tag) { const tracer = bun.perf.trace("FolderResolver.readPackageJSONFromDisk.folder"); defer tracer.end(); - const source = brk: { + const source = &brk: { var file = bun.sys.File.from(try bun.sys.openatA( bun.FD.cwd(), abs, diff --git a/src/js/node/net.ts b/src/js/node/net.ts index 3c6e4bfb95..840818c886 100644 --- a/src/js/node/net.ts +++ b/src/js/node/net.ts @@ -1708,12 +1708,12 @@ function internalConnect(self, options, address, port, addressType, localAddress if (localAddress || localPort) { if (addressType === 4) { - localAddress ||= '0.0.0.0'; + localAddress ||= "0.0.0.0"; // TODO: // err = self._handle.bind(localAddress, localPort); } else { // addressType === 6 - localAddress ||= '::'; + localAddress ||= "::"; // TODO: // err = self._handle.bind6(localAddress, localPort, flags); } diff --git a/src/js_ast.zig b/src/js_ast.zig index e5cb2a5ad5..79af1fe750 100644 --- a/src/js_ast.zig +++ b/src/js_ast.zig @@ -3352,8 +3352,8 @@ pub const Expr = struct { const mime_type = mime_type_ orelse MimeType.init(blob.content_type, null, null); if (mime_type.category == .json) { - var source = logger.Source.initPathString("fetch.json", bytes); - var out_expr = JSONParser.parseForMacro(&source, log, allocator) catch { + const source = &logger.Source.initPathString("fetch.json", bytes); + var out_expr = JSONParser.parseForMacro(source, log, allocator) catch { return error.MacroFailed; }; out_expr.loc = loc; diff --git a/src/js_lexer.zig b/src/js_lexer.zig index b0f40e5d02..4795eae387 100644 --- a/src/js_lexer.zig +++ b/src/js_lexer.zig @@ -2048,10 +2048,10 @@ fn NewLexer_( }; } - pub fn initTSConfig(log: *logger.Log, source: logger.Source, allocator: std.mem.Allocator) !LexerType { + pub fn initJSON(log: *logger.Log, source: *const logger.Source, allocator: std.mem.Allocator) !LexerType { var lex = LexerType{ .log = log, - .source = source, + .source = source.*, .temp_buffer_u16 = std.ArrayList(u16).init(allocator), .prev_error_loc = logger.Loc.Empty, .allocator = allocator, @@ -2064,26 +2064,10 @@ fn NewLexer_( return lex; } - pub fn initJSON(log: *logger.Log, source: logger.Source, allocator: std.mem.Allocator) !LexerType { - var lex = LexerType{ - .log = log, - .source = source, - .temp_buffer_u16 = std.ArrayList(u16).init(allocator), - .prev_error_loc = logger.Loc.Empty, - .allocator = allocator, - .comments_to_preserve_before = std.ArrayList(js_ast.G.Comment).init(allocator), - .all_comments = std.ArrayList(logger.Range).init(allocator), - }; - lex.step(); - try lex.next(); - - return lex; - } - - pub fn initWithoutReading(log: *logger.Log, source: logger.Source, allocator: std.mem.Allocator) LexerType { + pub fn initWithoutReading(log: *logger.Log, source: *const logger.Source, allocator: std.mem.Allocator) LexerType { return LexerType{ .log = log, - .source = source, + .source = source.*, 
.temp_buffer_u16 = std.ArrayList(u16).init(allocator), .prev_error_loc = logger.Loc.Empty, .allocator = allocator, @@ -2092,7 +2076,7 @@ fn NewLexer_( }; } - pub fn init(log: *logger.Log, source: logger.Source, allocator: std.mem.Allocator) !LexerType { + pub fn init(log: *logger.Log, source: *const logger.Source, allocator: std.mem.Allocator) !LexerType { var lex = initWithoutReading(log, source, allocator); lex.step(); try lex.next(); diff --git a/src/js_parser.zig b/src/js_parser.zig index 202302c7d3..80b130947e 100644 --- a/src/js_parser.zig +++ b/src/js_parser.zig @@ -4279,7 +4279,7 @@ pub const Parser = struct { return Parser{ .options = _options, .allocator = allocator, - .lexer = try js_lexer.Lexer.init(log, source.*, allocator), + .lexer = try js_lexer.Lexer.init(log, source, allocator), .define = define, .source = source, .log = log, @@ -24150,7 +24150,7 @@ pub fn newLazyExportASTImpl( var parser = Parser{ .options = opts, .allocator = allocator, - .lexer = js_lexer.Lexer.initWithoutReading(log, source.*, allocator), + .lexer = js_lexer.Lexer.initWithoutReading(log, source, allocator), .define = define, .source = source, .log = log, diff --git a/src/js_printer.zig b/src/js_printer.zig index 62a91dd6c3..b3988bb05d 100644 --- a/src/js_printer.zig +++ b/src/js_printer.zig @@ -406,14 +406,14 @@ pub const SourceMapHandler = struct { ctx: *anyopaque, callback: Callback, - const Callback = *const fn (*anyopaque, chunk: SourceMap.Chunk, source: logger.Source) anyerror!void; - pub fn onSourceMapChunk(self: *const @This(), chunk: SourceMap.Chunk, source: logger.Source) anyerror!void { + const Callback = *const fn (*anyopaque, chunk: SourceMap.Chunk, source: *const logger.Source) anyerror!void; + pub fn onSourceMapChunk(self: *const @This(), chunk: SourceMap.Chunk, source: *const logger.Source) anyerror!void { try self.callback(self.ctx, chunk, source); } - pub fn For(comptime Type: type, comptime handler: (fn (t: *Type, chunk: SourceMap.Chunk, source: logger.Source) anyerror!void)) type { + pub fn For(comptime Type: type, comptime handler: (fn (t: *Type, chunk: SourceMap.Chunk, source: *const logger.Source) anyerror!void)) type { return struct { - pub fn onChunk(self: *anyopaque, chunk: SourceMap.Chunk, source: logger.Source) anyerror!void { + pub fn onChunk(self: *anyopaque, chunk: SourceMap.Chunk, source: *const logger.Source) anyerror!void { try handler(@as(*Type, @ptrCast(@alignCast(self))), chunk, source); } @@ -5853,7 +5853,7 @@ pub fn printAst( cache.put(printer.writer.ctx.getWritten(), source_maps_chunk.buffer.list.items); } - try handler.onSourceMapChunk(source_maps_chunk, source.*); + try handler.onSourceMapChunk(source_maps_chunk, source); } else { if (opts.runtime_transpiler_cache) |cache| { cache.put(printer.writer.ctx.getWritten(), ""); @@ -5861,7 +5861,7 @@ pub fn printAst( } } else if (comptime generate_source_map) { if (opts.source_map_handler) |handler| { - try handler.onSourceMapChunk(printer.source_map_builder.generateChunk(printer.writer.ctx.getWritten()), source.*); + try handler.onSourceMapChunk(printer.source_map_builder.generateChunk(printer.writer.ctx.getWritten()), source); } } @@ -6102,7 +6102,7 @@ pub fn printCommonJS( if (comptime generate_source_map) { if (opts.source_map_handler) |handler| { - try handler.onSourceMapChunk(printer.source_map_builder.generateChunk(printer.writer.ctx.getWritten()), source.*); + try handler.onSourceMapChunk(printer.source_map_builder.generateChunk(printer.writer.ctx.getWritten()), source); } } diff --git a/src/json_parser.zig 
b/src/json_parser.zig index b70715432a..5ad14e854b 100644 --- a/src/json_parser.zig +++ b/src/json_parser.zig @@ -127,11 +127,11 @@ fn JSONLikeParser_( allocator: std.mem.Allocator, list_allocator: std.mem.Allocator, - pub fn init(allocator: std.mem.Allocator, source_: logger.Source, log: *logger.Log) !Parser { + pub fn init(allocator: std.mem.Allocator, source_: *const logger.Source, log: *logger.Log) !Parser { return initWithListAllocator(allocator, allocator, source_, log); } - pub fn initWithListAllocator(allocator: std.mem.Allocator, list_allocator: std.mem.Allocator, source_: logger.Source, log: *logger.Log) !Parser { + pub fn initWithListAllocator(allocator: std.mem.Allocator, list_allocator: std.mem.Allocator, source_: *const logger.Source, log: *logger.Log) !Parser { Expr.Data.Store.assert(); Stmt.Data.Store.assert(); @@ -296,7 +296,7 @@ fn JSONLikeParser_( }, else => { if (comptime maybe_auto_quote) { - p.lexer = try Lexer.initJSON(p.log, p.source().*, p.allocator); + p.lexer = try Lexer.initJSON(p.log, p.source(), p.allocator); try p.lexer.parseStringLiteral(0); return p.parseExpr(false, force_utf8); } @@ -360,7 +360,7 @@ pub const PackageJSONVersionChecker = struct { pub fn init(allocator: std.mem.Allocator, source: *const logger.Source, log: *logger.Log) !Parser { return Parser{ - .lexer = try Lexer.init(log, source.*, allocator), + .lexer = try Lexer.init(log, source, allocator), .allocator = allocator, .log = log, .source = source, @@ -690,7 +690,7 @@ pub fn parse( allocator: std.mem.Allocator, comptime force_utf8: bool, ) !Expr { - var parser = try JSONParser.init(allocator, source.*, log); + var parser = try JSONParser.init(allocator, source, log); switch (source.contents.len) { // This is to be consisntent with how disabled JS files are handled 0 => { @@ -746,7 +746,7 @@ pub fn parsePackageJSONUTF8( .is_json = true, .allow_comments = true, .allow_trailing_commas = true, - }).init(allocator, source.*, log); + }).init(allocator, source, log); bun.assert(parser.source().contents.len > 0); return try parser.parseExpr(false, true); @@ -785,7 +785,7 @@ pub fn parsePackageJSONUTF8WithOpts( else => {}, } - var parser = try JSONLikeParser(opts).init(allocator, source.*, log); + var parser = try JSONLikeParser(opts).init(allocator, source, log); bun.assert(parser.source().contents.len > 0); const root = try parser.parseExpr(false, true); @@ -835,7 +835,7 @@ pub fn parseUTF8Impl( else => {}, } - var parser = try JSONParser.init(allocator, source.*, log); + var parser = try JSONParser.init(allocator, source, log); bun.assert(parser.source().contents.len > 0); const result = try parser.parseExpr(false, true); @@ -865,7 +865,7 @@ pub fn parseForMacro(source: *const logger.Source, log: *logger.Log, allocator: else => {}, } - var parser = try JSONParserForMacro.init(allocator, source.*, log); + var parser = try JSONParserForMacro.init(allocator, source, log); return try parser.parseExpr(false, false); } @@ -929,7 +929,7 @@ pub fn parseEnvJSON(source: *const logger.Source, log: *logger.Log, allocator: s else => {}, } - var parser = try DotEnvJSONParser.init(allocator, source.*, log); + var parser = try DotEnvJSONParser.init(allocator, source, log); switch (source.contents[0]) { '{', '[', '0'...'9', '"', '\'' => { @@ -980,7 +980,7 @@ pub fn parseTSConfig(source: *const logger.Source, log: *logger.Log, allocator: else => {}, } - var parser = try TSConfigParser.init(allocator, source.*, log); + var parser = try TSConfigParser.init(allocator, source, log); return parser.parseExpr(false, 
force_utf8); } @@ -1002,11 +1002,11 @@ fn expectPrintedJSON(_contents: string, expected: string) !void { var log = logger.Log.init(default_allocator); defer log.msgs.deinit(); - var source = logger.Source.initPathString( + const source = &logger.Source.initPathString( "source.json", contents, ); - const expr = try parse(&source, &log, default_allocator); + const expr = try parse(source, &log, default_allocator); if (log.msgs.items.len > 0) { Output.panic("--FAIL--\nExpr {s}\nLog: {s}\n--FAIL--", .{ expr, log.msgs.items[0].data.text }); @@ -1014,7 +1014,7 @@ fn expectPrintedJSON(_contents: string, expected: string) !void { const buffer_writer = js_printer.BufferWriter.init(default_allocator); var writer = js_printer.BufferPrinter.init(buffer_writer); - const written = try js_printer.printJSON(@TypeOf(&writer), &writer, expr, &source, .{ + const written = try js_printer.printJSON(@TypeOf(&writer), &writer, expr, source, .{ .mangled_props = null, }); var js = writer.ctx.buffer.list.items.ptr[0 .. written + 1]; diff --git a/src/resolver/package_json.zig b/src/resolver/package_json.zig index e18f14d4d2..33305e8c64 100644 --- a/src/resolver/package_json.zig +++ b/src/resolver/package_json.zig @@ -616,7 +616,7 @@ pub const PackageJSON = struct { var json_source = logger.Source.initPathString(key_path.text, entry.contents); json_source.path.pretty = json_source.path.text; - const json: js_ast.Expr = (r.caches.json.parsePackageJSON(r.log, json_source, allocator, true) catch |err| { + const json: js_ast.Expr = (r.caches.json.parsePackageJSON(r.log, &json_source, allocator, true) catch |err| { if (Environment.isDebug) { Output.printError("{s}: JSON parse error: {s}", .{ package_json_path, @errorName(err) }); } diff --git a/src/resolver/resolver.zig b/src/resolver/resolver.zig index 27b2fe9d14..733127d023 100644 --- a/src/resolver/resolver.zig +++ b/src/resolver/resolver.zig @@ -2574,7 +2574,7 @@ pub const Resolver = struct { // then it will be undefined memory if we parse another tsconfig.json late const key_path = Fs.Path.init(r.fs.dirname_store.append(string, file) catch unreachable); - const source = logger.Source.initPathString(key_path.text, entry.contents); + const source = &logger.Source.initPathString(key_path.text, entry.contents); const file_dir = source.path.sourceDir(); var result = (try TSConfigJSON.parse(bun.default_allocator, r.log, source, &r.caches.json)) orelse return null; diff --git a/src/resolver/tsconfig_json.zig b/src/resolver/tsconfig_json.zig index 649db4a39c..9271601e44 100644 --- a/src/resolver/tsconfig_json.zig +++ b/src/resolver/tsconfig_json.zig @@ -146,7 +146,7 @@ pub const TSConfigJSON = struct { pub fn parse( allocator: std.mem.Allocator, log: *logger.Log, - source: logger.Source, + source: *const logger.Source, json_cache: *cache.Json, ) anyerror!?*TSConfigJSON { // Unfortunately "tsconfig.json" isn't actually JSON. 
It's some other @@ -178,7 +178,7 @@ pub const TSConfigJSON = struct { // Parse "baseUrl" if (compiler_opts.expr.asProperty("baseUrl")) |base_url_prop| { if ((base_url_prop.expr.asString(allocator))) |base_url| { - result.base_url = strReplacingTemplates(allocator, base_url, &source) catch return null; + result.base_url = strReplacingTemplates(allocator, base_url, source) catch return null; has_base_url = true; } } @@ -193,7 +193,7 @@ pub const TSConfigJSON = struct { // Parse "jsxFactory" if (compiler_opts.expr.asProperty("jsxFactory")) |jsx_prop| { if (jsx_prop.expr.asString(allocator)) |str| { - result.jsx.factory = try parseMemberExpressionForJSX(log, &source, jsx_prop.loc, str, allocator); + result.jsx.factory = try parseMemberExpressionForJSX(log, source, jsx_prop.loc, str, allocator); result.jsx_flags.insert(.factory); } } @@ -201,7 +201,7 @@ pub const TSConfigJSON = struct { // Parse "jsxFragmentFactory" if (compiler_opts.expr.asProperty("jsxFragmentFactory")) |jsx_prop| { if (jsx_prop.expr.asString(allocator)) |str| { - result.jsx.fragment = try parseMemberExpressionForJSX(log, &source, jsx_prop.loc, str, allocator); + result.jsx.fragment = try parseMemberExpressionForJSX(log, source, jsx_prop.loc, str, allocator); result.jsx_flags.insert(.fragment); } } @@ -256,7 +256,7 @@ pub const TSConfigJSON = struct { }, .remove => {}, else => { - log.addRangeWarningFmt(&source, source.rangeOfString(jsx_prop.loc), allocator, "Invalid value \"{s}\" for \"importsNotUsedAsValues\"", .{str}) catch {}; + log.addRangeWarningFmt(source, source.rangeOfString(jsx_prop.loc), allocator, "Invalid value \"{s}\" for \"importsNotUsedAsValues\"", .{str}) catch {}; }, } } @@ -270,7 +270,7 @@ pub const TSConfigJSON = struct { if (str.len > 0) { // Only warn when there is actually content // Sometimes, people do "moduleSuffixes": [""] - log.addWarning(&source, prefixes.loc, "moduleSuffixes is not supported yet") catch {}; + log.addWarning(source, prefixes.loc, "moduleSuffixes is not supported yet") catch {}; break :handle_module_prefixes; } } @@ -292,7 +292,7 @@ pub const TSConfigJSON = struct { const key_prop = property.key orelse continue; const key = (key_prop.asString(allocator)) orelse continue; - if (!TSConfigJSON.isValidTSConfigPathPattern(key, log, &source, key_prop.loc, allocator)) { + if (!TSConfigJSON.isValidTSConfigPathPattern(key, log, source, key_prop.loc, allocator)) { continue; } @@ -329,12 +329,12 @@ pub const TSConfigJSON = struct { var count: usize = 0; for (array) |expr| { if ((expr.asString(allocator))) |str_| { - const str = strReplacingTemplates(allocator, str_, &source) catch return null; + const str = strReplacingTemplates(allocator, str_, source) catch return null; errdefer allocator.free(str); if (TSConfigJSON.isValidTSConfigPathPattern( str, log, - &source, + source, expr.loc, allocator, ) and @@ -342,7 +342,7 @@ pub const TSConfigJSON = struct { TSConfigJSON.isValidTSConfigPathNoBaseURLPattern( str, log, - &source, + source, allocator, expr.loc, ))) @@ -362,7 +362,7 @@ pub const TSConfigJSON = struct { }, else => { log.addRangeWarningFmt( - &source, + source, source.rangeOfString(key_prop.loc), allocator, "Substitutions for pattern \"{s}\" should be an array", diff --git a/src/sourcemap/sourcemap.zig b/src/sourcemap/sourcemap.zig index 618ced3cd2..b15d5fe3d7 100644 --- a/src/sourcemap/sourcemap.zig +++ b/src/sourcemap/sourcemap.zig @@ -1146,7 +1146,7 @@ pub fn appendSourceMapChunk(j: *StringJoiner, allocator: std.mem.Allocator, prev pub fn appendSourceMappingURLRemote( origin: URL, 
- source: Logger.Source, + source: *const Logger.Source, asset_prefix_path: []const u8, comptime Writer: type, writer: Writer, @@ -1228,7 +1228,7 @@ pub const Chunk = struct { pub fn printSourceMapContents( chunk: Chunk, - source: Logger.Source, + source: *const Logger.Source, mutable: MutableString, include_sources_contents: bool, comptime ascii_only: bool, @@ -1245,7 +1245,7 @@ pub const Chunk = struct { pub fn printSourceMapContentsAtOffset( chunk: Chunk, - source: Logger.Source, + source: *const Logger.Source, mutable: MutableString, include_sources_contents: bool, offset: usize, diff --git a/src/transpiler.zig b/src/transpiler.zig index b4700061ab..848e389289 100644 --- a/src/transpiler.zig +++ b/src/transpiler.zig @@ -1034,7 +1034,7 @@ pub const Transpiler = struct { var input_fd: ?StoredFileDescriptorType = null; - const source: logger.Source = brk: { + const source: *const logger.Source = &brk: { if (this_parse.virtual_source) |virtual_source| { break :brk virtual_source.*; } @@ -1084,12 +1084,12 @@ pub const Transpiler = struct { }; if (comptime return_file_only) { - return ParseResult{ .source = source, .input_fd = input_fd, .loader = loader, .empty = true, .ast = js_ast.Ast.empty }; + return ParseResult{ .source = source.*, .input_fd = input_fd, .loader = loader, .empty = true, .ast = js_ast.Ast.empty }; } if (source.contents.len == 0 or (source.contents.len < 33 and std.mem.trim(u8, source.contents, "\n\r ").len == 0)) { if (!loader.handlesEmptyFile()) { - return ParseResult{ .source = source, .input_fd = input_fd, .loader = loader, .empty = true, .ast = js_ast.Ast.empty }; + return ParseResult{ .source = source.*, .input_fd = input_fd, .loader = loader, .empty = true, .ast = js_ast.Ast.empty }; } } @@ -1102,7 +1102,7 @@ pub const Transpiler = struct { // wasm magic number if (source.isWebAssembly()) { return ParseResult{ - .source = source, + .source = source.*, .input_fd = input_fd, .loader = .wasm, .empty = true, @@ -1167,11 +1167,11 @@ pub const Transpiler = struct { opts, transpiler.options.define, transpiler.log, - &source, + source, ) catch null) orelse return null) { .ast => |value| .{ .ast = value, - .source = source, + .source = source.*, .loader = loader, .input_fd = input_fd, .runtime_transpiler_cache = this_parse.runtime_transpiler_cache, @@ -1179,7 +1179,7 @@ pub const Transpiler = struct { .cached => .{ .ast = undefined, .runtime_transpiler_cache = this_parse.runtime_transpiler_cache, - .source = source, + .source = source.*, .loader = loader, .input_fd = input_fd, }, @@ -1203,7 +1203,7 @@ pub const Transpiler = struct { break :brk default_value; }, }, - .source = source, + .source = source.*, .loader = loader, .input_fd = input_fd, }, @@ -1214,11 +1214,11 @@ pub const Transpiler = struct { var expr = if (kind == .jsonc) // We allow importing tsconfig.*.json or jsconfig.*.json with comments // These files implicitly become JSONC files, which aligns with the behavior of text editors. 
- JSON.parseTSConfig(&source, transpiler.log, allocator, false) catch return null + JSON.parseTSConfig(source, transpiler.log, allocator, false) catch return null else if (kind == .json) - JSON.parse(&source, transpiler.log, allocator, false) catch return null + JSON.parse(source, transpiler.log, allocator, false) catch return null else if (kind == .toml) - TOML.parse(&source, transpiler.log, allocator, false) catch return null + TOML.parse(source, transpiler.log, allocator, false) catch return null else @compileError("unreachable"); @@ -1339,7 +1339,7 @@ pub const Transpiler = struct { return ParseResult{ .ast = ast, - .source = source, + .source = source.*, .loader = loader, .input_fd = input_fd, }; @@ -1363,7 +1363,7 @@ pub const Transpiler = struct { return ParseResult{ .ast = js_ast.Ast.initTest(parts), - .source = source, + .source = source.*, .loader = loader, .input_fd = input_fd, }; @@ -1383,7 +1383,7 @@ pub const Transpiler = struct { return ParseResult{ .ast = js_ast.Ast.empty, - .source = source, + .source = source.*, .loader = loader, .input_fd = input_fd, };
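
Note on the recurring change above: nearly every hunk swaps a `logger.Source` parameter for `*const logger.Source` and drops the matching `&source` or `source.*` at the call site. The sketch below is illustrative only, not code from this patch: `Source` is a trimmed stand-in for `logger.Source`, and `byValue`/`byConstPtr` are hypothetical helpers. It shows why the pointer form avoids whole-struct copies while keeping the callee read-only.

const std = @import("std");

// Trimmed stand-in for logger.Source; the real struct carries path,
// contents, index, and more, which is what makes by-value copies costly.
const Source = struct {
    path: []const u8,
    contents: []const u8,
};

// Before: by-value parameter. Zig may pass large arguments by implicit
// reference, but every place the struct is stored or re-passed by value
// is still a full copy.
fn byValue(source: Source) usize {
    return source.contents.len;
}

// After: pointer-to-const parameter. Only a pointer crosses the call
// boundary, and the callee keeps read-only access.
fn byConstPtr(source: *const Source) usize {
    return source.contents.len;
}

pub fn main() void {
    // Address-of-expression, as at the patch's call sites: the result is
    // materialized once into a stack temporary and used through `*const`.
    const source = &Source{ .path = "package.json", .contents = "{}" };
    std.debug.print("{d} == {d}\n", .{ byValue(source.*), byConstPtr(source) });
}

The same idea drives the `&brk: { ... }` and `&(... catch ...)` forms in the hunks above: taking the address of an expression lets Zig place the result directly into one stack temporary and hand out a `*const logger.Source`, instead of copying the struct into each callee.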