From 4fefb8507c2362860dc85c2aee77805b007e8873 Mon Sep 17 00:00:00 2001 From: Dylan Conway <35280289+dylan-conway@users.noreply.github.com> Date: Wed, 3 Jul 2024 23:10:34 -0700 Subject: [PATCH] respect package.json indentation in `bun install` (#12328) --- src/bun.js/api/BunObject.zig | 2 +- src/bun.js/api/server.zig | 1 + src/bundler/bundle_v2.zig | 12 +- src/cli/create_command.zig | 2 +- src/cli/init_command.zig | 1 + src/cli/pm_trusted_command.zig | 2 +- src/install/install.zig | 91 +++++++++--- src/install/lockfile.zig | 1 + src/js_lexer.zig | 46 ++++++- src/js_printer.zig | 129 +++++++++++------- src/json_parser.zig | 50 ++++++- .../registry/bun-install-registry.test.ts | 42 ++++++ 12 files changed, 297 insertions(+), 82 deletions(-) diff --git a/src/bun.js/api/BunObject.zig b/src/bun.js/api/BunObject.zig index 155cbba348..41e29669f5 100644 --- a/src/bun.js/api/BunObject.zig +++ b/src/bun.js/api/BunObject.zig @@ -3880,7 +3880,7 @@ const TOMLObject = struct { return .zero; }; var writer = js_printer.BufferPrinter.init(buffer_writer); - _ = js_printer.printJSON(*js_printer.BufferPrinter, &writer, parse_result, &source) catch { + _ = js_printer.printJSON(*js_printer.BufferPrinter, &writer, parse_result, &source, .{}) catch { globalThis.throwValue(log.toJS(globalThis, default_allocator, "Failed to print toml")); return .zero; }; diff --git a/src/bun.js/api/server.zig b/src/bun.js/api/server.zig index 3178abffb6..ac68ece62a 100644 --- a/src/bun.js/api/server.zig +++ b/src/bun.js/api/server.zig @@ -6166,6 +6166,7 @@ pub fn NewServer(comptime NamespaceType: type, comptime ssl_enabled_: bool, comp &writer, bun.Global.BunInfo.generate(*Bundler, &JSC.VirtualMachine.get().bundler, allocator) catch unreachable, &source, + .{}, ) catch unreachable; resp.writeStatus("200 OK"); diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig index c79a518633..30db6e9ce8 100644 --- a/src/bundler/bundle_v2.zig +++ b/src/bundler/bundle_v2.zig @@ -6785,12 +6785,11 @@ const LinkerContext = struct { const runtimeRequireRef = if (c.resolver.opts.target.isBun()) null else c.graph.symbols.follow(runtime_members.get("__require").?.ref); { - const indent: usize = 0; // TODO: IIFE indent const print_options = js_printer.Options{ - // TODO: IIFE - .indent = indent, + // TODO: IIFE indent + .indent = .{}, .has_run_symbol_renamer = true, .allocator = worker.allocator, @@ -7678,8 +7677,8 @@ const LinkerContext = struct { } const print_options = js_printer.Options{ - // TODO: IIFE - .indent = 0, + // TODO: IIFE indent + .indent = .{}, .has_run_symbol_renamer = true, .allocator = allocator, @@ -8959,8 +8958,7 @@ const LinkerContext = struct { const print_options = js_printer.Options{ // TODO: IIFE - .indent = 0, - + .indent = .{}, .commonjs_named_exports = ast.commonjs_named_exports, .commonjs_named_exports_ref = ast.exports_ref, .commonjs_named_exports_deoptimized = flags.wrap == .cjs, diff --git a/src/cli/create_command.zig b/src/cli/create_command.zig index d1a324a7ed..e6f00e0550 100644 --- a/src/cli/create_command.zig +++ b/src/cli/create_command.zig @@ -1436,7 +1436,7 @@ pub const CreateCommand = struct { const package_json_writer = JSPrinter.NewFileWriter(package_json_file.?); - const written = JSPrinter.printJSON(@TypeOf(package_json_writer), package_json_writer, package_json_expr, &source) catch |err| { + const written = JSPrinter.printJSON(@TypeOf(package_json_writer), package_json_writer, package_json_expr, &source, .{}) catch |err| { Output.prettyErrorln("package.json failed to write due to error {s}", 
.{@errorName(err)}); package_json_file = null; break :process_package_json; diff --git a/src/cli/init_command.zig b/src/cli/init_command.zig index 5683b83d7d..35d22af2fe 100644 --- a/src/cli/init_command.zig +++ b/src/cli/init_command.zig @@ -362,6 +362,7 @@ pub const InitCommand = struct { package_json_writer, js_ast.Expr{ .data = .{ .e_object = fields.object }, .loc = logger.Loc.Empty }, &logger.Source.initEmptyFile("package.json"), + .{}, ) catch |err| { Output.prettyErrorln("package.json failed to write due to error {s}", .{@errorName(err)}); package_json_file = null; diff --git a/src/cli/pm_trusted_command.zig b/src/cli/pm_trusted_command.zig index 0e133f0571..159aad49b8 100644 --- a/src/cli/pm_trusted_command.zig +++ b/src/cli/pm_trusted_command.zig @@ -425,7 +425,7 @@ pub const TrustCommand = struct { buffer_writer.append_newline = package_json_contents.len > 0 and package_json_contents[package_json_contents.len - 1] == '\n'; var package_json_writer = bun.js_printer.BufferPrinter.init(buffer_writer); - _ = bun.js_printer.printJSON(@TypeOf(&package_json_writer), &package_json_writer, package_json, &package_json_source) catch |err| { + _ = bun.js_printer.printJSON(@TypeOf(&package_json_writer), &package_json_writer, package_json, &package_json_source, .{}) catch |err| { Output.errGeneric("failed to print package.json: {s}", .{@errorName(err)}); Global.crash(); }; diff --git a/src/install/install.zig b/src/install/install.zig index 314552d9a1..4c953d40d9 100644 --- a/src/install/install.zig +++ b/src/install/install.zig @@ -2718,6 +2718,7 @@ pub const PackageManager = struct { pub const MapEntry = struct { root: Expr, source: logger.Source, + indentation: JSPrinter.Options.Indentation = .{}, }; pub const Map = bun.StringHashMapUnmanaged(MapEntry); @@ -2725,6 +2726,7 @@ pub const PackageManager = struct { pub const GetJSONOptions = struct { init_reset_store: bool = true, always_decode_escape_sequences: bool = true, + guess_indentation: bool = false, }; pub const GetResult = union(enum) { @@ -2745,8 +2747,14 @@ pub const PackageManager = struct { /// Given an absolute path to a workspace package.json, return the AST /// and contents of the file. If the package.json is not present in the /// cache, it will be read from disk and parsed, and stored in the cache. 
- pub fn getWithPath(this: *@This(), allocator: std.mem.Allocator, log: *logger.Log, abs_package_json_path: anytype, comptime opts: GetJSONOptions) GetResult { - bun.assert(std.fs.path.isAbsolute(abs_package_json_path)); + pub fn getWithPath( + this: *@This(), + allocator: std.mem.Allocator, + log: *logger.Log, + abs_package_json_path: anytype, + comptime opts: GetJSONOptions, + ) GetResult { + bun.assertWithLocation(std.fs.path.isAbsolute(abs_package_json_path), @src()); var buf: if (Environment.isWindows) bun.PathBuffer else void = undefined; const path = if (comptime !Environment.isWindows) @@ -2769,16 +2777,25 @@ pub const PackageManager = struct { if (comptime opts.init_reset_store) initializeStore(); - const _json = if (comptime opts.always_decode_escape_sequences) - json_parser.ParsePackageJSONUTF8AlwaysDecode(&source, log, allocator) - else - json_parser.ParsePackageJSONUTF8(&source, log, allocator); + const json_result = json_parser.ParsePackageJSONUTF8WithOpts( + &source, + log, + allocator, + .{ + .is_json = true, + .allow_comments = true, + .allow_trailing_commas = true, + .always_decode_escape_sequences = opts.always_decode_escape_sequences, + .guess_indentation = opts.guess_indentation, + }, + ); - const json = _json catch |err| return .{ .parse_err = err }; + const json = json_result catch |err| return .{ .parse_err = err }; entry.value_ptr.* = .{ - .root = json.deepClone(allocator) catch bun.outOfMemory(), + .root = json.root.deepClone(allocator) catch bun.outOfMemory(), .source = source, + .indentation = json.indentation, }; entry.key_ptr.* = key; @@ -2794,7 +2811,7 @@ pub const PackageManager = struct { source: logger.Source, comptime opts: GetJSONOptions, ) GetResult { - bun.assert(std.fs.path.isAbsolute(source.path.text)); + bun.assertWithLocation(std.fs.path.isAbsolute(source.path.text), @src()); var buf: if (Environment.isWindows) bun.PathBuffer else void = undefined; const path = if (comptime !Environment.isWindows) @@ -2813,15 +2830,25 @@ pub const PackageManager = struct { if (comptime opts.init_reset_store) initializeStore(); - const _json = if (comptime opts.always_decode_escape_sequences) - json_parser.ParsePackageJSONUTF8AlwaysDecode(&source, log, allocator) - else - json_parser.ParsePackageJSONUTF8(&source, log, allocator); - const json = _json catch |err| return .{ .parse_err = err }; + const json_result = json_parser.ParsePackageJSONUTF8WithOpts( + &source, + log, + allocator, + .{ + .is_json = true, + .allow_comments = true, + .allow_trailing_commas = true, + .always_decode_escape_sequences = opts.always_decode_escape_sequences, + .guess_indentation = opts.guess_indentation, + }, + ); + + const json = json_result catch |err| return .{ .parse_err = err }; entry.value_ptr.* = .{ - .root = json.deepClone(allocator) catch bun.outOfMemory(), + .root = json.root.deepClone(allocator) catch bun.outOfMemory(), .source = source, + .indentation = json.indentation, }; entry.key_ptr.* = allocator.dupe(u8, path) catch bun.outOfMemory(); @@ -9791,6 +9818,7 @@ pub const PackageManager = struct { manager.original_package_json_path, .{ .always_decode_escape_sequences = false, + .guess_indentation = true, }, )) { .parse_err => |err| { @@ -9814,6 +9842,7 @@ pub const PackageManager = struct { }, .entry => |entry| entry, }; + const current_package_json_indent = current_package_json.indentation; // If there originally was a newline at the end of their package.json, preserve it // so that we don't cause unnecessary diffs in their git history. 
@@ -9950,7 +9979,15 @@ pub const PackageManager = struct { buffer_writer.append_newline = preserve_trailing_newline_at_eof_for_package_json; var package_json_writer = JSPrinter.BufferPrinter.init(buffer_writer); - var written = JSPrinter.printJSON(@TypeOf(&package_json_writer), &package_json_writer, current_package_json.root, &current_package_json.source) catch |err| { + var written = JSPrinter.printJSON( + @TypeOf(&package_json_writer), + &package_json_writer, + current_package_json.root, + &current_package_json.source, + .{ + .indent = current_package_json_indent, + }, + ) catch |err| { Output.prettyErrorln("package.json failed to write due to error {s}", .{@errorName(err)}); Global.crash(); }; @@ -9980,7 +10017,14 @@ pub const PackageManager = struct { // The lifetime of this pointer is only valid until the next call to `getWithPath`, which can happen after this scope. // https://github.com/oven-sh/bun/issues/12288 - const root_package_json = switch (manager.workspace_package_json_cache.getWithPath(manager.allocator, manager.log, root_package_json_path, .{})) { + const root_package_json = switch (manager.workspace_package_json_cache.getWithPath( + manager.allocator, + manager.log, + root_package_json_path, + .{ + .guess_indentation = true, + }, + )) { .parse_err => |err| { switch (Output.enable_ansi_colors) { inline else => |enable_ansi_colors| { @@ -10015,7 +10059,15 @@ pub const PackageManager = struct { buffer_writer2.append_newline = preserve_trailing_newline_at_eof_for_package_json; var package_json_writer2 = JSPrinter.BufferPrinter.init(buffer_writer2); - _ = JSPrinter.printJSON(@TypeOf(&package_json_writer2), &package_json_writer2, root_package_json.root, &root_package_json.source) catch |err| { + _ = JSPrinter.printJSON( + @TypeOf(&package_json_writer2), + &package_json_writer2, + root_package_json.root, + &root_package_json.source, + .{ + .indent = root_package_json.indentation, + }, + ) catch |err| { Output.prettyErrorln("package.json failed to write due to error {s}", .{@errorName(err)}); Global.crash(); }; @@ -10075,6 +10127,9 @@ pub const PackageManager = struct { &package_json_writer_two, new_package_json, &source, + .{ + .indent = current_package_json_indent, + }, ) catch |err| { Output.prettyErrorln("package.json failed to write due to error {s}", .{@errorName(err)}); Global.crash(); diff --git a/src/install/lockfile.zig b/src/install/lockfile.zig index bf3d44f2af..708c21ce68 100644 --- a/src/install/lockfile.zig +++ b/src/install/lockfile.zig @@ -4294,6 +4294,7 @@ pub const Package = extern struct { ) !WorkspaceEntry { const workspace_json = try json_cache.getWithPath(allocator, log, abs_package_json_path, .{ .init_reset_store = false, + .guess_indentation = true, }).unwrap(); const name_expr = workspace_json.root.get("name") orelse return error.MissingPackageName; diff --git a/src/js_lexer.zig b/src/js_lexer.zig index af5f9f6382..681439daed 100644 --- a/src/js_lexer.zig +++ b/src/js_lexer.zig @@ -17,6 +17,7 @@ const default_allocator = bun.default_allocator; const C = bun.C; const FeatureFlags = @import("feature_flags.zig"); const JavascriptString = []const u16; +const Indentation = bun.js_printer.Options.Indentation; const unicode = std.unicode; @@ -75,6 +76,8 @@ pub const JSONOptions = struct { was_originally_macro: bool = false, always_decode_escape_sequences: bool = false, + + guess_indentation: bool = false, }; pub fn decodeStringLiteralEscapeSequencesToUTF16(bytes: string, allocator: std.mem.Allocator) ![]const u16 { @@ -102,6 +105,7 @@ pub fn NewLexer(
json_options.json_warn_duplicate_keys, json_options.was_originally_macro, json_options.always_decode_escape_sequences, + json_options.guess_indentation, ); } @@ -114,6 +118,7 @@ fn NewLexer_( comptime json_options_json_warn_duplicate_keys: bool, comptime json_options_was_originally_macro: bool, comptime json_options_always_decode_escape_sequences: bool, + comptime json_options_guess_indentation: bool, ) type { const json_options = JSONOptions{ .is_json = json_options_is_json, @@ -124,6 +129,7 @@ fn NewLexer_( .json_warn_duplicate_keys = json_options_json_warn_duplicate_keys, .was_originally_macro = json_options_was_originally_macro, .always_decode_escape_sequences = json_options_always_decode_escape_sequences, + .guess_indentation = json_options_guess_indentation, }; return struct { const LexerType = @This(); @@ -189,6 +195,16 @@ fn NewLexer_( track_comments: bool = false, all_comments: std.ArrayList(logger.Range), + indent_info: if (json_options.guess_indentation) + struct { + guess: Indentation = .{}, + first_newline: bool = true, + } + else + void = if (json_options.guess_indentation) + .{} + else {}, + pub fn clone(self: *const LexerType) LexerType { return LexerType{ .log = self.log, @@ -1211,8 +1227,36 @@ fn NewLexer_( } }, '\r', '\n', 0x2028, 0x2029 => { - lexer.step(); lexer.has_newline_before = true; + + if (comptime json_options.guess_indentation) { + if (lexer.indent_info.first_newline and lexer.code_point == '\n') { + while (lexer.code_point == '\n' or lexer.code_point == '\r') { + lexer.step(); + } + + if (lexer.code_point != ' ' and lexer.code_point != '\t') { + // try to get the next one. this handles cases where the file starts + // with a newline + continue; + } + + lexer.indent_info.first_newline = false; + + const indent_character = lexer.code_point; + var count: usize = 0; + while (lexer.code_point == indent_character) { + lexer.step(); + count += 1; + } + + lexer.indent_info.guess.character = if (indent_character == ' ') .space else .tab; + lexer.indent_info.guess.scalar = count; + continue; + } + } + + lexer.step(); continue; }, '\t', ' ' => { diff --git a/src/js_printer.zig b/src/js_printer.zig index 382c33bf57..376a5b2eae 100644 --- a/src/js_printer.zig +++ b/src/js_printer.zig @@ -88,7 +88,8 @@ pub fn canPrintWithoutEscape(comptime CodePointType: type, c: CodePointType, com } } -const indentation_buf = [_]u8{' '} ** 128; +const indentation_space_buf = [_]u8{' '} ** 128; +const indentation_tab_buf = [_]u8{'\t'} ** 128; pub fn bestQuoteCharForString(comptime Type: type, str: []const Type, allow_backtick: bool) u8 { var single_cost: usize = 0; @@ -516,7 +517,7 @@ pub const Options = struct { to_esm_ref: Ref = Ref.None, require_ref: ?Ref = null, import_meta_ref: Ref = Ref.None, - indent: usize = 0, + indent: Indentation = .{}, externals: []u32 = &[_]u32{}, runtime_imports: runtime.Runtime.Imports = runtime.Runtime.Imports{}, module_hash: u32 = 0, @@ -567,9 +568,14 @@ pub const Options = struct { // us do binary search on to figure out what line a given AST node came from line_offset_tables: ?SourceMap.LineOffsetTable.List = null, - pub inline fn unindent(self: *Options) void { - self.indent -|= 1; - } + // Default indentation is 2 spaces + pub const Indentation = struct { + scalar: usize = 2, + count: usize = 0, + character: Character = .space, + + pub const Character = enum { tab, space }; + }; pub fn requireOrImportMetaForSource( self: *const Options, @@ -1009,12 +1015,25 @@ fn NewPrinter( p.print(str); } + pub inline fn unindent(p: *Printer) void { + 
p.options.indent.count -|= 1; + } + + pub inline fn indent(p: *Printer) void { + p.options.indent.count += 1; + } + pub fn printIndent(p: *Printer) void { - if (p.options.indent == 0 or p.options.minify_whitespace) { + if (p.options.indent.count == 0 or p.options.minify_whitespace) { return; } - var i: usize = p.options.indent * 2; + const indentation_buf = switch (p.options.indent.character) { + .space => indentation_space_buf, + .tab => indentation_tab_buf, + }; + + var i: usize = p.options.indent.count * p.options.indent.scalar; while (i > 0) { const amt = @min(i, indentation_buf.len); @@ -1113,7 +1132,7 @@ fn NewPrinter( if (!import.is_single_line) { p.printNewline(); - p.options.indent += 1; + p.indent(); p.printIndent(); } @@ -1133,7 +1152,7 @@ fn NewPrinter( if (!import.is_single_line) { p.printNewline(); - p.options.unindent(); + p.unindent(); } else { p.printSpace(); } @@ -1205,9 +1224,9 @@ fn NewPrinter( }, else => { p.printNewline(); - p.options.indent += 1; + p.indent(); p.printStmt(stmt) catch unreachable; - p.options.unindent(); + p.unindent(); }, } } @@ -1224,9 +1243,9 @@ fn NewPrinter( p.print("{"); p.printNewline(); - p.options.indent += 1; + p.indent(); p.printBlockBody(stmts); - p.options.unindent(); + p.unindent(); p.needs_semicolon = false; p.printIndent(); @@ -1241,10 +1260,10 @@ fn NewPrinter( p.print("{"); p.printNewline(); - p.options.indent += 1; + p.indent(); p.printBlockBody(prepend); p.printBlockBody(stmts); - p.options.unindent(); + p.unindent(); p.needs_semicolon = false; p.printIndent(); @@ -1484,7 +1503,7 @@ fn NewPrinter( p.addSourceMapping(class.body_loc); p.print("{"); p.printNewline(); - p.options.indent += 1; + p.indent(); for (class.properties) |item| { p.printSemicolonIfNeeded(); @@ -1508,7 +1527,7 @@ fn NewPrinter( } p.needs_semicolon = false; - p.options.unindent(); + p.unindent(); p.printIndent(); if (class.close_brace_loc.start > class.body_loc.start) p.addSourceMapping(class.close_brace_loc); @@ -2073,7 +2092,7 @@ fn NewPrinter( // External import() if (leading_interior_comments.len > 0) { p.printNewline(); - p.options.indent += 1; + p.indent(); for (leading_interior_comments) |comment| { p.printIndentedComment(comment.text); } @@ -2121,7 +2140,7 @@ fn NewPrinter( if (leading_interior_comments.len > 0) { p.printNewline(); - p.options.unindent(); + p.unindent(); p.printIndent(); } @@ -2508,7 +2527,7 @@ fn NewPrinter( p.print("import("); if (e.leading_interior_comments.len > 0) { p.printNewline(); - p.options.indent += 1; + p.indent(); for (e.leading_interior_comments) |comment| { p.printIndentedComment(comment.text); } @@ -2538,7 +2557,7 @@ fn NewPrinter( if (e.leading_interior_comments.len > 0) { p.printNewline(); - p.options.unindent(); + p.unindent(); p.printIndent(); } p.print(")"); @@ -2783,7 +2802,7 @@ fn NewPrinter( const items = e.items.slice(); if (items.len > 0) { if (!e.is_single_line) { - p.options.indent += 1; + p.indent(); } for (items, 0..) 
|item, i| { @@ -2806,7 +2825,7 @@ fn NewPrinter( } if (!e.is_single_line) { - p.options.unindent(); + p.unindent(); p.printNewline(); p.printIndent(); } @@ -2832,7 +2851,9 @@ fn NewPrinter( p.print("{"); const props = expr.data.e_object.properties.slice(); if (props.len > 0) { - p.options.indent += @as(usize, @intFromBool(!e.is_single_line)); + if (!e.is_single_line) { + p.indent(); + } if (e.is_single_line) { p.printSpace(); @@ -2857,7 +2878,7 @@ fn NewPrinter( } if (!e.is_single_line) { - p.options.unindent(); + p.unindent(); p.printNewline(); p.printIndent(); } else { @@ -3715,7 +3736,9 @@ fn NewPrinter( .b_array => |b| { p.print("["); if (b.items.len > 0) { - p.options.indent += @as(usize, @intFromBool(!b.is_single_line)); + if (!b.is_single_line) { + p.indent(); + } for (b.items, 0..) |*item, i| { if (i != 0) { @@ -3746,7 +3769,7 @@ fn NewPrinter( } if (!b.is_single_line) { - p.options.unindent(); + p.unindent(); p.printNewline(); p.printIndent(); } @@ -3757,8 +3780,9 @@ fn NewPrinter( .b_object => |b| { p.print("{"); if (b.properties.len > 0) { - p.options.indent += - @as(usize, @intFromBool(!b.is_single_line)); + if (!b.is_single_line) { + p.indent(); + } for (b.properties, 0..) |*property, i| { if (i != 0) { @@ -3847,7 +3871,7 @@ fn NewPrinter( } if (!b.is_single_line) { - p.options.unindent(); + p.unindent(); p.printNewline(); p.printIndent(); } else { @@ -3960,7 +3984,7 @@ fn NewPrinter( } }, .s_empty => { - if (p.prev_stmt_tag == .s_empty and p.options.indent == 0) return; + if (p.prev_stmt_tag == .s_empty and p.options.indent.count == 0) return; p.printIndent(); p.print(";"); @@ -4178,7 +4202,7 @@ fn NewPrinter( p.print("{"); if (!s.is_single_line) { - p.options.indent += 1; + p.indent(); } else { p.printSpace(); } @@ -4200,7 +4224,7 @@ fn NewPrinter( } if (!s.is_single_line) { - p.options.unindent(); + p.unindent(); p.printNewline(); p.printIndent(); } else { @@ -4275,7 +4299,7 @@ fn NewPrinter( p.printWhitespacer(ws("export {")); if (!s.is_single_line) { - p.options.indent += 1; + p.indent(); } else { p.printSpace(); } @@ -4296,7 +4320,7 @@ fn NewPrinter( } if (!s.is_single_line) { - p.options.unindent(); + p.unindent(); p.printNewline(); p.printIndent(); } else { @@ -4342,10 +4366,10 @@ fn NewPrinter( }, else => { p.printNewline(); - p.options.indent += 1; + p.indent(); p.printStmt(s.body) catch unreachable; p.printSemicolonIfNeeded(); - p.options.unindent(); + p.unindent(); p.printIndent(); }, } @@ -4412,7 +4436,7 @@ fn NewPrinter( p.printBody(s.body); }, .s_label => |s| { - if (!p.options.minify_whitespace and p.options.indent > 0) { + if (!p.options.minify_whitespace and p.options.indent.count > 0) { p.addSourceMapping(stmt.loc); p.printIndent(); } @@ -4490,7 +4514,7 @@ fn NewPrinter( p.printSpace(); p.print("{"); p.printNewline(); - p.options.indent += 1; + p.indent(); for (s.cases) |c| { p.printSemicolonIfNeeded(); @@ -4519,15 +4543,15 @@ fn NewPrinter( } p.printNewline(); - p.options.indent += 1; + p.indent(); for (c.body) |st| { p.printSemicolonIfNeeded(); p.printStmt(st) catch unreachable; } - p.options.unindent(); + p.unindent(); } - p.options.unindent(); + p.unindent(); p.printIndent(); p.print("}"); p.printNewline(); @@ -4747,7 +4771,7 @@ fn NewPrinter( p.print("{"); if (!s.is_single_line) { - p.options.unindent(); + p.unindent(); } for (s.items, 0..) 
|item, i| { @@ -4767,7 +4791,7 @@ fn NewPrinter( } if (!s.is_single_line) { - p.options.unindent(); + p.unindent(); p.printNewline(); p.printIndent(); } @@ -4888,7 +4912,7 @@ fn NewPrinter( p.printSemicolonAfterStatement(); }, .s_expr => |s| { - if (!p.options.minify_whitespace and p.options.indent > 0) { + if (!p.options.minify_whitespace and p.options.indent.count > 0) { p.addSourceMapping(stmt.loc); p.printIndent(); } @@ -5160,9 +5184,9 @@ fn NewPrinter( p.print("{"); p.printNewline(); - p.options.indent += 1; + p.indent(); p.printStmt(s.yes) catch unreachable; - p.options.unindent(); + p.unindent(); p.needs_semicolon = false; p.printIndent(); @@ -5175,9 +5199,9 @@ fn NewPrinter( } } else { p.printNewline(); - p.options.indent += 1; + p.indent(); p.printStmt(s.yes) catch unreachable; - p.options.unindent(); + p.unindent(); if (s.no != null) { p.printIndent(); @@ -5202,9 +5226,9 @@ fn NewPrinter( }, else => { p.printNewline(); - p.options.indent += 1; + p.indent(); p.printStmt(no_block) catch unreachable; - p.options.unindent(); + p.unindent(); }, } } @@ -6155,6 +6179,7 @@ pub fn printJSON( _writer: Writer, expr: Expr, source: *const logger.Source, + opts: Options, ) !usize { const PrinterType = NewPrinter(false, Writer, false, false, true, false); const writer = _writer; @@ -6172,7 +6197,7 @@ pub fn printJSON( var printer = PrinterType.init( writer, ast.import_records.slice(), - .{}, + opts, renamer.toRenamer(), undefined, ); diff --git a/src/json_parser.zig b/src/json_parser.zig index 1944ef153c..87f7ca1ffe 100644 --- a/src/json_parser.zig +++ b/src/json_parser.zig @@ -41,6 +41,7 @@ const Level = js_ast.Op.Level; const Op = js_ast.Op; const Scope = js_ast.Scope; const locModuleScope = logger.Loc.Empty; +const Indentation = js_printer.Options.Indentation; const LEXER_DEBUGGER_WORKAROUND = false; @@ -115,6 +116,7 @@ fn JSONLikeParser(comptime opts: js_lexer.JSONOptions) type { opts.json_warn_duplicate_keys, opts.was_originally_macro, opts.always_decode_escape_sequences, + opts.guess_indentation, ); } @@ -127,6 +129,7 @@ fn JSONLikeParser_( comptime opts_json_warn_duplicate_keys: bool, comptime opts_was_originally_macro: bool, comptime opts_always_decode_escape_sequences: bool, + comptime opts_guess_indentation: bool, ) type { const opts = js_lexer.JSONOptions{ .is_json = opts_is_json, @@ -137,6 +140,7 @@ fn JSONLikeParser_( .json_warn_duplicate_keys = opts_json_warn_duplicate_keys, .was_originally_macro = opts_was_originally_macro, .always_decode_escape_sequences = opts_always_decode_escape_sequences, + .guess_indentation = opts_guess_indentation, }; return struct { const Lexer = js_lexer.NewLexer(if (LEXER_DEBUGGER_WORKAROUND) js_lexer.JSONOptions{} else opts); @@ -810,6 +814,50 @@ pub fn ParsePackageJSONUTF8AlwaysDecode( return try parser.parseExpr(false, true); } +const JsonResult = struct { + root: Expr, + indentation: Indentation = .{}, +}; + +pub fn ParsePackageJSONUTF8WithOpts( + source: *const logger.Source, + log: *logger.Log, + allocator: std.mem.Allocator, + comptime opts: js_lexer.JSONOptions, +) !JsonResult { + const len = source.contents.len; + + switch (len) { + // This is to be consisntent with how disabled JS files are handled + 0 => { + return .{ + .root = Expr{ .loc = logger.Loc{ .start = 0 }, .data = empty_object_data }, + }; + }, + // This is a fast pass I guess + 2 => { + if (strings.eqlComptime(source.contents[0..1], "\"\"") or strings.eqlComptime(source.contents[0..1], "''")) { + return .{ .root = Expr{ .loc = logger.Loc{ .start = 0 }, .data = empty_string_data } 
}; + } else if (strings.eqlComptime(source.contents[0..1], "{}")) { + return .{ .root = Expr{ .loc = logger.Loc{ .start = 0 }, .data = empty_object_data } }; + } else if (strings.eqlComptime(source.contents[0..1], "[]")) { + return .{ .root = Expr{ .loc = logger.Loc{ .start = 0 }, .data = empty_array_data } }; + } + }, + else => {}, + } + + var parser = try JSONLikeParser(opts).init(allocator, source.*, log); + bun.assert(parser.source().contents.len > 0); + + const root = try parser.parseExpr(false, true); + + return .{ + .root = root, + .indentation = if (comptime opts.guess_indentation) parser.lexer.indent_info.guess else .{}, + }; +} + /// Parse Package JSON /// Allow trailing commas & comments. /// This eagerly transcodes UTF-16 strings into UTF-8 strings @@ -1032,7 +1080,7 @@ fn expectPrintedJSON(_contents: string, expected: string) !void { const buffer_writer = try js_printer.BufferWriter.init(default_allocator); var writer = js_printer.BufferPrinter.init(buffer_writer); - const written = try js_printer.printJSON(@TypeOf(&writer), &writer, expr, &source); + const written = try js_printer.printJSON(@TypeOf(&writer), &writer, expr, &source, .{}); var js = writer.ctx.buffer.list.items.ptr[0 .. written + 1]; if (js.len > 1) { diff --git a/test/cli/install/registry/bun-install-registry.test.ts b/test/cli/install/registry/bun-install-registry.test.ts index b0b1e9d715..a9e767c258 100644 --- a/test/cli/install/registry/bun-install-registry.test.ts +++ b/test/cli/install/registry/bun-install-registry.test.ts @@ -378,6 +378,48 @@ module.exports = function lmao() { ); }); +describe("package.json indentation", async () => { + test("works for root and workspace packages", async () => { + await Promise.all([ + // 5 space indentation + write(join(packageDir, "package.json"), `\n{\n\n "name": "foo",\n"workspaces": ["packages/*"]\n}`), + // 1 tab indentation + write(join(packageDir, "packages", "bar", "package.json"), `\n{\n\n\t"name": "bar",\n}`), + ]); + + let { exited } = spawn({ + cmd: [bunExe(), "add", "no-deps"], + cwd: packageDir, + stdout: "ignore", + stderr: "ignore", + env, + }); + + expect(await exited).toBe(0); + + const rootPackageJson = await file(join(packageDir, "package.json")).text(); + + expect(rootPackageJson).toBe( + `{\n "name": "foo",\n "workspaces": ["packages/*"],\n "dependencies": {\n "no-deps": "^2.0.0"\n }\n}`, + ); + + // now add to workspace. it should keep tab indentation + ({ exited } = spawn({ + cmd: [bunExe(), "add", "no-deps"], + cwd: join(packageDir, "packages", "bar"), + stdout: "inherit", + stderr: "inherit", + env, + })); + + expect(await exited).toBe(0); + + expect(await file(join(packageDir, "package.json")).text()).toBe(rootPackageJson); + const workspacePackageJson = await file(join(packageDir, "packages", "bar", "package.json")).text(); + expect(workspacePackageJson).toBe(`{\n\t"name": "bar",\n\t"dependencies": {\n\t\t"no-deps": "^2.0.0"\n\t}\n}`); + }); +}); + describe("optionalDependencies", () => { for (const optional of [true, false]) { test(`exit code is ${optional ? 0 : 1} when ${optional ? "optional" : ""} dependency tarball is missing`, async () => {
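Note (editor's addendum, not part of the patch): the indentation guess added to the lexer above works by waiting for the first newline, skipping any run of '\n'/'\r', and then counting how many identical ' ' or '\t' characters indent the next non-empty line; that character and run length become the js_printer.Options.Indentation that printIndent later multiplies by the current nesting depth. Below is a minimal standalone sketch of that heuristic. The names guessIndentation and IndentChar are illustrative only and do not appear in the patch; the logic approximates, but does not reproduce, the lexer's code path.

const std = @import("std");

const IndentChar = enum { space, tab };

// Mirrors the shape of js_printer.Options.Indentation: an indent character and
// how many of it make up one level (defaults to 2 spaces, as in the patch).
const Indentation = struct {
    scalar: usize = 2,
    character: IndentChar = .space,
};

// Hypothetical helper sketching the guess: find the first line that begins with
// whitespace and record the run length of its leading ' ' or '\t' characters.
// Falls back to the 2-space default when no indented line is found.
fn guessIndentation(contents: []const u8) Indentation {
    var i: usize = 0;
    while (i < contents.len) {
        // advance to the next newline
        while (i < contents.len and contents[i] != '\n' and contents[i] != '\r') i += 1;
        // skip the run of newlines (also covers files that start with one)
        while (i < contents.len and (contents[i] == '\n' or contents[i] == '\r')) i += 1;
        if (i >= contents.len) break;
        const ch = contents[i];
        if (ch != ' ' and ch != '\t') continue; // this line is not indented; keep looking
        var count: usize = 0;
        while (i < contents.len and contents[i] == ch) : (i += 1) count += 1;
        return .{ .scalar = count, .character = if (ch == ' ') .space else .tab };
    }
    return .{};
}

test "guesses 5-space and tab indentation" {
    const spaces = guessIndentation("\n{\n\n     \"name\": \"foo\"\n}");
    try std.testing.expectEqual(@as(usize, 5), spaces.scalar);
    try std.testing.expect(spaces.character == .space);

    const tabs = guessIndentation("{\n\t\"name\": \"bar\"\n}");
    try std.testing.expectEqual(@as(usize, 1), tabs.scalar);
    try std.testing.expect(tabs.character == .tab);
}

Under this sketch, a package.json whose first indented line uses five spaces keeps scalar = 5 when bun install rewrites it, and a tab-indented workspace package keeps tabs, which is the behavior the bun-install-registry test above asserts after running `bun add no-deps` in the root and in the workspace.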