From 6415cc3e92cf5ff50b9ffc5c70cc0309ab56dc2e Mon Sep 17 00:00:00 2001 From: Dylan Conway <35280289+dylan-conway@users.noreply.github.com> Date: Fri, 23 Aug 2024 02:15:13 -0700 Subject: [PATCH] implement `bun outdated` (#13461) Co-authored-by: Zack Radisic --- src/bun.zig | 23 +- src/cli.zig | 31 +- src/cli/outdated_command.zig | 480 +++++++++++++ src/deps/diffz/DiffMatchPatch.zig | 116 +-- src/install/install.zig | 62 +- src/install/npm.zig | 98 ++- src/install/semver.zig | 187 ++++- src/js/internal-for-testing.ts | 7 + .../bun-install-registry.test.ts.snap | 81 +++ .../registry/bun-install-registry.test.ts | 664 ++++++++++++++++-- test/harness.ts | 11 + 11 files changed, 1601 insertions(+), 159 deletions(-) create mode 100644 src/cli/outdated_command.zig diff --git a/src/bun.zig b/src/bun.zig index 38bf6ffce8..d93fe7d9e3 100644 --- a/src/bun.zig +++ b/src/bun.zig @@ -813,14 +813,23 @@ pub fn openDirForIteration(dir: std.fs.Dir, path_: []const u8) !std.fs.Dir { } pub fn openDirAbsolute(path_: []const u8) !std.fs.Dir { - if (comptime Environment.isWindows) { - const res = try sys.openDirAtWindowsA(invalid_fd, path_, .{ .iterable = true, .can_rename_or_delete = true, .read_only = true }).unwrap(); - return res.asDir(); - } else { - const fd = try sys.openA(path_, O.DIRECTORY | O.CLOEXEC | O.RDONLY, 0).unwrap(); - return fd.asDir(); - } + const fd = if (comptime Environment.isWindows) + try sys.openDirAtWindowsA(invalid_fd, path_, .{ .iterable = true, .can_rename_or_delete = true, .read_only = true }).unwrap() + else + try sys.openA(path_, O.DIRECTORY | O.CLOEXEC | O.RDONLY, 0).unwrap(); + + return fd.asDir(); } + +pub fn openDirAbsoluteNotForDeletingOrRenaming(path_: []const u8) !std.fs.Dir { + const fd = if (comptime Environment.isWindows) + try sys.openDirAtWindowsA(invalid_fd, path_, .{ .iterable = true, .can_rename_or_delete = false, .read_only = true }).unwrap() + else + try sys.openA(path_, O.DIRECTORY | O.CLOEXEC | O.RDONLY, 0).unwrap(); + + return fd.asDir(); +} + pub const MimallocArena = @import("./mimalloc_arena.zig").Arena; pub fn getRuntimeFeatureFlag(comptime flag: [:0]const u8) bool { return struct { diff --git a/src/cli.zig b/src/cli.zig index 71c4551ca2..943afb2bf1 100644 --- a/src/cli.zig +++ b/src/cli.zig @@ -115,6 +115,7 @@ pub const BunxCommand = @import("./cli/bunx_command.zig").BunxCommand; pub const ExecCommand = @import("./cli/exec_command.zig").ExecCommand; pub const PatchCommand = @import("./cli/patch_command.zig").PatchCommand; pub const PatchCommitCommand = @import("./cli/patch_commit_command.zig").PatchCommitCommand; +pub const OutdatedCommand = @import("./cli/outdated_command.zig").OutdatedCommand; pub const Arguments = struct { pub fn loader_resolver(in: string) !Api.Loader { @@ -1438,6 +1439,8 @@ pub const Command = struct { RootCommandMatcher.case("exec") => .ExecCommand, + RootCommandMatcher.case("outdated") => .OutdatedCommand, + // These are reserved for future use by Bun, so that someone // doing `bun deploy` to run a script doesn't accidentally break // when we add our actual command @@ -1452,7 +1455,6 @@ pub const Command = struct { RootCommandMatcher.case("whoami") => .ReservedCommand, RootCommandMatcher.case("publish") => .ReservedCommand, RootCommandMatcher.case("prune") => .ReservedCommand, - RootCommandMatcher.case("outdated") => .ReservedCommand, RootCommandMatcher.case("list") => .ReservedCommand, RootCommandMatcher.case("why") => .ReservedCommand, @@ -1574,6 +1576,13 @@ pub const Command = struct { try PatchCommitCommand.exec(ctx); return; }, 
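// .OutdatedCommand follows the same dispatch pattern as the other install-family
// subcommands: construct a Command.Context for the tag, then hand it to the command's exec().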
+ .OutdatedCommand => { + if (comptime bun.fast_debug_build_mode and bun.fast_debug_build_cmd != .OutdatedCommand) unreachable; + const ctx = try Command.init(allocator, log, .OutdatedCommand); + + try OutdatedCommand.exec(ctx); + return; + }, .BunxCommand => { if (comptime bun.fast_debug_build_mode and bun.fast_debug_build_cmd != .BunxCommand) unreachable; const ctx = try Command.init(allocator, log, .BunxCommand); @@ -2141,6 +2150,7 @@ pub const Command = struct { ExecCommand, PatchCommand, PatchCommitCommand, + OutdatedCommand, /// Used by crash reports. /// @@ -2172,6 +2182,7 @@ pub const Command = struct { .ExecCommand => 'e', .PatchCommand => 'x', .PatchCommitCommand => 'z', + .OutdatedCommand => 'o', }; } @@ -2395,6 +2406,9 @@ pub const Command = struct { , .{}); Output.flush(); }, + .OutdatedCommand => { + Install.PackageManager.CommandLineArguments.printHelp(.outdated); + }, else => { HelpCommand.printWithReason(.explicit); }, @@ -2403,7 +2417,16 @@ pub const Command = struct { pub fn readGlobalConfig(this: Tag) bool { return switch (this) { - .BunxCommand, .PackageManagerCommand, .InstallCommand, .AddCommand, .RemoveCommand, .UpdateCommand, .PatchCommand, .PatchCommitCommand => true, + .BunxCommand, + .PackageManagerCommand, + .InstallCommand, + .AddCommand, + .RemoveCommand, + .UpdateCommand, + .PatchCommand, + .PatchCommitCommand, + .OutdatedCommand, + => true, else => false, }; } @@ -2420,6 +2443,7 @@ pub const Command = struct { .UpdateCommand, .PatchCommand, .PatchCommitCommand, + .OutdatedCommand, => true, else => false, }; @@ -2439,6 +2463,7 @@ pub const Command = struct { .AutoCommand = true, .RunCommand = true, .RunAsNodeCommand = true, + .OutdatedCommand = true, }); pub const always_loads_config: std.EnumArray(Tag, bool) = std.EnumArray(Tag, bool).initDefault(false, .{ @@ -2452,6 +2477,7 @@ pub const Command = struct { .PatchCommitCommand = true, .PackageManagerCommand = true, .BunxCommand = true, + .OutdatedCommand = true, }); pub const uses_global_options: std.EnumArray(Tag, bool) = std.EnumArray(Tag, bool).initDefault(true, .{ @@ -2466,6 +2492,7 @@ pub const Command = struct { .LinkCommand = false, .UnlinkCommand = false, .BunxCommand = false, + .OutdatedCommand = false, }); }; }; diff --git a/src/cli/outdated_command.zig b/src/cli/outdated_command.zig new file mode 100644 index 0000000000..ea92b5f237 --- /dev/null +++ b/src/cli/outdated_command.zig @@ -0,0 +1,480 @@ +const std = @import("std"); +const bun = @import("root").bun; +const Global = bun.Global; +const Output = bun.Output; +const Command = bun.CLI.Command; +const Install = bun.install; +const PackageManager = Install.PackageManager; +const Lockfile = Install.Lockfile; +const PackageID = Install.PackageID; +const DependencyID = Install.DependencyID; +const Behavior = Install.Dependency.Behavior; +const invalid_package_id = Install.invalid_package_id; +const string = bun.string; + +fn Table( + comptime num_columns: usize, + comptime column_color: []const u8, + comptime column_left_pad: usize, + comptime column_right_pad: usize, + comptime enable_ansi_colors: bool, +) type { + return struct { + column_names: [num_columns][]const u8, + column_inside_lengths: [num_columns]usize, + + pub fn topLeftSep(_: *const @This()) string { + return if (enable_ansi_colors) "┌" else "|"; + } + pub fn topRightSep(_: *const @This()) string { + return if (enable_ansi_colors) "┐" else "|"; + } + pub fn topColumnSep(_: *const @This()) string { + return if (enable_ansi_colors) "┬" else "-"; + } + + pub fn bottomLeftSep(_: *const 
@This()) string { + return if (enable_ansi_colors) "└" else "|"; + } + pub fn bottomRightSep(_: *const @This()) string { + return if (enable_ansi_colors) "┘" else "|"; + } + pub fn bottomColumnSep(_: *const @This()) string { + return if (enable_ansi_colors) "┴" else "-"; + } + + pub fn middleLeftSep(_: *const @This()) string { + return if (enable_ansi_colors) "├" else "|"; + } + pub fn middleRightSep(_: *const @This()) string { + return if (enable_ansi_colors) "┤" else "|"; + } + pub fn middleColumnSep(_: *const @This()) string { + return if (enable_ansi_colors) "┼" else "|"; + } + + pub fn horizontalEdge(_: *const @This()) string { + return if (enable_ansi_colors) "─" else "-"; + } + pub fn verticalEdge(_: *const @This()) string { + return if (enable_ansi_colors) "│" else "|"; + } + + pub fn init(column_names_: [num_columns][]const u8, column_inside_lengths_: [num_columns]usize) @This() { + return .{ + .column_names = column_names_, + .column_inside_lengths = column_inside_lengths_, + }; + } + + pub fn printTopLineSeparator(this: *const @This()) void { + this.printLine(this.topLeftSep(), this.topRightSep(), this.topColumnSep()); + } + + pub fn printBottomLineSeparator(this: *const @This()) void { + this.printLine(this.bottomLeftSep(), this.bottomRightSep(), this.bottomColumnSep()); + } + + pub fn printLineSeparator(this: *const @This()) void { + this.printLine(this.middleLeftSep(), this.middleRightSep(), this.middleColumnSep()); + } + + pub fn printLine(this: *const @This(), left_edge_separator: string, right_edge_separator: string, column_separator: string) void { + for (this.column_inside_lengths, 0..) |column_inside_length, i| { + if (i == 0) { + Output.pretty("{s}", .{left_edge_separator}); + } else { + Output.pretty("{s}", .{column_separator}); + } + + for (0..column_left_pad + column_inside_length + column_right_pad) |_| Output.pretty("{s}", .{this.horizontalEdge()}); + + if (i == this.column_inside_lengths.len - 1) { + Output.pretty("{s}\n", .{right_edge_separator}); + } + } + } + + pub fn printColumnNames(this: *const @This()) void { + for (this.column_inside_lengths, 0..) 
|column_inside_length, i| { + Output.pretty("{s}", .{this.verticalEdge()}); + for (0..column_left_pad) |_| Output.pretty(" ", .{}); + Output.pretty("<" ++ column_color ++ ">{s}", .{this.column_names[i]}); + for (this.column_names[i].len..column_inside_length + column_right_pad) |_| Output.pretty(" ", .{}); + if (i == this.column_inside_lengths.len - 1) { + Output.pretty("{s}\n", .{this.verticalEdge()}); + } + } + } + }; +} + +pub const OutdatedCommand = struct { + pub fn exec(ctx: Command.Context) !void { + const cli = try PackageManager.CommandLineArguments.parse(ctx.allocator, .outdated); + + const manager = PackageManager.init(ctx, cli, .outdated) catch |err| { + if (!cli.silent) { + if (err == error.MissingPackageJSON) { + Output.errGeneric("missing package.json, nothing outdated", .{}); + } + Output.errGeneric("failed to initialize bun install: {s}", .{@errorName(err)}); + } + + Global.crash(); + }; + + return switch (manager.options.log_level) { + inline else => |log_level| outdated(ctx, manager, log_level), + }; + } + + fn outdated(ctx: Command.Context, manager: *PackageManager, comptime log_level: PackageManager.Options.LogLevel) !void { + const load_lockfile_result = manager.lockfile.loadFromDisk( + manager, + manager.allocator, + manager.log, + manager.options.lockfile_path, + true, + ); + + const lockfile = switch (load_lockfile_result) { + .not_found => { + if (log_level != .silent) { + Output.errGeneric("missing lockfile, nothing outdated", .{}); + } + Global.crash(); + }, + .err => |cause| { + if (log_level != .silent) { + switch (cause.step) { + .open_file => Output.errGeneric("failed to open lockfile: {s}", .{ + @errorName(cause.value), + }), + .parse_file => Output.errGeneric("failed to parse lockfile: {s}", .{ + @errorName(cause.value), + }), + .read_file => Output.errGeneric("failed to read lockfile: {s}", .{ + @errorName(cause.value), + }), + .migrating => Output.errGeneric("failed to migrate lockfile: {s}", .{ + @errorName(cause.value), + }), + } + + if (ctx.log.hasErrors()) { + switch (Output.enable_ansi_colors) { + inline else => |enable_ansi_colors| try manager.log.printForLogLevelWithEnableAnsiColors( + Output.errorWriter(), + enable_ansi_colors, + ), + } + } + } + + Global.crash(); + }, + .ok => |ok| ok.lockfile, + }; + + manager.lockfile = lockfile; + + const root_pkg_id = manager.root_package_id.get(lockfile, manager.workspace_name_hash); + if (root_pkg_id == invalid_package_id) return; + const root_pkg_deps = lockfile.packages.items(.dependencies)[root_pkg_id]; + + Output.prettyErrorln("bun outdated v" ++ Global.package_json_version_with_sha ++ "", .{}); + Output.flush(); + + try updateManifestsIfNecessary(manager, log_level, root_pkg_deps); + + try switch (Output.enable_ansi_colors) { + inline else => |enable_ansi_colors| printOutdatedInfoTable(manager, root_pkg_deps, enable_ansi_colors), + }; + } + + fn printOutdatedInfoTable(manager: *PackageManager, root_pkg_deps: Lockfile.DependencySlice, comptime enable_ansi_colors: bool) !void { + var outdated_ids: std.ArrayListUnmanaged(struct { package_id: PackageID, dep_id: DependencyID }) = .{}; + defer outdated_ids.deinit(manager.allocator); + + var max_name: usize = 0; + var max_current: usize = 0; + var max_update: usize = 0; + var max_latest: usize = 0; + + const lockfile = manager.lockfile; + const string_buf = lockfile.buffers.string_bytes.items; + const dependencies = lockfile.buffers.dependencies.items; + const packages = lockfile.packages.slice(); + const pkg_names = packages.items(.name); + const 
pkg_resolutions = packages.items(.resolution); + + var version_buf = std.ArrayList(u8).init(bun.default_allocator); + defer version_buf.deinit(); + const version_writer = version_buf.writer(); + + for (root_pkg_deps.begin()..root_pkg_deps.end()) |dep_id| { + const package_id = lockfile.buffers.resolutions.items[dep_id]; + if (package_id == invalid_package_id) continue; + const dep = lockfile.buffers.dependencies.items[dep_id]; + if (dep.version.tag != .npm and dep.version.tag != .dist_tag) continue; + const resolution = pkg_resolutions[package_id]; + if (resolution.tag != .npm) continue; + + const package_name = pkg_names[package_id].slice(string_buf); + var expired = false; + const manifest = manager.manifests.byNameAllowExpired( + manager.scopeForPackageName(package_name), + package_name, + &expired, + ) orelse continue; + + const latest = manifest.findByDistTag("latest") orelse continue; + + const update_version = if (dep.version.tag == .npm) + manifest.findBestVersion(dep.version.value.npm.version, string_buf) orelse continue + else + manifest.findByDistTag(dep.version.value.dist_tag.tag.slice(string_buf)) orelse continue; + + if (resolution.value.npm.version.order(latest.version, string_buf, string_buf) != .lt) continue; + + const package_name_len = package_name.len + + if (dep.behavior.dev) + " (dev)".len + else if (dep.behavior.peer) + " (peer)".len + else if (dep.behavior.optional) + " (optional)".len + else + 0; + + if (package_name_len > max_name) max_name = package_name_len; + + version_writer.print("{}", .{resolution.value.npm.version.fmt(string_buf)}) catch bun.outOfMemory(); + if (version_buf.items.len > max_current) max_current = version_buf.items.len; + version_buf.clearRetainingCapacity(); + + version_writer.print("{}", .{update_version.version.fmt(manifest.string_buf)}) catch bun.outOfMemory(); + if (version_buf.items.len > max_update) max_update = version_buf.items.len; + version_buf.clearRetainingCapacity(); + + version_writer.print("{}", .{latest.version.fmt(manifest.string_buf)}) catch bun.outOfMemory(); + if (version_buf.items.len > max_latest) max_latest = version_buf.items.len; + version_buf.clearRetainingCapacity(); + + outdated_ids.append( + bun.default_allocator, + .{ .package_id = package_id, .dep_id = @intCast(dep_id) }, + ) catch bun.outOfMemory(); + } + + if (outdated_ids.items.len == 0) return; + + const package_column_inside_length = @max("Packages".len, max_name); + const current_column_inside_length = @max("Current".len, max_current); + const update_column_inside_length = @max("Update".len, max_update); + const latest_column_inside_length = @max("Latest".len, max_latest); + + const column_left_pad = 1; + const column_right_pad = 1; + + const table = Table(4, "blue", column_left_pad, column_right_pad, enable_ansi_colors).init( + [_][]const u8{ + "Packages", + "Current", + "Update", + "Latest", + }, + [_]usize{ + package_column_inside_length, + current_column_inside_length, + update_column_inside_length, + latest_column_inside_length, + }, + ); + + table.printTopLineSeparator(); + table.printColumnNames(); + + inline for ( + .{ + Behavior{ .normal = true }, + Behavior{ .dev = true }, + Behavior{ .peer = true }, + Behavior{ .optional = true }, + }, + ) |group_behavior| { + for (outdated_ids.items) |ids| { + const package_id = ids.package_id; + const dep_id = ids.dep_id; + + const dep = dependencies[dep_id]; + if (@as(u8, @bitCast(group_behavior)) & @as(u8, @bitCast(dep.behavior)) == 0) continue; + + const package_name = 
pkg_names[package_id].slice(string_buf); + const resolution = pkg_resolutions[package_id]; + + var expired = false; + const manifest = manager.manifests.byNameAllowExpired( + manager.scopeForPackageName(package_name), + package_name, + &expired, + ) orelse continue; + + const latest = manifest.findByDistTag("latest") orelse continue; + const update = if (dep.version.tag == .npm) + manifest.findBestVersion(dep.version.value.npm.version, string_buf) orelse continue + else + manifest.findByDistTag(dep.version.value.dist_tag.tag.slice(string_buf)) orelse continue; + + table.printLineSeparator(); + + { + // package name + const behavior_str = if (dep.behavior.dev) + " (dev)" + else if (dep.behavior.peer) + " (peer)" + else if (dep.behavior.optional) + " (optional)" + else + ""; + + Output.pretty("{s}", .{table.verticalEdge()}); + for (0..column_left_pad) |_| Output.pretty(" ", .{}); + + Output.pretty("{s}{s}", .{ package_name, behavior_str }); + for (package_name.len + behavior_str.len..package_column_inside_length + column_right_pad) |_| Output.pretty(" ", .{}); + } + + { + // current version + Output.pretty("{s}", .{table.verticalEdge()}); + for (0..column_left_pad) |_| Output.pretty(" ", .{}); + + version_writer.print("{}", .{resolution.value.npm.version.fmt(string_buf)}) catch bun.outOfMemory(); + Output.pretty("{s}", .{version_buf.items}); + for (version_buf.items.len..current_column_inside_length + column_right_pad) |_| Output.pretty(" ", .{}); + version_buf.clearRetainingCapacity(); + } + + { + // update version + Output.pretty("{s}", .{table.verticalEdge()}); + for (0..column_left_pad) |_| Output.pretty(" ", .{}); + + version_writer.print("{}", .{update.version.fmt(manifest.string_buf)}) catch bun.outOfMemory(); + Output.pretty("{s}", .{update.version.diffFmt(resolution.value.npm.version, manifest.string_buf, string_buf)}); + for (version_buf.items.len..update_column_inside_length + column_right_pad) |_| Output.pretty(" ", .{}); + version_buf.clearRetainingCapacity(); + } + + { + // latest version + Output.pretty("{s}", .{table.verticalEdge()}); + for (0..column_left_pad) |_| Output.pretty(" ", .{}); + + version_writer.print("{}", .{latest.version.fmt(manifest.string_buf)}) catch bun.outOfMemory(); + Output.pretty("{s}", .{latest.version.diffFmt(resolution.value.npm.version, manifest.string_buf, string_buf)}); + for (version_buf.items.len..latest_column_inside_length + column_right_pad) |_| Output.pretty(" ", .{}); + version_buf.clearRetainingCapacity(); + } + + Output.pretty("{s}\n", .{table.verticalEdge()}); + } + } + + table.printBottomLineSeparator(); + } + + fn updateManifestsIfNecessary(manager: *PackageManager, comptime log_level: PackageManager.Options.LogLevel, root_pkg_deps: Lockfile.DependencySlice) !void { + const lockfile = manager.lockfile; + const resolutions = lockfile.buffers.resolutions.items; + const dependencies = lockfile.buffers.dependencies.items; + const string_buf = lockfile.buffers.string_bytes.items; + const packages = lockfile.packages.slice(); + const pkg_resolutions = packages.items(.resolution); + const pkg_names = packages.items(.name); + + for (root_pkg_deps.begin()..root_pkg_deps.end()) |dep_id| { + if (dep_id >= dependencies.len) continue; + const package_id = resolutions[dep_id]; + if (package_id == invalid_package_id) continue; + const dep = dependencies[dep_id]; + if (dep.version.tag != .npm and dep.version.tag != .dist_tag) continue; + const resolution: Install.Resolution = pkg_resolutions[package_id]; + if (resolution.tag != .npm) continue; + + 
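            // Ensure a manifest is cached for every npm dependency of the root package:
            // if the lookup below misses, a manifest network task is enqueued. The tasks
            // later run with `manifests_only` set, so nothing is extracted or installed here.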
const package_name = pkg_names[package_id].slice(string_buf); + _ = manager.manifests.byName( + manager.scopeForPackageName(package_name), + package_name, + ) orelse { + const task_id = Install.Task.Id.forManifest(package_name); + if (manager.hasCreatedNetworkTask(task_id, dep.behavior.optional)) continue; + + manager.startProgressBarIfNone(); + + var task = manager.getNetworkTask(); + task.* = .{ + .package_manager = &PackageManager.instance, + .callback = undefined, + .task_id = task_id, + .allocator = manager.allocator, + }; + try task.forManifest( + package_name, + manager.allocator, + manager.scopeForPackageName(package_name), + null, + dep.behavior.optional, + ); + + manager.enqueueNetworkTask(task); + }; + } + + manager.flushNetworkQueue(); + _ = manager.scheduleTasks(); + + const RunClosure = struct { + manager: *PackageManager, + err: ?anyerror = null, + pub fn isDone(closure: *@This()) bool { + if (closure.manager.pendingTaskCount() > 0) { + closure.manager.runTasks( + *PackageManager, + closure.manager, + .{ + .onExtract = {}, + .onResolve = {}, + .onPackageManifestError = {}, + .onPackageDownloadError = {}, + .progress_bar = true, + .manifests_only = true, + }, + true, + log_level, + ) catch |err| { + closure.err = err; + return true; + }; + } + + return closure.manager.pendingTaskCount() == 0; + } + }; + + var run_closure: RunClosure = .{ .manager = manager }; + manager.sleepUntil(&run_closure, &RunClosure.isDone); + + if (comptime log_level.showProgress()) { + manager.endProgressBar(); + Output.flush(); + } + + if (run_closure.err) |err| { + return err; + } + } +}; diff --git a/src/deps/diffz/DiffMatchPatch.zig b/src/deps/diffz/DiffMatchPatch.zig index 98a1d6e22c..e6dc26c17c 100644 --- a/src/deps/diffz/DiffMatchPatch.zig +++ b/src/deps/diffz/DiffMatchPatch.zig @@ -432,24 +432,24 @@ fn diffBisect( after: []const u8, deadline: u64, ) DiffError!DiffList { - const before_length = @as(isize, @intCast(before.len)); - const after_length = @as(isize, @intCast(after.len)); - const max_d = @as(isize, @intCast((before.len + after.len + 1) / 2)); + const before_length: isize = @intCast(before.len); + const after_length: isize = @intCast(after.len); + const max_d: isize = @intCast((before.len + after.len + 1) / 2); const v_offset = max_d; const v_length = 2 * max_d; var v1 = try ArrayListUnmanaged(isize).initCapacity(allocator, @as(usize, @intCast(v_length))); - v1.items.len = @as(usize, @intCast(v_length)); + v1.items.len = @intCast(v_length); var v2 = try ArrayListUnmanaged(isize).initCapacity(allocator, @as(usize, @intCast(v_length))); - v2.items.len = @as(usize, @intCast(v_length)); + v2.items.len = @intCast(v_length); var x: usize = 0; while (x < v_length) : (x += 1) { v1.items[x] = -1; v2.items[x] = -1; } - v1.items[@as(usize, @intCast(v_offset + 1))] = 0; - v2.items[@as(usize, @intCast(v_offset + 1))] = 0; + v1.items[@intCast(v_offset + 1)] = 0; + v2.items[@intCast(v_offset + 1)] = 0; const delta = before_length - after_length; // If the total number of characters is odd, then the front path will // collide with the reverse path. 
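The edits to DiffMatchPatch.zig in this and the following hunks are a mechanical cleanup: `@as(usize, @intCast(expr))` becomes `@intCast(expr)` wherever the destination type is already implied by context (an array index or a typed assignment), which `@intCast` has supported since Zig 0.11. A minimal sketch of the equivalence, independent of Bun's code and using only the standard library:

    const std = @import("std");

    test "inferred @intCast destination type" {
        const i: isize = 2;
        const items = [_]u8{ 10, 20, 30, 40 };
        // The index context implies usize, so the explicit @as wrapper is redundant.
        try std.testing.expectEqual(items[@as(usize, @intCast(i))], items[@intCast(i)]);
    }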
@@ -474,20 +474,20 @@ fn diffBisect( const k1_offset = v_offset + k1; var x1: isize = 0; if (k1 == -d or (k1 != d and - v1.items[@as(usize, @intCast(k1_offset - 1))] < v1.items[@as(usize, @intCast(k1_offset + 1))])) + v1.items[@intCast(k1_offset - 1)] < v1.items[@intCast(k1_offset + 1)])) { - x1 = v1.items[@as(usize, @intCast(k1_offset + 1))]; + x1 = v1.items[@intCast(k1_offset + 1)]; } else { - x1 = v1.items[@as(usize, @intCast(k1_offset - 1))] + 1; + x1 = v1.items[@intCast(k1_offset - 1)] + 1; } var y1 = x1 - k1; while (x1 < before_length and - y1 < after_length and before[@as(usize, @intCast(x1))] == after[@as(usize, @intCast(y1))]) + y1 < after_length and before[@intCast(x1)] == after[@intCast(y1)]) { x1 += 1; y1 += 1; } - v1.items[@as(usize, @intCast(k1_offset))] = x1; + v1.items[@intCast(k1_offset)] = x1; if (x1 > before_length) { // Ran off the right of the graph. k1end += 2; @@ -496,9 +496,9 @@ fn diffBisect( k1start += 2; } else if (front) { const k2_offset = v_offset + delta - k1; - if (k2_offset >= 0 and k2_offset < v_length and v2.items[@as(usize, @intCast(k2_offset))] != -1) { + if (k2_offset >= 0 and k2_offset < v_length and v2.items[@intCast(k2_offset)] != -1) { // Mirror x2 onto top-left coordinate system. - const x2 = before_length - v2.items[@as(usize, @intCast(k2_offset))]; + const x2 = before_length - v2.items[@intCast(k2_offset)]; if (x1 >= x2) { // Overlap detected. return dmp.diffBisectSplit(allocator, before, after, x1, y1, deadline); @@ -513,21 +513,21 @@ fn diffBisect( const k2_offset = v_offset + k2; var x2: isize = 0; if (k2 == -d or (k2 != d and - v2.items[@as(usize, @intCast(k2_offset - 1))] < v2.items[@as(usize, @intCast(k2_offset + 1))])) + v2.items[@intCast(k2_offset - 1)] < v2.items[@intCast(k2_offset + 1)])) { - x2 = v2.items[@as(usize, @intCast(k2_offset + 1))]; + x2 = v2.items[@intCast(k2_offset + 1)]; } else { - x2 = v2.items[@as(usize, @intCast(k2_offset - 1))] + 1; + x2 = v2.items[@intCast(k2_offset - 1)] + 1; } var y2: isize = x2 - k2; while (x2 < before_length and y2 < after_length and - before[@as(usize, @intCast(before_length - x2 - 1))] == - after[@as(usize, @intCast(after_length - y2 - 1))]) + before[@intCast(before_length - x2 - 1)] == + after[@intCast(after_length - y2 - 1)]) { x2 += 1; y2 += 1; } - v2.items[@as(usize, @intCast(k2_offset))] = x2; + v2.items[@intCast(k2_offset)] = x2; if (x2 > before_length) { // Ran off the left of the graph. k2end += 2; @@ -536,11 +536,11 @@ fn diffBisect( k2start += 2; } else if (!front) { const k1_offset = v_offset + delta - k2; - if (k1_offset >= 0 and k1_offset < v_length and v1.items[@as(usize, @intCast(k1_offset))] != -1) { - const x1 = v1.items[@as(usize, @intCast(k1_offset))]; + if (k1_offset >= 0 and k1_offset < v_length and v1.items[@intCast(k1_offset)] != -1) { + const x1 = v1.items[@intCast(k1_offset)]; const y1 = v_offset + x1 - k1_offset; // Mirror x2 onto top-left coordinate system. - x2 = before_length - v2.items[@as(usize, @intCast(k2_offset))]; + x2 = before_length - v2.items[@intCast(k2_offset)]; if (x1 >= x2) { // Overlap detected. 
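                        // The forward and reverse searches met at (x1, y1); split both inputs
                        // there and diff the two halves separately.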
return dmp.diffBisectSplit(allocator, before, after, x1, y1, deadline); @@ -574,10 +574,10 @@ fn diffBisectSplit( y: isize, deadline: u64, ) DiffError!DiffList { - const text1a = text1[0..@as(usize, @intCast(x))]; - const text2a = text2[0..@as(usize, @intCast(y))]; - const text1b = text1[@as(usize, @intCast(x))..]; - const text2b = text2[@as(usize, @intCast(y))..]; + const text1a = text1[0..@intCast(x)]; + const text2a = text2[0..@intCast(y)]; + const text1b = text1[@intCast(x)..]; + const text2b = text2[@intCast(y)..]; // Compute both diffs serially. var diffs = try dmp.diffInternal(allocator, text1a, text2a, false, deadline); @@ -728,22 +728,22 @@ fn diffLinesToCharsMunge( // Modifying text would create many large strings to garbage collect. while (line_end < @as(isize, @intCast(text.len)) - 1) { line_end = b: { - break :b @as(isize, @intCast(std.mem.indexOf(u8, text[@as(usize, @intCast(line_start))..], "\n") orelse - break :b @as(isize, @intCast(text.len - 1)))) + line_start; + break :b @as(isize, @intCast(std.mem.indexOf(u8, text[@intCast(line_start)..], "\n") orelse + break :b @intCast(text.len - 1))) + line_start; }; - line = text[@as(usize, @intCast(line_start)) .. @as(usize, @intCast(line_start)) + @as(usize, @intCast(line_end + 1 - line_start))]; + line = text[@intCast(line_start) .. @as(usize, @intCast(line_start)) + @as(usize, @intCast(line_end + 1 - line_start))]; if (line_hash.get(line)) |value| { - try chars.append(allocator, @as(u8, @intCast(value))); + try chars.append(allocator, @intCast(value)); } else { if (line_array.items.len == max_lines) { // Bail out at 255 because char 256 == char 0. - line = text[@as(usize, @intCast(line_start))..]; - line_end = @as(isize, @intCast(text.len)); + line = text[@intCast(line_start)..]; + line_end = @intCast(text.len); } try line_array.append(allocator, line); try line_hash.put(allocator, line, line_array.items.len - 1); - try chars.append(allocator, @as(u8, @intCast(line_array.items.len - 1))); + try chars.append(allocator, @intCast(line_array.items.len - 1)); } line_start = line_end + 1; } @@ -818,8 +818,9 @@ fn diffCleanupMerge(allocator: std.mem.Allocator, diffs: *DiffList) DiffError!vo var nt = try allocator.alloc(u8, diffs.items[ii].text.len + common_length); // try diffs.items[pointer - count_delete - count_insert - 1].text.append(allocator, text_insert.items[0..common_length]); - bun.copy(u8, nt, diffs.items[ii].text); - bun.copy(u8, nt[diffs.items[ii].text.len..], text_insert.items[0..common_length]); + const ot = diffs.items[ii].text; + @memcpy(nt[0..ot.len], ot); + @memcpy(nt[ot.len..], text_insert.items[0..common_length]); // allocator.free(diffs.items[ii].text); diffs.items[ii].text = nt; @@ -871,8 +872,9 @@ fn diffCleanupMerge(allocator: std.mem.Allocator, diffs: *DiffList) DiffError!vo var nt = try allocator.alloc(u8, diffs.items[pointer - 1].text.len + diffs.items[pointer].text.len); // try diffs.items[pointer - count_delete - count_insert - 1].text.append(allocator, text_insert.items[0..common_length]); - bun.copy(u8, nt, diffs.items[pointer - 1].text); - bun.copy(u8, nt[diffs.items[pointer - 1].text.len..], diffs.items[pointer].text); + const ot = diffs.items[pointer - 1].text; + @memcpy(nt[0..ot.len], ot); + @memcpy(nt[ot.len..], diffs.items[pointer].text); // allocator.free(diffs.items[pointer - 1].text); diffs.items[pointer - 1].text = nt; @@ -980,18 +982,18 @@ fn diffCleanupSemantic(allocator: std.mem.Allocator, diffs: *DiffList) DiffError var length_insertions2: usize = 0; var length_deletions2: usize = 0; while 
(pointer < diffs.items.len) { - if (diffs.items[@as(usize, @intCast(pointer))].operation == .equal) { // Equality found. + if (diffs.items[@intCast(pointer)].operation == .equal) { // Equality found. try equalities.append(allocator, pointer); length_insertions1 = length_insertions2; length_deletions1 = length_deletions2; length_insertions2 = 0; length_deletions2 = 0; - last_equality = diffs.items[@as(usize, @intCast(pointer))].text; + last_equality = diffs.items[@intCast(pointer)].text; } else { // an insertion or deletion - if (diffs.items[@as(usize, @intCast(pointer))].operation == .insert) { - length_insertions2 += diffs.items[@as(usize, @intCast(pointer))].text.len; + if (diffs.items[@intCast(pointer)].operation == .insert) { + length_insertions2 += diffs.items[@intCast(pointer)].text.len; } else { - length_deletions2 += diffs.items[@as(usize, @intCast(pointer))].text.len; + length_deletions2 += diffs.items[@intCast(pointer)].text.len; } // Eliminate an equality that is smaller or equal to the edits on both // sides of it. @@ -1002,11 +1004,11 @@ fn diffCleanupSemantic(allocator: std.mem.Allocator, diffs: *DiffList) DiffError // Duplicate record. try diffs.insert( allocator, - @as(usize, @intCast(equalities.items[equalities.items.len - 1])), + @intCast(equalities.items[equalities.items.len - 1]), Diff.init(.delete, try allocator.dupe(u8, last_equality.?)), ); // Change second copy to insert. - diffs.items[@as(usize, @intCast(equalities.items[equalities.items.len - 1] + 1))].operation = .insert; + diffs.items[@intCast(equalities.items[equalities.items.len - 1] + 1)].operation = .insert; // Throw away the equality we just deleted. _ = equalities.pop(); if (equalities.items.len > 0) { @@ -1038,11 +1040,11 @@ fn diffCleanupSemantic(allocator: std.mem.Allocator, diffs: *DiffList) DiffError // Only extract an overlap if it is as big as the edit ahead or behind it. pointer = 1; while (pointer < diffs.items.len) { - if (diffs.items[@as(usize, @intCast(pointer - 1))].operation == .delete and - diffs.items[@as(usize, @intCast(pointer))].operation == .insert) + if (diffs.items[@intCast(pointer - 1)].operation == .delete and + diffs.items[@intCast(pointer)].operation == .insert) { - const deletion = diffs.items[@as(usize, @intCast(pointer - 1))].text; - const insertion = diffs.items[@as(usize, @intCast(pointer))].text; + const deletion = diffs.items[@intCast(pointer - 1)].text; + const insertion = diffs.items[@intCast(pointer)].text; const overlap_length1: usize = diffCommonOverlap(deletion, insertion); const overlap_length2: usize = diffCommonOverlap(insertion, deletion); if (overlap_length1 >= overlap_length2) { @@ -1053,12 +1055,12 @@ fn diffCleanupSemantic(allocator: std.mem.Allocator, diffs: *DiffList) DiffError // Insert an equality and trim the surrounding edits. try diffs.insert( allocator, - @as(usize, @intCast(pointer)), + @intCast(pointer), Diff.init(.equal, try allocator.dupe(u8, insertion[0..overlap_length1])), ); - diffs.items[@as(usize, @intCast(pointer - 1))].text = + diffs.items[@intCast(pointer - 1)].text = try allocator.dupe(u8, deletion[0 .. deletion.len - overlap_length1]); - diffs.items[@as(usize, @intCast(pointer + 1))].text = + diffs.items[@intCast(pointer + 1)].text = try allocator.dupe(u8, insertion[overlap_length1..]); pointer += 1; } @@ -1070,14 +1072,14 @@ fn diffCleanupSemantic(allocator: std.mem.Allocator, diffs: *DiffList) DiffError // Insert an equality and swap and trim the surrounding edits. 
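                    // Reverse overlap: the tail of the insertion matches the head of the deletion,
                    // so the shared text becomes an equality and the delete/insert pair swaps roles.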
try diffs.insert( allocator, - @as(usize, @intCast(pointer)), + @intCast(pointer), Diff.init(.equal, try allocator.dupe(u8, deletion[0..overlap_length2])), ); - diffs.items[@as(usize, @intCast(pointer - 1))].operation = .insert; - diffs.items[@as(usize, @intCast(pointer - 1))].text = + diffs.items[@intCast(pointer - 1)].operation = .insert; + diffs.items[@intCast(pointer - 1)].text = try allocator.dupe(u8, insertion[0 .. insertion.len - overlap_length2]); - diffs.items[@as(usize, @intCast(pointer + 1))].operation = .delete; - diffs.items[@as(usize, @intCast(pointer + 1))].text = + diffs.items[@intCast(pointer + 1)].operation = .delete; + diffs.items[@intCast(pointer + 1)].text = try allocator.dupe(u8, deletion[overlap_length2..]); pointer += 1; } diff --git a/src/install/install.zig b/src/install/install.zig index 0f73dffe9d..6150c3a25f 100644 --- a/src/install/install.zig +++ b/src/install/install.zig @@ -141,7 +141,7 @@ const GlobalStringBuilder = @import("../string_builder.zig"); const SlicedString = Semver.SlicedString; const Repository = @import("./repository.zig").Repository; const Bin = @import("./bin.zig").Bin; -const Dependency = @import("./dependency.zig"); +pub const Dependency = @import("./dependency.zig"); const Behavior = @import("./dependency.zig").Behavior; const FolderResolution = @import("./resolvers/folder_resolver.zig").FolderResolution; @@ -5625,7 +5625,7 @@ pub const PackageManager = struct { } } - fn flushNetworkQueue(this: *PackageManager) void { + pub fn flushNetworkQueue(this: *PackageManager) void { var network = &this.network_task_fifo; while (network.readItem()) |network_task| { @@ -6323,6 +6323,10 @@ pub const PackageManager = struct { manager.getCacheDirectory(), ); + if (@hasField(@TypeOf(callbacks), "manifests_only") and callbacks.manifests_only) { + continue; + } + const dependency_list_entry = manager.task_queue.getEntry(task.task_id).?; const dependency_list = dependency_list_entry.value_ptr.*; @@ -6564,6 +6568,10 @@ pub const PackageManager = struct { try manager.manifests.insert(manifest.pkg.name.hash, manifest); + if (@hasField(@TypeOf(callbacks), "manifests_only") and callbacks.manifests_only) { + continue; + } + const dependency_list_entry = manager.task_queue.getEntry(task.id).?; const dependency_list = dependency_list_entry.value_ptr.*; dependency_list_entry.value_ptr.* = .{}; @@ -8191,6 +8199,7 @@ pub const PackageManager = struct { unlink, patch, @"patch-commit", + outdated, pub fn canGloballyInstallPackages(this: Subcommand) bool { return switch (this) { @@ -9063,7 +9072,6 @@ pub const PackageManager = struct { clap.parseParam("--backend Platform-specific optimizations for installing dependencies. " ++ platform_specific_backend_label) catch unreachable, clap.parseParam("--link-native-bins ... Link \"bin\" from a matching platform-specific \"optionalDependencies\" instead. Default: esbuild, turbo") catch unreachable, clap.parseParam("--concurrent-scripts Maximum number of concurrent jobs for lifecycle scripts (default 5)") catch unreachable, - // clap.parseParam("--omit ... Skip installing dependencies of a certain type. 
\"dev\", \"optional\", or \"peer\"") catch unreachable, clap.parseParam("-h, --help Print this help menu") catch unreachable, }; @@ -9143,6 +9151,7 @@ pub const PackageManager = struct { trusted: bool = false, no_summary: bool = false, latest: bool = false, + // json_output: bool = false, link_native_bins: []const string = &[_]string{}, @@ -9372,6 +9381,24 @@ pub const PackageManager = struct { Output.pretty("\n\n" ++ outro_text ++ "\n", .{}); Output.flush(); }, + .outdated => { + const intro_text = + \\Usage: bun outdated [flags] + ; + + const outro_text = + \\Examples: + \\ bun outdated + \\ + ; + + Output.pretty("\n" ++ intro_text ++ "\n", .{}); + Output.flush(); + Output.pretty("\nFlags:", .{}); + clap.simpleHelp(PackageManager.install_params); + Output.pretty("\n\n" ++ outro_text ++ "\n", .{}); + Output.flush(); + }, } } @@ -9388,6 +9415,7 @@ pub const PackageManager = struct { .unlink => unlink_params, .patch => patch_params, .@"patch-commit" => patch_commit_params, + .outdated => install_params, }; var diag = clap.Diagnostic{}; @@ -9423,6 +9451,12 @@ pub const PackageManager = struct { cli.trusted = args.flag("--trust"); cli.no_summary = args.flag("--no-summary"); + if (comptime subcommand == .outdated) { + // fake --dry-run, we don't actually resolve+clean the lockfile + cli.dry_run = true; + // cli.json_output = args.flag("--json"); + } + // link and unlink default to not saving, all others default to // saving. if (comptime subcommand == .link or subcommand == .unlink) { @@ -9453,8 +9487,6 @@ pub const PackageManager = struct { }; } - if (comptime subcommand == .@"patch-commit") {} - if (args.option("--config")) |opt| { cli.config = opt; } @@ -9468,23 +9500,9 @@ pub const PackageManager = struct { } if (args.option("--concurrent-scripts")) |concurrency| { - // var buf: [] cli.concurrent_scripts = std.fmt.parseInt(usize, concurrency, 10) catch null; } - // for (args.options("--omit")) |omit| { - // if (strings.eqlComptime(omit, "dev")) { - // cli.omit.dev = true; - // } else if (strings.eqlComptime(omit, "optional")) { - // cli.omit.optional = true; - // } else if (strings.eqlComptime(omit, "peer")) { - // cli.omit.peer = true; - // } else { - // Output.prettyErrorln("error: Invalid argument \"--omit\" must be one of \"dev\", \"optional\", or \"peer\". 
", .{}); - // Global.crash(); - // } - // } - if (args.option("--cwd")) |cwd_| { var buf: bun.PathBuffer = undefined; var buf2: bun.PathBuffer = undefined; @@ -9701,15 +9719,15 @@ pub const PackageManager = struct { if (err == error.MissingPackageJSON) { switch (subcommand) { .update => { - Output.prettyErrorln("No package.json, so nothing to update\n", .{}); + Output.prettyErrorln("No package.json, so nothing to update", .{}); Global.crash(); }, .remove => { - Output.prettyErrorln("No package.json, so nothing to remove\n", .{}); + Output.prettyErrorln("No package.json, so nothing to remove", .{}); Global.crash(); }, .patch, .@"patch-commit" => { - Output.prettyErrorln("No package.json, so nothing to patch\n", .{}); + Output.prettyErrorln("No package.json, so nothing to patch", .{}); Global.crash(); }, else => { diff --git a/src/install/npm.zig b/src/install/npm.zig index b5c3aafea4..b3bf80432d 100644 --- a/src/install/npm.zig +++ b/src/install/npm.zig @@ -909,10 +909,14 @@ pub const PackageManifest = struct { .{ .mode = .read_only }, ) catch return null; defer cache_file.close(); - const bytes = try cache_file.readToEndAllocOptions( + return loadByFile(allocator, scope, cache_file); + } + + pub fn loadByFile(allocator: std.mem.Allocator, scope: *const Registry.Scope, manifest_file: std.fs.File) !?PackageManifest { + const bytes = try manifest_file.readToEndAllocOptions( allocator, std.math.maxInt(u32), - cache_file.getEndPos() catch null, + manifest_file.getEndPos() catch null, @alignOf(u8), null, ); @@ -960,6 +964,96 @@ pub const PackageManifest = struct { } }; + pub const bindings = struct { + const JSC = bun.JSC; + const JSValue = JSC.JSValue; + const JSGlobalObject = JSC.JSGlobalObject; + const CallFrame = JSC.CallFrame; + const ZigString = JSC.ZigString; + + pub fn generate(global: *JSGlobalObject) JSValue { + const obj = JSValue.createEmptyObject(global, 1); + const parseManifestString = ZigString.static("parseManifest"); + obj.put(global, parseManifestString, JSC.createCallback(global, parseManifestString, 2, jsParseManifest)); + return obj; + } + + pub fn jsParseManifest(global: *JSGlobalObject, callFrame: *CallFrame) JSValue { + const args = callFrame.arguments(2).slice(); + if (args.len < 2 or !args[0].isString() or !args[1].isString()) { + global.throw("expected manifest filename and registry string arguments", .{}); + return .zero; + } + + const manifest_filename_str = args[0].toBunString(global); + defer manifest_filename_str.deref(); + + const manifest_filename = manifest_filename_str.toUTF8(bun.default_allocator); + defer manifest_filename.deinit(); + + const registry_str = args[1].toBunString(global); + defer registry_str.deref(); + + const registry = registry_str.toUTF8(bun.default_allocator); + defer registry.deinit(); + + const manifest_file = std.fs.openFileAbsolute(manifest_filename.slice(), .{}) catch |err| { + global.throw("failed to open manifest file \"{s}\": {s}", .{ manifest_filename.slice(), @errorName(err) }); + return .zero; + }; + defer manifest_file.close(); + + const scope: Registry.Scope = .{ + .url_hash = Registry.Scope.hash(strings.withoutTrailingSlash(registry.slice())), + .url = .{ + .host = strings.withoutTrailingSlash(strings.withoutPrefixComptime(registry.slice(), "http://")), + .hostname = strings.withoutTrailingSlash(strings.withoutPrefixComptime(registry.slice(), "http://")), + .href = registry.slice(), + .origin = strings.withoutTrailingSlash(registry.slice()), + .protocol = if (strings.indexOfChar(registry.slice(), ':')) |colon| 
registry.slice()[0..colon] else "", + }, + }; + + const maybe_package_manifest = Serializer.loadByFile(bun.default_allocator, &scope, manifest_file) catch |err| { + global.throw("failed to load manifest file: {s}", .{@errorName(err)}); + return .zero; + }; + + const package_manifest: PackageManifest = maybe_package_manifest orelse { + global.throw("manifest is invalid ", .{}); + return .zero; + }; + + var buf: std.ArrayListUnmanaged(u8) = .{}; + const writer = buf.writer(bun.default_allocator); + + // TODO: we can add more information. for now just versions is fine + + writer.print("{{\"name\":\"{s}\",\"versions\":[", .{package_manifest.name()}) catch { + global.throwOutOfMemory(); + return .zero; + }; + + for (package_manifest.versions, 0..) |version, i| { + if (i == package_manifest.versions.len - 1) + writer.print("\"{}\"]}}", .{version.fmt(package_manifest.string_buf)}) catch { + global.throwOutOfMemory(); + return .zero; + } + else + writer.print("\"{}\",", .{version.fmt(package_manifest.string_buf)}) catch { + global.throwOutOfMemory(); + return .zero; + }; + } + + var result = bun.String.fromUTF8(buf.items); + defer result.deref(); + + return result.toJSByParseJSON(global); + } + }; + pub fn str(self: *const PackageManifest, external: *const ExternalString) string { return external.slice(self.string_buf); } diff --git a/src/install/semver.zig b/src/install/semver.zig index 715cbb284b..7cf7804280 100644 --- a/src/install/semver.zig +++ b/src/install/semver.zig @@ -626,10 +626,175 @@ pub const Version = extern struct { return this.tag.build.len + this.tag.pre.len; } + pub const Formatter = struct { + version: Version, + input: string, + + pub fn format(formatter: Formatter, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { + const self = formatter.version; + try std.fmt.format(writer, "{?d}.{?d}.{?d}", .{ self.major, self.minor, self.patch }); + + if (self.tag.hasPre()) { + const pre = self.tag.pre.slice(formatter.input); + try writer.writeAll("-"); + try writer.writeAll(pre); + } + + if (self.tag.hasBuild()) { + const build = self.tag.build.slice(formatter.input); + try writer.writeAll("+"); + try writer.writeAll(build); + } + } + }; + pub fn fmt(this: Version, input: string) Formatter { return .{ .version = this, .input = input }; } + pub const DiffFormatter = struct { + version: Version, + buf: string, + other: Version, + other_buf: string, + + pub fn format(this: DiffFormatter, comptime fmt_: []const u8, options: std.fmt.FormatOptions, writer: anytype) !void { + if (!Output.enable_ansi_colors) { + // print normally if no colors + const formatter: Formatter = .{ .version = this.version, .input = this.buf }; + return Formatter.format(formatter, fmt_, options, writer); + } + + const diff = this.version.whichVersionIsDifferent(this.other, this.buf, this.other_buf) orelse .none; + + switch (diff) { + .major => try writer.print(Output.prettyFmt("{d}.{d}.{d}", true), .{ + this.version.major, this.version.minor, this.version.patch, + }), + .minor => { + if (this.version.major == 0) { + try writer.print(Output.prettyFmt("{d}.{d}.{d}", true), .{ + this.version.major, this.version.minor, this.version.patch, + }); + } else { + try writer.print(Output.prettyFmt("{d}.{d}.{d}", true), .{ + this.version.major, this.version.minor, this.version.patch, + }); + } + }, + .patch => { + if (this.version.major == 0 and this.version.minor == 0) { + try writer.print(Output.prettyFmt("{d}.{d}.{d}", true), .{ + this.version.major, this.version.minor, this.version.patch, + }); + } 
else { + try writer.print(Output.prettyFmt("{d}.{d}.{d}", true), .{ + this.version.major, this.version.minor, this.version.patch, + }); + } + }, + .none, .pre, .build => try writer.print(Output.prettyFmt("{d}.{d}.{d}", true), .{ + this.version.major, this.version.minor, this.version.patch, + }), + } + + // might be pre or build. loop through all characters, and insert on + // first diff. + + var set_color = false; + if (this.version.tag.hasPre()) { + if (this.other.tag.hasPre()) { + const pre = this.version.tag.pre.slice(this.buf); + const other_pre = this.other.tag.pre.slice(this.other_buf); + + var first = true; + for (pre, 0..) |c, i| { + if (!set_color and i < other_pre.len and c != other_pre[i]) { + set_color = true; + try writer.writeAll(Output.prettyFmt("", true)); + } + if (first) { + first = false; + try writer.writeByte('-'); + } + try writer.writeByte(c); + } + } else { + try writer.print(Output.prettyFmt("-{}", true), .{this.version.tag.pre.fmt(this.buf)}); + set_color = true; + } + } + + if (this.version.tag.hasBuild()) { + if (this.other.tag.hasBuild()) { + const build = this.version.tag.build.slice(this.buf); + const other_build = this.other.tag.build.slice(this.other_buf); + + var first = true; + for (build, 0..) |c, i| { + if (!set_color and i < other_build.len and c != other_build[i]) { + set_color = true; + try writer.writeAll(Output.prettyFmt("", true)); + } + if (first) { + first = false; + try writer.writeByte('+'); + } + try writer.writeByte(c); + } + } else { + if (!set_color) { + try writer.print(Output.prettyFmt("+{}", true), .{this.version.tag.build.fmt(this.buf)}); + } else { + try writer.print("+{}", .{this.version.tag.build.fmt(this.other_buf)}); + } + } + } + + try writer.writeAll(Output.prettyFmt("", true)); + } + }; + + pub fn diffFmt(this: Version, other: Version, this_buf: string, other_buf: string) DiffFormatter { + return .{ + .version = this, + .buf = this_buf, + .other = other, + .other_buf = other_buf, + }; + } + + pub const ChangedVersion = enum { + major, + minor, + patch, + pre, + build, + none, + }; + + pub fn whichVersionIsDifferent( + left: Version, + right: Version, + left_buf: string, + right_buf: string, + ) ?ChangedVersion { + if (left.major != right.major) return .major; + if (left.minor != right.minor) return .minor; + if (left.patch != right.patch) return .patch; + + if (left.tag.hasPre() != right.tag.hasPre()) return .pre; + if (!left.tag.hasPre() and !right.tag.hasPre()) return null; + if (left.tag.orderPre(right.tag, left_buf, right_buf) != .eq) return .pre; + + if (left.tag.hasBuild() != right.tag.hasBuild()) return .build; + if (!left.tag.hasBuild() and !right.tag.hasBuild()) return null; + return if (left.tag.build.order(&right.tag.build, left_buf, right_buf) != .eq) + .build + else + null; + } + pub fn count(this: *const Version, buf: []const u8, comptime StringBuilder: type, builder: StringBuilder) void { if (this.tag.hasPre() and !this.tag.pre.isInline()) builder.count(this.tag.pre.slice(buf)); if (this.tag.hasBuild() and !this.tag.build.isInline()) builder.count(this.tag.build.slice(buf)); @@ -689,28 +854,6 @@ pub const Version = extern struct { return bun.Wyhash.hash(0, bytes); } - pub const Formatter = struct { - version: Version, - input: string, - - pub fn format(formatter: Formatter, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void { - const self = formatter.version; - try std.fmt.format(writer, "{?d}.{?d}.{?d}", .{ self.major, self.minor, self.patch }); - - if (self.tag.hasPre()) { - const pre = 
self.tag.pre.slice(formatter.input); - try writer.writeAll("-"); - try writer.writeAll(pre); - } - - if (self.tag.hasBuild()) { - const build = self.tag.build.slice(formatter.input); - try writer.writeAll("+"); - try writer.writeAll(build); - } - } - }; - pub fn eql(lhs: Version, rhs: Version) bool { return lhs.major == rhs.major and lhs.minor == rhs.minor and lhs.patch == rhs.patch and rhs.tag.eql(lhs.tag); } diff --git a/src/js/internal-for-testing.ts b/src/js/internal-for-testing.ts index 002bee9cf9..e51917e33c 100644 --- a/src/js/internal-for-testing.ts +++ b/src/js/internal-for-testing.ts @@ -97,3 +97,10 @@ export const setSyntheticAllocationLimitForTesting: (limit: number) => number = "Bun__setSyntheticAllocationLimitForTesting", 1, ); + +export const npm_manifest_test_helpers = $zig("npm.zig", "PackageManifest.bindings.generate") as { + /** + * Returns the parsed manifest file. Currently only returns an array of available versions. + */ + parseManifest: (manifestFileName: string, registryUrl: string) => any; +}; diff --git a/test/cli/install/registry/__snapshots__/bun-install-registry.test.ts.snap b/test/cli/install/registry/__snapshots__/bun-install-registry.test.ts.snap index 0111493eb1..a505c6d547 100644 --- a/test/cli/install/registry/__snapshots__/bun-install-registry.test.ts.snap +++ b/test/cli/install/registry/__snapshots__/bun-install-registry.test.ts.snap @@ -45,3 +45,84 @@ what-bin@1.0.0: integrity sha512-sa99On1k5aDqCvpni/TQ6rLzYprUWBlb8fNwWOzbjDlM24fRr7FKDOuaBO/Y9WEIcZuzoPkCW5EkBCpflj8REQ== " `; + +exports[`outdated NO_COLOR works 1`] = ` +"|--------------------------------------| +| Packages | Current | Update | Latest | +|----------|---------|--------|--------| +| a-dep | 1.0.1 | 1.0.1 | 1.0.10 | +|--------------------------------------| +" +`; + +exports[`outdated normal dep, smaller than column title 1`] = ` +"┌──────────┬─────────┬────────┬────────┐ +│ Packages │ Current │ Update │ Latest │ +├──────────┼─────────┼────────┼────────┤ +│ no-deps │ 1.0.0 │ 1.0.0 │ 2.0.0 │ +└──────────┴─────────┴────────┴────────┘ +" +`; + +exports[`outdated normal dep, larger than column title 1`] = ` +"┌───────────────┬────────────────┬────────────────┬────────────────┐ +│ Packages │ Current │ Update │ Latest │ +├───────────────┼────────────────┼────────────────┼────────────────┤ +│ prereleases-1 │ 1.0.0-future.1 │ 1.0.0-future.1 │ 1.0.0-future.4 │ +└───────────────┴────────────────┴────────────────┴────────────────┘ +" +`; + +exports[`outdated dev dep, smaller than column title 1`] = ` +"┌───────────────┬─────────┬────────┬────────┐ +│ Packages │ Current │ Update │ Latest │ +├───────────────┼─────────┼────────┼────────┤ +│ no-deps (dev) │ 1.0.0 │ 1.0.0 │ 2.0.0 │ +└───────────────┴─────────┴────────┴────────┘ +" +`; + +exports[`outdated dev dep, larger than column title 1`] = ` +"┌─────────────────────┬────────────────┬────────────────┬────────────────┐ +│ Packages │ Current │ Update │ Latest │ +├─────────────────────┼────────────────┼────────────────┼────────────────┤ +│ prereleases-1 (dev) │ 1.0.0-future.1 │ 1.0.0-future.1 │ 1.0.0-future.4 │ +└─────────────────────┴────────────────┴────────────────┴────────────────┘ +" +`; + +exports[`outdated peer dep, smaller than column title 1`] = ` +"┌────────────────┬─────────┬────────┬────────┐ +│ Packages │ Current │ Update │ Latest │ +├────────────────┼─────────┼────────┼────────┤ +│ no-deps (peer) │ 1.0.0 │ 1.0.0 │ 2.0.0 │ +└────────────────┴─────────┴────────┴────────┘ +" +`; + +exports[`outdated peer dep, larger than column title 1`] = ` 
+"┌──────────────────────┬────────────────┬────────────────┬────────────────┐ +│ Packages │ Current │ Update │ Latest │ +├──────────────────────┼────────────────┼────────────────┼────────────────┤ +│ prereleases-1 (peer) │ 1.0.0-future.1 │ 1.0.0-future.1 │ 1.0.0-future.4 │ +└──────────────────────┴────────────────┴────────────────┴────────────────┘ +" +`; + +exports[`outdated optional dep, smaller than column title 1`] = ` +"┌────────────────────┬─────────┬────────┬────────┐ +│ Packages │ Current │ Update │ Latest │ +├────────────────────┼─────────┼────────┼────────┤ +│ no-deps (optional) │ 1.0.0 │ 1.0.0 │ 2.0.0 │ +└────────────────────┴─────────┴────────┴────────┘ +" +`; + +exports[`outdated optional dep, larger than column title 1`] = ` +"┌──────────────────────────┬────────────────┬────────────────┬────────────────┐ +│ Packages │ Current │ Update │ Latest │ +├──────────────────────────┼────────────────┼────────────────┼────────────────┤ +│ prereleases-1 (optional) │ 1.0.0-future.1 │ 1.0.0-future.1 │ 1.0.0-future.4 │ +└──────────────────────────┴────────────────┴────────────────┴────────────────┘ +" +`; diff --git a/test/cli/install/registry/bun-install-registry.test.ts b/test/cli/install/registry/bun-install-registry.test.ts index e9c618def7..d2af18d59e 100644 --- a/test/cli/install/registry/bun-install-registry.test.ts +++ b/test/cli/install/registry/bun-install-registry.test.ts @@ -14,6 +14,7 @@ import { tempDirWithFiles, randomPort, mergeWindowEnvs, + assertManifestsPopulated, } from "harness"; import { join, sep, resolve } from "path"; import { mkdirSync, copyFileSync } from "fs"; @@ -93,12 +94,16 @@ beforeEach(async () => { join(packageDir, "bunfig.toml"), ` [install] -cache = false +cache = "${join(packageDir, ".bun-cache")}" registry = "http://localhost:${port}/" `, ); }); +function registryUrl() { + return `http://localhost:${port}/`; +} + /** * Returns auth token */ @@ -436,6 +441,7 @@ describe("package.json indentation", async () => { }); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); const rootPackageJson = await file(join(packageDir, "package.json")).text(); @@ -453,6 +459,7 @@ describe("package.json indentation", async () => { })); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); expect(await file(join(packageDir, "package.json")).text()).toBe(rootPackageJson); const workspacePackageJson = await file(join(packageDir, "packages", "bar", "package.json")).text(); @@ -484,12 +491,8 @@ describe("optionalDependencies", () => { `${optional ? "warn" : "error"}: GET http://localhost:${port}/missing-tarball/-/missing-tarball-1.0.0.tgz - `, ); expect(await exited).toBe(optional ? 
0 : 1); - expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([ - ".bin", - ".cache", - "uses-what-bin", - "what-bin", - ]); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([".bin", "uses-what-bin", "what-bin"]); expect(await exists(join(packageDir, "node_modules", "uses-what-bin", "what-bin.txt"))).toBeTrue(); }); } @@ -510,6 +513,7 @@ describe("optionalDependencies", () => { allowWarnings: true, savesLockfile: !rootOptional, }); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); expect(err).toMatch(`warn: GET http://localhost:${port}/this-package-does-not-exist-in-the-registry - 404`); }); @@ -532,6 +536,8 @@ test("tarball override does not crash", async () => { await runBunInstall(env, packageDir); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await file(join(packageDir, "node_modules", "no-deps", "package.json")).json()).toMatchObject({ name: "no-deps", version: "2.0.0", @@ -571,6 +577,7 @@ describe.each(["--production", "without --production"])("%s", flag => { }); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); const initialHash = Bun.hash(await file(join(packageDir, "bun.lockb")).arrayBuffer()); @@ -589,6 +596,7 @@ describe.each(["--production", "without --production"])("%s", flag => { }); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); expect(await file(join(packageDir, "node_modules", "bin-change-dir", "package.json")).json()).toMatchObject({ name: "bin-change-dir", @@ -605,6 +613,7 @@ describe.each(["--production", "without --production"])("%s", flag => { }); expect(await exited).toBe(1); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); // We should not have saved bun.lockb expect(Bun.hash(await file(join(packageDir, "bun.lockb")).arrayBuffer())).toBe(initialHash); @@ -626,6 +635,7 @@ describe.each(["--production", "without --production"])("%s", flag => { }); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); // We should not have saved bun.lockb expect(Bun.hash(await file(join(packageDir, "bun.lockb")).arrayBuffer())).toBe(initialHash); @@ -668,6 +678,7 @@ describe.each(["--production", "without --production"])("%s", flag => { env, }); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); await Promise.all([ (async () => @@ -702,6 +713,8 @@ describe.each(["--production", "without --production"])("%s", flag => { }); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await file(join(packageDir, "node_modules", "bin-change-dir", "package.json")).json()).toMatchObject({ name: "bin-change-dir", version: prod ? 
"1.0.0" : "1.0.1", @@ -773,6 +786,7 @@ test("hardlinks on windows dont fail with long paths", async () => { "1 package installed", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); }); test("basic 1", async () => { @@ -811,6 +825,8 @@ test("basic 1", async () => { } as any); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + await rm(join(packageDir, "node_modules"), { recursive: true, force: true }); ({ stdout, stderr, exited } = spawn({ @@ -834,6 +850,7 @@ test("basic 1", async () => { "1 package installed", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); }); test("manifest cache will invalidate when registry changes", async () => { @@ -930,6 +947,7 @@ test("dependency from root satisfies range from dependency", async () => { version: "1.0.0", } as any); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); await rm(join(packageDir, "node_modules"), { recursive: true, force: true }); @@ -955,6 +973,7 @@ test("dependency from root satisfies range from dependency", async () => { "2 packages installed", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); }); test("duplicate names and versions in a manifest do not install incorrect packages", async () => { @@ -987,6 +1006,8 @@ test("duplicate names and versions in a manifest do not install incorrect packag ); await runBunInstall(env, packageDir); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + const lockfile = parseLockfile(packageDir); expect(lockfile).toMatchNodeModulesAt(packageDir); const results = await Promise.all([ @@ -1028,6 +1049,8 @@ describe("peerDependency index out of bounds", async () => { ); await runBunInstall(env, packageDir); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + const lockfile = parseLockfile(packageDir); expect(lockfile).toMatchNodeModulesAt(packageDir); const results = await Promise.all([ @@ -1044,6 +1067,7 @@ describe("peerDependency index out of bounds", async () => { await Promise.all([ rm(join(packageDir, "node_modules"), { recursive: true, force: true }), + rm(join(packageDir, ".bun-cache"), { recursive: true, force: true }), write( join(packageDir, "package.json"), JSON.stringify({ @@ -1056,6 +1080,8 @@ describe("peerDependency index out of bounds", async () => { ]); await runBunInstall(env, packageDir); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + const newLockfile = parseLockfile(packageDir); expect(newLockfile).toMatchNodeModulesAt(packageDir); const newResults = await Promise.all([ @@ -1091,6 +1117,7 @@ describe("peerDependency index out of bounds", async () => { ); await runBunInstall(env, packageDir); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); // update version and delete node_modules and cache await Promise.all([ @@ -1105,10 +1132,13 @@ describe("peerDependency index out of bounds", async () => { }), ), rm(join(packageDir, "node_modules"), { recursive: true, force: true }), + rm(join(packageDir, ".bun-cache"), { recursive: true, force: true }), ]); // this install would trigger the index out of bounds error await runBunInstall(env, packageDir); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + const lockfile = parseLockfile(packageDir); 
expect(lockfile).toMatchNodeModulesAt(packageDir); }); @@ -1153,6 +1183,7 @@ test("peerDependency in child npm dependency should not maintain old version whe version: "1.0.0", } as any); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); await writeFile( join(packageDir, "package.json"), @@ -1191,6 +1222,7 @@ test("peerDependency in child npm dependency should not maintain old version whe "1 package installed", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); }); test("package added after install", async () => { @@ -1230,6 +1262,7 @@ test("package added after install", async () => { version: "1.1.0", } as any); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); // add `no-deps` to root package.json with a smaller but still compatible // version for `one-range-dep`. @@ -1276,6 +1309,7 @@ test("package added after install", async () => { version: "1.1.0", } as any); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); await rm(join(packageDir, "node_modules"), { recursive: true, force: true }); @@ -1301,6 +1335,7 @@ test("package added after install", async () => { "3 packages installed", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); }); test("--production excludes devDependencies in workspaces", async () => { @@ -1345,11 +1380,13 @@ test("--production excludes devDependencies in workspaces", async () => { // without lockfile const expectedResults = [ - [".cache", "a-dep", "no-deps", "pkg1", "pkg2"], + ["a-dep", "no-deps", "pkg1", "pkg2"], { name: "no-deps", version: "1.0.0" }, { name: "a-dep", version: "1.0.2" }, ]; let { out } = await runBunInstall(env, packageDir, { production: true }); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ "", "+ no-deps@1.0.0", @@ -1367,6 +1404,8 @@ test("--production excludes devDependencies in workspaces", async () => { // create non-production lockfile, then install with --production await rm(join(packageDir, "node_modules"), { recursive: true, force: true }); ({ out } = await runBunInstall(env, packageDir)); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ "", "+ a1@1.0.0", @@ -1376,6 +1415,8 @@ test("--production excludes devDependencies in workspaces", async () => { ]); await rm(join(packageDir, "node_modules"), { recursive: true, force: true }); ({ out } = await runBunInstall(env, packageDir, { production: true })); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ "", "+ no-deps@1.0.0", @@ -1423,6 +1464,7 @@ test("--production without a lockfile will install and not save lockfile", async "1 package installed", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); expect(await exists(join(packageDir, "node_modules", "no-deps", "index.js"))).toBeTrue(); }); @@ -1445,6 +1487,8 @@ describe("binaries", () => { ]); await runBunInstall(env, packageDir); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(join(packageDir, "node_modules", ".bin", "what-bin")).toBeValidBin( join("..", "what-bin", 
"what-bin.js"), ); @@ -1487,6 +1531,7 @@ describe("binaries", () => { "", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); await rm(join(packageDir, "node_modules"), { recursive: true, force: true }); @@ -1515,6 +1560,7 @@ describe("binaries", () => { "", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); }); test("will link binaries for packages installed multiple times", async () => { @@ -1556,6 +1602,8 @@ describe("binaries", () => { // instead of using the root version (`1.5.0`). await runBunInstall(env, packageDir); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + const results = await Promise.all([ file(join(packageDir, "node_modules", "uses-what-bin", "what-bin.txt")).text(), file(join(packageDir, "packages", "pkg1", "node_modules", "uses-what-bin", "what-bin.txt")).text(), @@ -1601,6 +1649,8 @@ describe("binaries", () => { "1 package installed", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await file(join(packageDir, "bin-1.0.0.txt")).text()).toEqual("success!"); expect(await exists(join(packageDir, "bin-1.0.1.txt"))).toBeFalse(); @@ -1639,6 +1689,8 @@ describe("binaries", () => { "1 package installed", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await file(join(packageDir, "bin-1.0.0.txt")).text()).toEqual("success!"); expect(await file(join(packageDir, "bin-1.0.1.txt")).text()).toEqual("success!"); }); @@ -1775,6 +1827,7 @@ test("it should install with missing bun.lockb, node_modules, and/or cache", asy "", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); let lockfile = parseLockfile(packageDir); expect(lockfile).toMatchNodeModulesAt(packageDir); @@ -1817,6 +1870,7 @@ test("it should install with missing bun.lockb, node_modules, and/or cache", asy "", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); lockfile = parseLockfile(packageDir); expect(lockfile).toMatchNodeModulesAt(packageDir); @@ -1857,10 +1911,11 @@ test("it should install with missing bun.lockb, node_modules, and/or cache", asy ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); } // delete cache - await rm(join(packageDir, "node_modules", ".cache"), { recursive: true, force: true }); + await rm(join(packageDir, ".bun-cache"), { recursive: true, force: true }); ({ stdout, stderr, exited } = spawn({ cmd: [bunExe(), "install"], @@ -1881,10 +1936,11 @@ test("it should install with missing bun.lockb, node_modules, and/or cache", asy expect.stringContaining("Checked 19 installs across 23 packages (no changes)"), ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); // delete bun.lockb and cache await rm(join(packageDir, "bun.lockb"), { recursive: true, force: true }); - await rm(join(packageDir, "node_modules", ".cache"), { recursive: true, force: true }); + await rm(join(packageDir, ".bun-cache"), { recursive: true, force: true }); ({ stdout, stderr, exited } = spawn({ cmd: [bunExe(), "install"], @@ -1896,6 +1952,7 @@ test("it should install with missing bun.lockb, node_modules, and/or cache", asy })); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); 
[err, out] = await Promise.all([new Response(stderr).text(), new Response(stdout).text()]); @@ -2016,6 +2073,8 @@ describe("hoisting", async () => { expect(out).toContain(`+ ${dep}@${dependencies[dep]}`); } expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await file(join(packageDir, "node_modules", "a-dep", "package.json")).text()).toContain(expected); await rm(join(packageDir, "bun.lockb")); @@ -2174,6 +2233,8 @@ describe("hoisting", async () => { expect(out).toContain(`+ ${dep}@${dependencies[dep]}`); } expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await file(join(packageDir, "node_modules", "a-dep", "package.json")).text()).toContain(expected); await rm(join(packageDir, "bun.lockb")); @@ -2197,6 +2258,8 @@ describe("hoisting", async () => { } expect(out).not.toContain("package installed"); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await file(join(packageDir, "node_modules", "a-dep", "package.json")).text()).toContain(expected); await rm(join(packageDir, "node_modules"), { recursive: true, force: true }); @@ -2217,6 +2280,8 @@ describe("hoisting", async () => { expect(err).not.toContain("error:"); expect(out).not.toContain("package installed"); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await file(join(packageDir, "node_modules", "a-dep", "package.json")).text()).toContain(expected); }); } @@ -2259,6 +2324,8 @@ describe("hoisting", async () => { ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await file(join(packageDir, "node_modules", "no-deps", "package.json")).json()).toEqual({ name: "no-deps", version: "1.0.0", @@ -2306,6 +2373,8 @@ describe("hoisting", async () => { ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await file(join(packageDir, "node_modules", "no-deps", "package.json")).json()).toEqual({ name: "no-deps", version: "2.0.0", @@ -2384,6 +2453,7 @@ describe("hoisting", async () => { version: "2.0.0", }); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); }); test("hoisting/using incorrect peer dep on initial install", async () => { @@ -2423,6 +2493,8 @@ describe("hoisting", async () => { ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await file(join(packageDir, "node_modules", "no-deps", "package.json")).json()).toEqual({ name: "no-deps", version: "2.0.0", @@ -2470,6 +2542,8 @@ describe("hoisting", async () => { ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await file(join(packageDir, "node_modules", "no-deps", "package.json")).json()).toEqual({ name: "no-deps", version: "1.0.0", @@ -2519,6 +2593,7 @@ describe("hoisting", async () => { expect(err).not.toContain("not found"); expect(err).not.toContain("error:"); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); expect(await file(join(packageDir, "node_modules", "no-deps", "package.json")).json()).toEqual({ name: "no-deps", @@ -2581,6 +2656,7 @@ describe("hoisting", async () => { expect(err).not.toContain("not found"); expect(err).not.toContain("error:"); expect(await 
exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); expect(await file(join(packageDir, "node_modules", "no-deps", "package.json")).json()).toEqual({ name: "no-deps", @@ -2641,6 +2717,7 @@ describe("hoisting", async () => { expect(err).not.toContain("not found"); expect(err).not.toContain("error:"); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); expect(await file(join(packageDir, "node_modules", "no-deps", "package.json")).json()).toEqual({ name: "no-deps", @@ -2692,6 +2769,7 @@ describe("hoisting", async () => { expect(err).not.toContain("not found"); expect(err).not.toContain("error:"); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); expect(await file(join(packageDir, "node_modules", "no-deps", "package.json")).json()).toEqual({ name: "no-deps", @@ -2742,6 +2820,8 @@ describe("workspaces", async () => { "2 packages installed", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await file(join(packageDir, "package.json")).json()).toEqual({ name: "root", workspaces: ["foo"], @@ -2767,6 +2847,8 @@ describe("workspaces", async () => { "1 package installed", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await file(join(packageDir, "foo", "package.json")).json()).toEqual({ name: "foo", dependencies: { @@ -2793,13 +2875,9 @@ describe("workspaces", async () => { "3 packages installed", ]); expect(await exited).toBe(0); - expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([ - ".bin", - ".cache", - "foo", - "no-deps", - "what-bin", - ]); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + + expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([".bin", "foo", "no-deps", "what-bin"]); await rm(join(packageDir, "node_modules"), { recursive: true, force: true }); await rm(join(packageDir, "bun.lockb")); @@ -2819,13 +2897,9 @@ describe("workspaces", async () => { "3 packages installed", ]); expect(await exited).toBe(0); - expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([ - ".bin", - ".cache", - "foo", - "no-deps", - "what-bin", - ]); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + + expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([".bin", "foo", "no-deps", "what-bin"]); }); test("adding packages in workspaces", async () => { await writeFile( @@ -2875,6 +2949,8 @@ describe("workspaces", async () => { "3 packages installed", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await exists(join(packageDir, "node_modules", "bar"))).toBeTrue(); expect(await exists(join(packageDir, "node_modules", "boba"))).toBeTrue(); expect(await exists(join(packageDir, "node_modules", "pkg5"))).toBeTrue(); @@ -2896,6 +2972,8 @@ describe("workspaces", async () => { "1 package installed", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await file(join(packageDir, "package.json")).json()).toEqual({ name: "foo", workspaces: ["packages/*"], @@ -2922,6 +3000,8 @@ describe("workspaces", async () => { "3 packages installed", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await file(join(packageDir, 
"packages", "boba", "package.json")).json()).toEqual({ name: "boba", version: "1.0.0", @@ -2931,7 +3011,6 @@ describe("workspaces", async () => { }, }); expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([ - ".cache", "@types", "bar", "boba", @@ -2957,6 +3036,8 @@ describe("workspaces", async () => { "1 package installed", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await file(join(packageDir, "packages", "boba", "package.json")).json()).toEqual({ name: "boba", version: "1.0.0", @@ -2967,7 +3048,6 @@ describe("workspaces", async () => { }, }); expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([ - ".cache", "@types", "bar", "boba", @@ -3081,6 +3161,7 @@ describe("workspaces", async () => { "2 packages installed", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); ({ stdout, stderr, exited } = spawn({ cmd: [bunExe(), "install"], @@ -3101,6 +3182,7 @@ describe("workspaces", async () => { "Checked 2 installs across 3 packages (no changes)", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); await rm(join(packageDir, "node_modules"), { recursive: true, force: true }); await rm(join(packageDir, "bun.lockb"), { recursive: true, force: true }); @@ -3126,6 +3208,7 @@ describe("workspaces", async () => { "2 packages installed", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); ({ stdout, stderr, exited } = spawn({ cmd: [bunExe(), "install"], @@ -3146,6 +3229,7 @@ describe("workspaces", async () => { "Checked 2 installs across 3 packages (no changes)", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); }); } } @@ -3196,6 +3280,8 @@ describe("workspaces", async () => { "1 package installed", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await file(join(packageDir, "node_modules", "workspace-1", "package.json")).json()).toEqual({ name: "workspace-1", version: "1.0.0", @@ -3223,6 +3309,8 @@ describe("workspaces", async () => { "Checked 1 install across 2 packages (no changes)", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await file(join(packageDir, "node_modules", "workspace-1", "package.json")).json()).toEqual({ name: "workspace-1", version: "1.0.0", @@ -3278,6 +3366,8 @@ describe("workspaces", async () => { "Checked 1 install across 2 packages (no changes)", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await file(join(packageDir, "node_modules", "workspace-1", "package.json")).json()).toEqual({ name: "workspace-1", version: "1.0.0", @@ -3433,6 +3523,8 @@ describe("transitive file dependencies", () => { ]); var { out } = await runBunInstall(env, packageDir); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual(["", "14 packages installed"]); await checkHoistedFiles(); @@ -3442,11 +3534,15 @@ describe("transitive file dependencies", () => { // reinstall ({ out } = await runBunInstall(env, packageDir, { savesLockfile: false })); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, 
"").split(/\r?\n/)).toEqual(["", "14 packages installed"]); await checkHoistedFiles(); ({ out } = await runBunInstall(env, packageDir, { savesLockfile: false })); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual(["", "1 package installed"]); await checkHoistedFiles(); @@ -3456,6 +3552,8 @@ describe("transitive file dependencies", () => { // install from workspace ({ out } = await runBunInstall(env, join(packageDir, "pkg1"))); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ "", "+ @another-scope/file-dep@1.0.0", @@ -3473,11 +3571,15 @@ describe("transitive file dependencies", () => { expect(await exists(join(packageDir, "pkg1", "node_modules"))).toBeFalse(); ({ out } = await runBunInstall(env, join(packageDir, "pkg1"), { savesLockfile: false })); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual(["", "1 package installed"]); await rm(join(packageDir, "node_modules"), { recursive: true, force: true }); ({ out } = await runBunInstall(env, join(packageDir, "pkg1"), { savesLockfile: false })); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ "", "+ @another-scope/file-dep@1.0.0", @@ -3537,6 +3639,8 @@ describe("transitive file dependencies", () => { ]); var { out } = await runBunInstall(env, packageDir); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ "", "+ @another-scope/file-dep@1.0.1", @@ -3557,6 +3661,8 @@ describe("transitive file dependencies", () => { // reinstall ({ out } = await runBunInstall(env, packageDir, { savesLockfile: false })); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ "", "+ @another-scope/file-dep@1.0.1", @@ -3573,6 +3679,8 @@ describe("transitive file dependencies", () => { await checkUnhoistedFiles(); ({ out } = await runBunInstall(env, packageDir, { savesLockfile: false })); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual(["", "1 package installed"]); await checkUnhoistedFiles(); @@ -3583,6 +3691,8 @@ describe("transitive file dependencies", () => { // install from workspace ({ out } = await runBunInstall(env, join(packageDir, "pkg1"))); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ "", "+ @another-scope/file-dep@1.0.0", @@ -3599,12 +3709,16 @@ describe("transitive file dependencies", () => { await checkUnhoistedFiles(); ({ out } = await runBunInstall(env, join(packageDir, "pkg1"), { savesLockfile: false })); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual(["", "1 package installed"]); await rm(join(packageDir, "node_modules"), { recursive: true, force: true }); await rm(join(packageDir, "pkg1", "node_modules"), { recursive: true, force: true }); ({ out } = await runBunInstall(env, join(packageDir, "pkg1"), { savesLockfile: false })); + 
assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ "", "+ @another-scope/file-dep@1.0.0", @@ -3672,8 +3786,9 @@ describe("transitive file dependencies", () => { "13 packages installed", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([ - ".cache", "@another-scope", "@scoped", "aliased-file-dep", @@ -3702,6 +3817,7 @@ describe("transitive file dependencies", () => { expect(err).not.toContain("panic:"); expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual(["", "1 package installed"]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); await checkHoistedFiles(); @@ -3723,7 +3839,6 @@ describe("transitive file dependencies", () => { expect(err).not.toContain("error:"); expect(err).not.toContain("panic:"); expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([ - ".cache", "@another-scope", "@scoped", "aliased-file-dep", @@ -3733,6 +3848,7 @@ describe("transitive file dependencies", () => { "self-file-dep", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); await checkHoistedFiles(); }); @@ -3789,7 +3905,9 @@ describe("transitive file dependencies", () => { "2 packages installed", ]); expect(await exited).toBe(0); - expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([".cache", "pkg0", "pkg1"]); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + + expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual(["pkg0", "pkg1"]); expect(await file(join(packageDir, "node_modules", "pkg0", "package.json")).json()).toEqual({ name: "pkg0", version: "1.1.1", @@ -3816,6 +3934,7 @@ test("name from manifest is scoped and url encoded", async () => { ); await runBunInstall(env, packageDir); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); const files = await Promise.all([ file(join(packageDir, "node_modules", "@url", "encoding.2", "package.json")).json(), @@ -3844,6 +3963,8 @@ describe("update", () => { ); await runBunUpdate(env, packageDir); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await file(join(packageDir, "package.json")).json()).toEqual({ name: "foo", dependencies: { @@ -3870,6 +3991,8 @@ describe("update", () => { ); await runBunInstall(env, packageDir); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await file(join(packageDir, "node_modules", "a-dep", "package.json")).json()).toMatchObject({ name: "a-dep", version: "1.0.10", @@ -3877,6 +4000,8 @@ describe("update", () => { // Update without args, `latest` should stay await runBunUpdate(env, packageDir); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await file(join(packageDir, "package.json")).json()).toEqual({ name: "foo", dependencies: { @@ -3886,6 +4011,8 @@ describe("update", () => { // Update with `a-dep` and `--latest`, `latest` should be replaced with the installed version await runBunUpdate(env, packageDir, ["a-dep"]); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await file(join(packageDir, "package.json")).json()).toEqual({ name: "foo", dependencies: { @@ -3893,6 +4020,8 @@ describe("update", () => { }, }); await runBunUpdate(env, 
packageDir, ["--latest"]); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await file(join(packageDir, "package.json")).json()).toEqual({ name: "foo", dependencies: { @@ -3916,6 +4045,8 @@ describe("update", () => { }), ); async function check(version: string) { + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await file(join(packageDir, "node_modules", dependency, "package.json")).json()).toMatchObject({ name: "a-dep", version: version.replace(/.*@/, ""), @@ -3957,12 +4088,16 @@ describe("update", () => { ); await runBunInstall(env, packageDir); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await file(join(packageDir, "node_modules", "no-deps", "package.json")).json()).toMatchObject({ name: "no-deps", version: "1.0.1", }); let { out } = await runBunUpdate(env, packageDir); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(out).toEqual(["", "Checked 1 install across 2 packages (no changes)"]); expect(await file(join(packageDir, "package.json")).json()).toEqual({ name: "foo", @@ -3973,6 +4108,8 @@ describe("update", () => { // another update does not change anything (previously the version would update because it was changed to `^1.0.1`) ({ out } = await runBunUpdate(env, packageDir)); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(out).toEqual(["", "Checked 1 install across 2 packages (no changes)"]); expect(await file(join(packageDir, "package.json")).json()).toEqual({ name: "foo", @@ -4010,6 +4147,8 @@ describe("update", () => { if (latest) { await runBunUpdate(env, packageDir, ["--latest"]); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await file(join(packageDir, "package.json")).json()).toEqual({ name: "foo", dependencies: { @@ -4032,6 +4171,8 @@ describe("update", () => { }); } else { await runBunUpdate(env, packageDir); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await file(join(packageDir, "package.json")).json()).toEqual({ name: "foo", dependencies: { @@ -4097,12 +4238,16 @@ describe("update", () => { ); await runBunInstall(env, packageDir); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await file(join(packageDir, "node_modules", "no-deps", "package.json")).json()).toMatchObject({ name: "no-deps", version: "1.0.1", }); let { out } = await runBunUpdate(env, packageDir, ["no-deps"]); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(out).toEqual(["", "installed no-deps@1.0.1", "", expect.stringContaining("done"), ""]); expect(await file(join(packageDir, "package.json")).json()).toEqual({ name: "foo", @@ -4114,6 +4259,8 @@ describe("update", () => { // update with --latest should only change the update request and keep `~` ({ out } = await runBunUpdate(env, packageDir, ["no-deps", "--latest"])); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(out).toEqual(["", "installed no-deps@2.0.0", "", "1 package installed"]); expect(await file(join(packageDir, "package.json")).json()).toEqual({ name: "foo", @@ -4137,6 +4284,8 @@ describe("update", () => { ); await runBunUpdate(env, packageDir); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await file(join(packageDir, "package.json")).json()).toEqual({ name: "foo", dependencies: { @@ -4160,6 +4309,8 @@ describe("update", () => { ); await 
runBunUpdate(env, packageDir, ["aliased-dep"]); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await file(join(packageDir, "package.json")).json()).toEqual({ name: "foo", dependencies: { @@ -4183,6 +4334,7 @@ describe("update", () => { ); await runBunUpdate(env, packageDir); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); expect(await file(join(packageDir, "package.json")).json()).toMatchObject({ name: "foo", @@ -4197,6 +4349,8 @@ describe("update", () => { }); const { out } = await runBunUpdate(env, packageDir, ["--latest"]); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(out).toEqual(["", "^ aliased-dep 5.0.0-alpha.150 -> 5.0.0-alpha.153", "", "1 package installed"]); expect(await file(join(packageDir, "package.json")).json()).toMatchObject({ name: "foo", @@ -4218,6 +4372,8 @@ describe("update", () => { ); let { out } = await runBunUpdate(env, packageDir, ["--no-save"]); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(out).toEqual(["", "+ a-dep@1.0.1", "", "1 package installed"]); expect(await file(join(packageDir, "package.json")).json()).toEqual({ name: "foo", @@ -4237,6 +4393,8 @@ describe("update", () => { ); ({ out } = await runBunUpdate(env, packageDir, ["--no-save"])); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(out).toEqual(["", "+ a-dep@1.0.10", "", "1 package installed"]); expect(await file(join(packageDir, "package.json")).json()).toEqual({ name: "foo", @@ -4247,6 +4405,8 @@ describe("update", () => { // now save ({ out } = await runBunUpdate(env, packageDir)); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(out).toEqual(["", "Checked 1 install across 2 packages (no changes)"]); expect(await file(join(packageDir, "package.json")).json()).toEqual({ name: "foo", @@ -4267,6 +4427,8 @@ describe("update", () => { ); await runBunUpdate(env, packageDir); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await file(join(packageDir, "package.json")).json()).toEqual({ name: "foo", dependencies: { @@ -4278,6 +4440,8 @@ describe("update", () => { }); // update with package name does not update beyond version range await runBunUpdate(env, packageDir, ["dep-with-tags"]); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await file(join(packageDir, "package.json")).json()).toEqual({ name: "foo", dependencies: { @@ -4290,6 +4454,8 @@ describe("update", () => { // now update with a higher version range await runBunUpdate(env, packageDir, ["dep-with-tags@^2.0.0"]); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await file(join(packageDir, "package.json")).json()).toEqual({ name: "foo", dependencies: { @@ -4348,6 +4514,8 @@ describe("update", () => { // initial install, update root let { out } = await runBunUpdate(env, packageDir); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(out).toEqual([ "", "+ a-dep@1.0.10", @@ -4402,6 +4570,8 @@ describe("update", () => { "uses-what-bin", "a-dep@1.0.5", ])); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(out).toEqual([ "", "installed what-bin@1.5.0 with binaries:", @@ -4446,6 +4616,8 @@ describe("update", () => { }); ({ out } = await runBunUpdate(env, join(packageDir, "packages", "pkg1"), ["a-dep@^1.0.5"])); + assertManifestsPopulated(join(packageDir, ".bun-cache"), 
registryUrl()); + expect(out).toEqual(["", "installed a-dep@1.0.10", "", expect.stringMatching(/(\[\d+\.\d+m?s\])/), ""]); expect(await file(join(packageDir, "node_modules", "a-dep", "package.json")).json()).toMatchObject({ name: "a-dep", @@ -4483,6 +4655,8 @@ describe("update", () => { ); const { out } = args ? await runBunUpdate(env, packageDir, ["a-dep"]) : await runBunUpdate(env, packageDir); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(out).toEqual(["", args ? "installed a-dep@1.0.10" : "+ a-dep@1.0.10", "", "1 package installed"]); expect(await file(join(packageDir, "package.json")).json()).toEqual({ name: "foo", @@ -4531,6 +4705,7 @@ describe("update", () => { ); await runBunInstall(env, packageDir); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); expect(await file(join(packageDir, "node_modules", "no-deps", "package.json")).json()).toMatchObject({ version: "1.0.0", @@ -4544,6 +4719,8 @@ describe("update", () => { // update no-deps, no range, no change let { out } = await runBunUpdate(env, packageDir, ["no-deps"]); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(out).toEqual(["", "installed no-deps@1.0.0", "", expect.stringMatching(/(\[\d+\.\d+m?s\])/), ""]); expect(await file(join(packageDir, "node_modules", "no-deps", "package.json")).json()).toMatchObject({ version: "1.0.0", @@ -4551,6 +4728,8 @@ describe("update", () => { // update package that doesn't exist to workspace, should add to package.json ({ out } = await runBunUpdate(env, join(packageDir, "packages", "pkg1"), ["no-deps"])); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(out).toEqual(["", "installed no-deps@2.0.0", "", "1 package installed"]); expect(await file(join(packageDir, "node_modules", "no-deps", "package.json")).json()).toMatchObject({ version: "1.0.0", @@ -4577,6 +4756,8 @@ describe("update", () => { ); ({ out } = await runBunUpdate(env, packageDir, ["no-deps"])); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(out).toEqual(["", "installed no-deps@1.1.0", "", "1 package installed"]); expect(await file(join(packageDir, "node_modules", "no-deps", "package.json")).json()).toMatchObject({ version: "1.1.0", @@ -4595,6 +4776,7 @@ describe("update", () => { ); await runBunUpdate(env, packageDir, ["no-deps", "--latest"]); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); const files = await Promise.all([ file(join(packageDir, "node_modules", "no-deps", "package.json")).json(), @@ -4618,6 +4800,8 @@ test("packages dependening on each other with aliases does not infinitely loop", ); await runBunInstall(env, packageDir); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + const files = await Promise.all([ file(join(packageDir, "node_modules", "alias-loop-1", "package.json")).json(), file(join(packageDir, "node_modules", "alias-loop-2", "package.json")).json(), @@ -4664,6 +4848,8 @@ test("it should re-populate .bin folder if package is reinstalled", async () => "1 package installed", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + const bin = process.platform === "win32" ? 
"what-bin.exe" : "what-bin"; expect(Bun.which("what-bin", { PATH: join(packageDir, "node_modules", ".bin") })).toBe( join(packageDir, "node_modules", ".bin", bin), @@ -4698,6 +4884,8 @@ test("it should re-populate .bin folder if package is reinstalled", async () => expect.stringContaining("1 package installed"), ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(Bun.which("what-bin", { PATH: join(packageDir, "node_modules", ".bin") })).toBe( join(packageDir, "node_modules", ".bin", bin), ); @@ -4739,6 +4927,7 @@ test("one version with binary map", async () => { "1 package installed", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); expect(await readdirSorted(join(packageDir, "node_modules", ".bin"))).toHaveBins(["map-bin", "map_bin"]); expect(join(packageDir, "node_modules", ".bin", "map-bin")).toBeValidBin(join("..", "map-bin", "bin", "map-bin")); @@ -4777,6 +4966,7 @@ test("multiple versions with binary map", async () => { "1 package installed", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); expect(await readdirSorted(join(packageDir, "node_modules", ".bin"))).toHaveBins(["map-bin", "map_bin"]); expect(join(packageDir, "node_modules", ".bin", "map-bin")).toBeValidBin( @@ -4802,6 +4992,7 @@ test("duplicate dependency in optionalDependencies maintains sort order", async ); await runBunInstall(env, packageDir); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); const lockfile = parseLockfile(packageDir); expect(lockfile).toMatchNodeModulesAt(packageDir); @@ -4880,6 +5071,7 @@ test("missing package on reinstall, some with binaries", async () => { "", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); await rm(join(packageDir, "node_modules", "native"), { recursive: true, force: true }); await rm(join(packageDir, "node_modules", "left-pad"), { recursive: true, force: true }); @@ -4922,6 +5114,7 @@ test("missing package on reinstall, some with binaries", async () => { expect.stringContaining("7 packages installed"), ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); expect(await exists(join(packageDir, "node_modules", "native", "package.json"))).toBe(true); expect(await exists(join(packageDir, "node_modules", "left-pad", "package.json"))).toBe(true); @@ -4943,9 +5136,9 @@ test("missing package on reinstall, some with binaries", async () => { // waiter thread is only a thing on Linux. for (const forceWaiterThread of isLinux ? [false, true] : [false]) { - const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; describe("lifecycle scripts" + (forceWaiterThread ? " (waiter thread)" : ""), async () => { test("root package with all lifecycle scripts", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; const writeScript = async (name: string) => { const contents = ` import { writeFileSync, existsSync, rmSync } from "fs"; @@ -4999,6 +5192,8 @@ for (const forceWaiterThread of isLinux ? 
[false, true] : [false]) { expect(err).not.toContain("not found"); expect(err).not.toContain("error:"); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await exists(join(packageDir, "preinstall.txt"))).toBeTrue(); expect(await exists(join(packageDir, "install.txt"))).toBeTrue(); expect(await exists(join(packageDir, "postinstall.txt"))).toBeTrue(); @@ -5054,6 +5249,8 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { expect.stringContaining("1 package installed"), ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await file(join(packageDir, "preinstall.txt")).text()).toBe("preinstall exists!"); expect(await file(join(packageDir, "install.txt")).text()).toBe("install exists!"); expect(await file(join(packageDir, "postinstall.txt")).text()).toBe("postinstall exists!"); @@ -5094,6 +5291,8 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { env: testEnv, })); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + err = await new Response(stderr).text(); out = await new Response(stdout).text(); expect(err).toContain("Saved lockfile"); @@ -5120,6 +5319,8 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { }); test("workspace lifecycle scripts", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + await writeFile( join(packageDir, "package.json"), JSON.stringify({ @@ -5187,6 +5388,7 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { var out = await new Response(stdout).text(); expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual(["", "2 packages installed"]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); expect(await exists(join(packageDir, "preinstall.txt"))).toBeTrue(); expect(await exists(join(packageDir, "install.txt"))).toBeTrue(); @@ -5209,6 +5411,8 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { }); test("dependency lifecycle scripts run before root lifecycle scripts", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + const script = '[[ -f "./node_modules/uses-what-bin-slow/what-bin.txt" ]]'; await writeFile( join(packageDir, "package.json"), @@ -5248,9 +5452,12 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { expect(err).not.toContain("not found"); expect(err).not.toContain("error:"); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); }); test("install a dependency with lifecycle scripts, then add to trusted dependencies and install again", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + await writeFile( join(packageDir, "package.json"), JSON.stringify({ @@ -5287,6 +5494,7 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { "", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); const depDir = join(packageDir, "node_modules", "all-lifecycle-scripts"); expect(await exists(join(depDir, "preinstall.txt"))).toBeFalse(); @@ -5329,6 +5537,7 @@ for (const forceWaiterThread of isLinux ? 
[false, true] : [false]) { expect.stringContaining("Checked 1 install across 2 packages (no changes)"), ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); expect(await file(join(depDir, "preinstall.txt")).text()).toBe("preinstall!"); expect(await file(join(depDir, "install.txt")).text()).toBe("install!"); @@ -5339,6 +5548,8 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { }); test("adding a package without scripts to trustedDependencies", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + await writeFile( join(packageDir, "package.json"), JSON.stringify({ @@ -5372,7 +5583,9 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { "1 package installed", ]); expect(await exited).toBe(0); - expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([".bin", ".cache", "what-bin"]); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + + expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([".bin", "what-bin"]); const isWindows = process.platform === "win32"; const what_bin_bins = !isWindows ? ["what-bin"] : ["what-bin.bunx", "what-bin.exe"]; // prettier-ignore @@ -5397,6 +5610,7 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { "Checked 1 install across 2 packages (no changes)", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); await rm(join(packageDir, "node_modules"), { recursive: true, force: true }); await rm(join(packageDir, "bun.lockb")); @@ -5431,7 +5645,9 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { "1 package installed", ]); expect(await exited).toBe(0); - expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([".bin", ".cache", "what-bin"]); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + + expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([".bin", "what-bin"]); expect(await readdirSorted(join(packageDir, "node_modules", ".bin"))).toEqual(what_bin_bins); ({ stdout, stderr, exited } = spawn({ @@ -5453,7 +5669,9 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { "Checked 1 install across 2 packages (no changes)", ]); expect(await exited).toBe(0); - expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([".bin", ".cache", "what-bin"]); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + + expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([".bin", "what-bin"]); expect(await readdirSorted(join(packageDir, "node_modules", ".bin"))).toEqual(what_bin_bins); // add it to trusted dependencies @@ -5488,11 +5706,15 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { "Checked 1 install across 2 packages (no changes)", ]); expect(await exited).toBe(0); - expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([".bin", ".cache", "what-bin"]); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + + expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([".bin", "what-bin"]); expect(await readdirSorted(join(packageDir, "node_modules", ".bin"))).toEqual(what_bin_bins); }); test("lifecycle scripts run if node_modules is deleted", async () => { + const testEnv = forceWaiterThread ? 
{ ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + await writeFile( join(packageDir, "package.json"), JSON.stringify({ @@ -5526,7 +5748,10 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { expect(err).not.toContain("error:"); expect(await exists(join(packageDir, "node_modules", "lifecycle-postinstall", "postinstall.txt"))).toBeTrue(); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + await rm(join(packageDir, "node_modules"), { force: true, recursive: true }); + await rm(join(packageDir, ".bun-cache"), { recursive: true, force: true }); ({ stdout, stderr, exited } = spawn({ cmd: [bunExe(), "install"], cwd: packageDir, @@ -5548,9 +5773,12 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { expect(err).not.toContain("error:"); expect(await exists(join(packageDir, "node_modules", "lifecycle-postinstall", "postinstall.txt"))).toBeTrue(); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); }); test("INIT_CWD is set to the correct directory", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + await writeFile( join(packageDir, "package.json"), JSON.stringify({ @@ -5602,12 +5830,16 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { expect.stringContaining("1 package installed"), ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await file(join(packageDir, "test.txt")).text()).toBe(packageDir); expect(await file(join(packageDir, "node_modules/lifecycle-init-cwd/test.txt")).text()).toBe(packageDir); expect(await file(join(packageDir, "node_modules/another-init-cwd/test.txt")).text()).toBe(packageDir); }); test("failing lifecycle script should print output", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + await writeFile( join(packageDir, "package.json"), JSON.stringify({ @@ -5632,11 +5864,15 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { const err = await new Response(stderr).text(); expect(err).toContain("hello"); expect(await exited).toBe(1); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + const out = await new Response(stdout).text(); expect(out).toBeEmpty(); }); test("failing root lifecycle script should print output correctly", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + await writeFile( join(packageDir, "package.json"), JSON.stringify({ @@ -5657,6 +5893,8 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { }); expect(await exited).toBe(1); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await Bun.readableStreamToText(stdout)).toBeEmpty(); const err = await Bun.readableStreamToText(stderr); expect(err).toContain("error: Oops!"); @@ -5664,6 +5902,8 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { }); test("exit 0 in lifecycle scripts works", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + await writeFile( join(packageDir, "package.json"), JSON.stringify({ @@ -5697,9 +5937,12 @@ for (const forceWaiterThread of isLinux ? 
[false, true] : [false]) { "", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); }); test("--ignore-scripts should skip lifecycle scripts", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + await writeFile( join(packageDir, "package.json"), JSON.stringify({ @@ -5733,9 +5976,12 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { "1 package installed", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); }); test("it should add `node-gyp rebuild` as the `install` script when `install` and `postinstall` don't exist and `binding.gyp` exists in the root of the package", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + await writeFile( join(packageDir, "package.json"), JSON.stringify({ @@ -5769,10 +6015,14 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { expect.stringContaining("2 packages installed"), ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await exists(join(packageDir, "node_modules/binding-gyp-scripts/build.node"))).toBeTrue(); }); test("automatic node-gyp scripts should not run for untrusted dependencies, and should run after adding to `trustedDependencies`", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + const packageJSON: any = { name: "foo", version: "1.0.0", @@ -5806,6 +6056,8 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { "", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await exists(join(packageDir, "node_modules", "binding-gyp-scripts", "build.node"))).toBeFalse(); packageJSON.trustedDependencies = ["binding-gyp-scripts"]; @@ -5827,10 +6079,14 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { expect(err).not.toContain("warn:"); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await exists(join(packageDir, "node_modules", "binding-gyp-scripts", "build.node"))).toBeTrue(); }); test("automatic node-gyp scripts work in package root", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + await writeFile( join(packageDir, "package.json"), JSON.stringify({ @@ -5865,6 +6121,8 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { expect.stringContaining("1 package installed"), ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await exists(join(packageDir, "build.node"))).toBeTrue(); await rm(join(packageDir, "build.node")); @@ -5879,10 +6137,14 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { })); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await exists(join(packageDir, "build.node"))).toBeTrue(); }); test("auto node-gyp scripts work when scripts exists other than `install` and `preinstall`", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + await writeFile( join(packageDir, "package.json"), JSON.stringify({ @@ -5922,11 +6184,15 @@ for (const forceWaiterThread of isLinux ? 
[false, true] : [false]) { expect.stringContaining("1 package installed"), ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await exists(join(packageDir, "build.node"))).toBeTrue(); }); for (const script of ["install", "preinstall"]) { test(`does not add auto node-gyp script when ${script} script exists`, async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + const packageJSON: any = { name: "foo", version: "1.0.0", @@ -5961,11 +6227,15 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { expect.stringContaining("1 package installed"), ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await exists(join(packageDir, "build.node"))).toBeFalse(); }); } test("git dependencies also run `preprepare`, `prepare`, and `postprepare` scripts", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + await writeFile( join(packageDir, "package.json"), JSON.stringify({ @@ -6001,6 +6271,8 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { "", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await exists(join(packageDir, "node_modules", "lifecycle-install-test", "preprepare.txt"))).toBeFalse(); expect(await exists(join(packageDir, "node_modules", "lifecycle-install-test", "prepare.txt"))).toBeFalse(); expect(await exists(join(packageDir, "node_modules", "lifecycle-install-test", "postprepare.txt"))).toBeFalse(); @@ -6036,6 +6308,8 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { expect(err).not.toContain("warn:"); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await exists(join(packageDir, "node_modules", "lifecycle-install-test", "preprepare.txt"))).toBeTrue(); expect(await exists(join(packageDir, "node_modules", "lifecycle-install-test", "prepare.txt"))).toBeTrue(); expect(await exists(join(packageDir, "node_modules", "lifecycle-install-test", "postprepare.txt"))).toBeTrue(); @@ -6045,6 +6319,8 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { }); test("root lifecycle scripts should wait for dependency lifecycle scripts", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + await writeFile( join(packageDir, "package.json"), JSON.stringify({ @@ -6086,6 +6362,7 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { "2 packages installed", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); }); async function createPackagesWithScripts( @@ -6128,6 +6405,8 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { } test("reach max concurrent scripts", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + const scripts = { "preinstall": `${bunExe()} -e 'Bun.sleepSync(500)'`, }; @@ -6156,9 +6435,12 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { "4 packages installed", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); }); test("stress test", async () => { + const testEnv = forceWaiterThread ? 
{ ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + const dependenciesList = await createPackagesWithScripts(500, { "postinstall": `${bunExe()} --version`, }); @@ -6187,9 +6469,12 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); }); test("it should install and use correct binary version", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + // this should install `what-bin` in two places: // // - node_modules/.bin/what-bin@1.5.0 @@ -6232,6 +6517,8 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { "", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await file(join(packageDir, "node_modules", "what-bin", "what-bin.js")).text()).toContain( "what-bin@1.5.0", ); @@ -6274,6 +6561,8 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { expect(err).not.toContain("warn:"); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await file(join(packageDir, "node_modules", "what-bin", "what-bin.js")).text()).toContain( "what-bin@1.0.0", ); @@ -6305,9 +6594,12 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { expect.stringContaining("3 packages installed"), ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); }); test("node-gyp should always be available for lifecycle scripts", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + await writeFile( join(packageDir, "package.json"), JSON.stringify({ @@ -6336,10 +6628,13 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { // if node-gyp isn't available, it would return a non-zero exit code expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); }); // if this test fails, `electron` might be removed from the default list test("default trusted dependencies should work", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + await writeFile( join(packageDir, "package.json"), JSON.stringify({ @@ -6374,9 +6669,12 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { expect(out).not.toContain("Blocked"); expect(await exists(join(packageDir, "node_modules", "electron", "preinstall.txt"))).toBeTrue(); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); }); test("default trusted dependencies should not be used of trustedDependencies is populated", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + await writeFile( join(packageDir, "package.json"), JSON.stringify({ @@ -6416,10 +6714,13 @@ for (const forceWaiterThread of isLinux ? 
[false, true] : [false]) { "", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await exists(join(packageDir, "node_modules", "uses-what-bin", "what-bin.txt"))).toBeFalse(); expect(await exists(join(packageDir, "node_modules", "electron", "preinstall.txt"))).toBeTrue(); await rm(join(packageDir, "node_modules"), { recursive: true, force: true }); + await rm(join(packageDir, ".bun-cache"), { recursive: true, force: true }); await rm(join(packageDir, "bun.lockb")); await writeFile( @@ -6462,12 +6763,15 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { "", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); expect(await exists(join(packageDir, "node_modules", "uses-what-bin", "what-bin.txt"))).toBeTrue(); expect(await exists(join(packageDir, "node_modules", "electron", "preinstall.txt"))).toBeFalse(); }); test("does not run any scripts if trustedDependencies is an empty list", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + await writeFile( join(packageDir, "package.json"), JSON.stringify({ @@ -6506,11 +6810,15 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { "", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await exists(join(packageDir, "node_modules", "uses-what-bin", "what-bin.txt"))).toBeFalse(); expect(await exists(join(packageDir, "node_modules", "electron", "preinstall.txt"))).toBeFalse(); }); test("will run default trustedDependencies after install that didn't include them", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + await writeFile( join(packageDir, "package.json"), JSON.stringify({ @@ -6549,6 +6857,8 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { "", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await exists(join(packageDir, "node_modules", "electron", "preinstall.txt"))).toBeFalse(); await writeFile( @@ -6583,11 +6893,15 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { "Checked 1 install across 2 packages (no changes)", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await exists(join(packageDir, "node_modules", "electron", "preinstall.txt"))).toBeTrue(); }); describe("--trust", async () => { test("unhoisted untrusted scripts, none at root node_modules", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + await Promise.all([ write( join(packageDir, "package.json"), @@ -6612,6 +6926,7 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { ]); await runBunInstall(testEnv, packageDir); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); const results = await Promise.all([ exists(join(packageDir, "node_modules", "pkg1", "node_modules", "uses-what-bin")), @@ -6702,6 +7017,8 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { ]; for (const { label, packageJson } of trustTests) { test(label, async () => { + const testEnv = forceWaiterThread ? 
{ ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + await writeFile(join(packageDir, "package.json"), JSON.stringify(packageJson)); let { stdout, stderr, exited } = spawn({ @@ -6760,6 +7077,8 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { } describe("packages without lifecycle scripts", async () => { test("initial install", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + await writeFile( join(packageDir, "package.json"), JSON.stringify({ @@ -6798,6 +7117,8 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { }); }); test("already installed", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + await writeFile( join(packageDir, "package.json"), JSON.stringify({ @@ -6876,6 +7197,8 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { describe("updating trustedDependencies", async () => { test("existing trustedDependencies, unchanged trustedDependencies", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + await writeFile( join(packageDir, "package.json"), JSON.stringify({ @@ -6909,6 +7232,8 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { expect.stringContaining("2 packages installed"), ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await exists(join(packageDir, "node_modules", "uses-what-bin", "what-bin.txt"))).toBeTrue(); expect(await file(join(packageDir, "package.json")).json()).toEqual({ name: "foo", @@ -6939,9 +7264,12 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { "Checked 2 installs across 3 packages (no changes)", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); }); test("existing trustedDependencies, removing trustedDependencies", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + await writeFile( join(packageDir, "package.json"), JSON.stringify({ @@ -6975,6 +7303,8 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { expect.stringContaining("2 packages installed"), ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await exists(join(packageDir, "node_modules", "uses-what-bin", "what-bin.txt"))).toBeTrue(); expect(await file(join(packageDir, "package.json")).json()).toEqual({ name: "foo", @@ -7017,6 +7347,8 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { "Checked 2 installs across 3 packages (no changes)", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await file(join(packageDir, "package.json")).json()).toEqual({ name: "foo", dependencies: { @@ -7027,6 +7359,8 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { }); test("non-existent trustedDependencies, then adding it", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + await writeFile( join(packageDir, "package.json"), JSON.stringify({ @@ -7059,6 +7393,8 @@ for (const forceWaiterThread of isLinux ? 
[false, true] : [false]) { expect.stringContaining("1 package installed"), ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await exists(join(packageDir, "node_modules", "electron", "preinstall.txt"))).toBeTrue(); expect(await file(join(packageDir, "package.json")).json()).toEqual({ name: "foo", @@ -7103,11 +7439,15 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { "Checked 1 install across 2 packages (no changes)", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await exists(join(packageDir, "node_modules", "electron", "preinstall.txt"))).toBeTrue(); }); }); test("node -p should work in postinstall scripts", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + await writeFile( join(packageDir, "package.json"), JSON.stringify({ @@ -7139,11 +7479,14 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { expect(err).not.toContain("error:"); expect(err).not.toContain("warn:"); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); expect(await exists(join(packageDir, "postinstall.txt"))).toBeTrue(); }); test("ensureTempNodeGypScript works", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + await writeFile( join(packageDir, "package.json"), JSON.stringify({ @@ -7175,9 +7518,12 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { expect(err).not.toContain("error:"); expect(err).not.toContain("warn:"); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); }); test("bun pm trust and untrusted on missing package", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + await writeFile( join(packageDir, "package.json"), JSON.stringify({ @@ -7212,6 +7558,7 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { ]); expect(await exists(join(packageDir, "node_modules", "uses-what-bin", "what-bin.txt"))).toBeFalse(); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); // remove uses-what-bin from node_modules, bun pm trust and untrusted should handle missing package await rm(join(packageDir, "node_modules", "uses-what-bin"), { recursive: true, force: true }); @@ -7253,6 +7600,8 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { // for both cases, we need to update this test for (const withRm of [true, false]) { test(withRm ? "withRm" : "withoutRm", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + await writeFile( join(packageDir, "package.json"), JSON.stringify({ @@ -7289,6 +7638,8 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { "", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await exists(join(packageDir, "node_modules", "uses-what-bin", "what-bin.txt"))).toBeFalse(); ({ stdout, stderr, exited } = spawn({ @@ -7427,6 +7778,8 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { describe.if(!forceWaiterThread || process.platform === "linux")("does not use 100% cpu", async () => { test("install", async () => { + const testEnv = forceWaiterThread ? 
{ ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + await writeFile( join(packageDir, "package.json"), JSON.stringify({ @@ -7448,12 +7801,15 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { }); expect(await proc.exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); expect(proc.resourceUsage()?.cpuTime.total).toBeLessThan(750_000); }); // https://github.com/oven-sh/bun/issues/11252 test.todoIf(isWindows)("bun pm trust", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + const dep = isWindows ? "uses-what-bin-slow-window" : "uses-what-bin-slow"; await writeFile( join(packageDir, "package.json"), @@ -7475,6 +7831,7 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { }); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); expect(await exists(join(packageDir, "node_modules", dep, "what-bin.txt"))).toBeFalse(); @@ -7497,6 +7854,8 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { describe("stdout/stderr is inherited from root scripts during install", async () => { test("without packages", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + const exe = bunExe().replace(/\\/g, "\\\\"); await writeFile( join(packageDir, "package.json"), @@ -7541,9 +7900,12 @@ for (const forceWaiterThread of isLinux ? [false, true] : [false]) { "", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); }); test("with a package", async () => { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + const exe = bunExe().replace(/\\/g, "\\\\"); await writeFile( join(packageDir, "package.json"), @@ -7595,6 +7957,7 @@ for (const forceWaiterThread of isLinux ? 
[false, true] : [false]) { "", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); }); }); } @@ -7755,6 +8118,8 @@ test("it should be able to find binary in node_modules/.bin from parent director expect.stringContaining("1 package installed"), ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(await file(join(packageDir, "morePackageDir", "missing-bin.txt")).text()).toBe("missing-bin@WHAT"); }); @@ -7891,6 +8256,7 @@ describe("semver", () => { "1 package installed", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); }); } @@ -7919,6 +8285,8 @@ describe("semver", () => { var out = await new Response(stdout).text(); expect(err).toContain('InvalidDependencyVersion parsing version "pre-1 || pre-2"'); expect(await exited).toBe(1); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + expect(out).toBeEmpty(); }); }); @@ -8117,6 +8485,7 @@ for (let i = 0; i < prereleaseTests.length; i++) { version: expected, } as any); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); }); } }); @@ -8250,6 +8619,7 @@ for (let i = 0; i < prereleaseFailTests.length; i++) { expect(out).toBeEmpty(); expect(err).toContain(`No version matching "${depVersion}" found for specifier "${depName}"`); expect(await exited).toBe(1); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); }); } }); @@ -8291,7 +8661,6 @@ describe("yarn tests", () => { "6 packages installed", ]); expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([ - ".cache", "dragon-test-1-a", "dragon-test-1-b", "dragon-test-1-c", @@ -8317,6 +8686,7 @@ describe("yarn tests", () => { }, } as any); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); }); test("dragon test 2", async () => { @@ -8379,7 +8749,6 @@ describe("yarn tests", () => { "3 packages installed", ]); expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([ - ".cache", "dragon-test-2-a", "dragon-test-2-b", "no-deps", @@ -8390,6 +8759,7 @@ describe("yarn tests", () => { }); expect(await exists(join(packageDir, "dragon-test-2-a", "node_modules"))).toBeFalse(); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); }); test("dragon test 3", async () => { @@ -8425,7 +8795,6 @@ describe("yarn tests", () => { "3 packages installed", ]); expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([ - ".cache", "dragon-test-3-a", "dragon-test-3-b", "no-deps", @@ -8441,6 +8810,7 @@ describe("yarn tests", () => { }, } as any); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); }); test("dragon test 4", async () => { @@ -8485,12 +8855,7 @@ describe("yarn tests", () => { expect(err).not.toContain("not found"); expect(err).not.toContain("error:"); expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual(["", "3 packages installed"]); - expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([ - ".cache", - "my-workspace", - "no-deps", - "peer-deps", - ]); + expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual(["my-workspace", "no-deps", "peer-deps"]); expect(await file(join(packageDir, "node_modules", "no-deps", "package.json")).json()).toEqual({ name: "no-deps", version: "1.0.0", @@ -8503,6 
+8868,7 @@ describe("yarn tests", () => { }, } as any); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); }); test("dragon test 5", async () => { @@ -8559,7 +8925,6 @@ describe("yarn tests", () => { expect(err).not.toContain("error:"); expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual(["", "5 packages installed"]); expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([ - ".cache", "a", "b", "no-deps", @@ -8582,6 +8947,7 @@ describe("yarn tests", () => { version: "1.0.0", } as any); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); }); test.todo("dragon test 6", async () => { @@ -8697,6 +9063,7 @@ describe("yarn tests", () => { "7 packages installed", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); }); test.todo("dragon test 7", async () => { @@ -8738,6 +9105,7 @@ describe("yarn tests", () => { "7 packages installed", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); await writeFile( join(packageDir, "test.js"), @@ -8777,6 +9145,7 @@ describe("yarn tests", () => { ), ).toBeFalse(); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); }); test("dragon test 8", async () => { @@ -8818,6 +9187,7 @@ describe("yarn tests", () => { "4 packages installed", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); }); test("dragon test 9", async () => { @@ -8859,6 +9229,7 @@ describe("yarn tests", () => { await file(join(packageDir, "node_modules", "second", "package.json")).json(), ); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); }); test.todo("dragon test 10", async () => { @@ -8932,6 +9303,7 @@ describe("yarn tests", () => { " packages installed", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); }); test("dragon test 12", async () => { @@ -8986,7 +9358,6 @@ describe("yarn tests", () => { expect(err).not.toContain("not found"); expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual(["", "4 packages installed"]); expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([ - ".cache", "fake-peer-deps", "no-deps", "peer-deps", @@ -9001,6 +9372,7 @@ describe("yarn tests", () => { }, } as any); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); }); test("it should not warn when the peer dependency resolution is compatible", async () => { @@ -9038,8 +9410,9 @@ describe("yarn tests", () => { "", "2 packages installed", ]); - expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([".cache", "no-deps", "peer-deps-fixed"]); + expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual(["no-deps", "peer-deps-fixed"]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); }); test("it should warn when the peer dependency resolution is incompatible", async () => { @@ -9077,8 +9450,9 @@ describe("yarn tests", () => { "", "2 packages installed", ]); - expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([".cache", "no-deps", "peer-deps-fixed"]); + expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual(["no-deps", "peer-deps-fixed"]); 
expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); }); test("it should install in such a way that two identical packages with different peer dependencies are different instances", async () => { @@ -9117,6 +9491,7 @@ describe("yarn tests", () => { "5 packages installed", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); await writeFile( join(packageDir, "test.js"), @@ -9184,6 +9559,7 @@ describe("yarn tests", () => { expect(out).toBe("true\ntrue\nfalse\n"); expect(err).toBeEmpty(); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); }); test("it should install in such a way that two identical packages with the same peer dependencies are the same instances (simple)", async () => { @@ -9222,6 +9598,7 @@ describe("yarn tests", () => { "4 packages installed", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); await writeFile( join(packageDir, "test.js"), @@ -9245,6 +9622,7 @@ describe("yarn tests", () => { expect(out).toBe("true\n"); expect(err).toBeEmpty(); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); }); test("it should install in such a way that two identical packages with the same peer dependencies are the same instances (complex)", async () => { @@ -9285,6 +9663,7 @@ describe("yarn tests", () => { "4 packages installed", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); await writeFile( join(packageDir, "test.js"), @@ -9308,6 +9687,7 @@ describe("yarn tests", () => { expect(out).toBe("true\n"); expect(err).toBeEmpty(); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); }); test("it shouldn't deduplicate two packages with similar peer dependencies but different names", async () => { @@ -9348,6 +9728,7 @@ describe("yarn tests", () => { "3 packages installed", ]); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); await writeFile(join(packageDir, "test.js"), `console.log(require('peer-deps') === require('peer-deps-too'));`); @@ -9365,6 +9746,7 @@ describe("yarn tests", () => { expect(out).toBe("false\n"); expect(err).toBeEmpty(); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); }); test("it should reinstall and rebuild dependencies deleted by the user on the next install", async () => { @@ -9404,6 +9786,7 @@ describe("yarn tests", () => { ]); expect(await exists(join(packageDir, "node_modules/one-dep-scripted/success.txt"))).toBeTrue(); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); await rm(join(packageDir, "node_modules/one-dep-scripted"), { recursive: true, force: true }); @@ -9423,6 +9806,7 @@ describe("yarn tests", () => { expect(err).not.toContain("not found"); expect(await exists(join(packageDir, "node_modules/one-dep-scripted/success.txt"))).toBeTrue(); expect(await exited).toBe(0); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); }); }); @@ -9463,6 +9847,8 @@ test("tarball `./` prefix, duplicate directory with file, and empty directory", // the output because it is an empty directory. 
await runBunInstall(env, packageDir); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + const prefix = join(packageDir, "node_modules", "tarball-without-package-prefix"); const results = await Promise.all([ file(join(prefix, "package.json")).json(), @@ -9500,6 +9886,190 @@ test("tarball `./` prefix, duplicate directory with file, and empty directory", ); }); +describe("outdated", () => { + const edgeCaseTests = [ + { + description: "normal dep, smaller than column title", + packageJson: { + dependencies: { + "no-deps": "1.0.0", + }, + }, + }, + { + description: "normal dep, larger than column title", + packageJson: { + dependencies: { + "prereleases-1": "1.0.0-future.1", + }, + }, + }, + { + description: "dev dep, smaller than column title", + packageJson: { + devDependencies: { + "no-deps": "1.0.0", + }, + }, + }, + { + description: "dev dep, larger than column title", + packageJson: { + devDependencies: { + "prereleases-1": "1.0.0-future.1", + }, + }, + }, + { + description: "peer dep, smaller than column title", + packageJson: { + peerDependencies: { + "no-deps": "1.0.0", + }, + }, + }, + { + description: "peer dep, larger than column title", + packageJson: { + peerDependencies: { + "prereleases-1": "1.0.0-future.1", + }, + }, + }, + { + description: "optional dep, smaller than column title", + packageJson: { + optionalDependencies: { + "no-deps": "1.0.0", + }, + }, + }, + { + description: "optional dep, larger than column title", + packageJson: { + optionalDependencies: { + "prereleases-1": "1.0.0-future.1", + }, + }, + }, + ]; + + for (const { description, packageJson } of edgeCaseTests) { + test(description, async () => { + await write(join(packageDir, "package.json"), JSON.stringify(packageJson)); + await runBunInstall(env, packageDir); + assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl()); + + const testEnv = { ...env, FORCE_COLOR: "1" }; + const { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "outdated"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + env: testEnv, + }); + + expect(await exited).toBe(0); + + const err = await Bun.readableStreamToText(stderr); + expect(err).not.toContain("error:"); + expect(err).not.toContain("panic:"); + const out = await Bun.readableStreamToText(stdout); + expect(out).toMatchSnapshot(); + }); + } + test("in workspace", async () => { + await Promise.all([ + write( + join(packageDir, "package.json"), + JSON.stringify({ + name: "foo", + workspaces: ["pkg1"], + dependencies: { + "no-deps": "1.0.0", + }, + }), + ), + write( + join(packageDir, "pkg1", "package.json"), + JSON.stringify({ + name: "pkg1", + dependencies: { + "a-dep": "1.0.1", + }, + }), + ), + ]); + + await runBunInstall(env, packageDir); + + let { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "outdated"], + cwd: join(packageDir, "pkg1"), + stdout: "pipe", + stderr: "pipe", + env, + }); + + const err = await Bun.readableStreamToText(stderr); + expect(err).not.toContain("error:"); + expect(err).not.toContain("panic:"); + let out = await Bun.readableStreamToText(stdout); + expect(out).toContain("a-dep"); + expect(out).not.toContain("no-deps"); + expect(await exited).toBe(0); + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "outdated"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + env, + })); + + const err2 = await Bun.readableStreamToText(stderr); + expect(err2).not.toContain("error:"); + expect(err2).not.toContain("panic:"); + let out2 = await Bun.readableStreamToText(stdout); + 
expect(out2).toContain("no-deps"); + expect(out2).not.toContain("a-dep"); + expect(await exited).toBe(0); + }); + + test("NO_COLOR works", async () => { + await write( + join(packageDir, "package.json"), + JSON.stringify({ + name: "foo", + dependencies: { + "a-dep": "1.0.1", + }, + }), + ); + + await runBunInstall(env, packageDir); + + const testEnv = { ...env, NO_COLOR: "1" }; + const { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "outdated"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + env: testEnv, + }); + + const err = await Bun.readableStreamToText(stderr); + expect(err).not.toContain("error:"); + expect(err).not.toContain("panic:"); + + const out = await Bun.readableStreamToText(stdout); + expect(out).toContain("a-dep"); + expect(out).toMatchSnapshot(); + + expect(await exited).toBe(0); + }); +}); + // TODO: setup verdaccio to run across multiple test files, then move this and a few other describe // scopes (update, hoisting, ...) to other files // diff --git a/test/harness.ts b/test/harness.ts index 8993af1da7..22a57ffbf3 100644 --- a/test/harness.ts +++ b/test/harness.ts @@ -5,6 +5,8 @@ import { isAbsolute, join, dirname } from "path"; import fs, { openSync, closeSync } from "node:fs"; import os from "node:os"; import { heapStats } from "bun:jsc"; +import { npm_manifest_test_helpers } from "bun:internal-for-testing"; +const { parseManifest } = npm_manifest_test_helpers; type Awaitable<T> = T | Promise<T>; @@ -1265,6 +1267,15 @@ https://buildkite.com/docs/pipelines/security/secrets/buildkite-secrets`; return value; } +export function assertManifestsPopulated(absCachePath: string, registryUrl: string) { + for (const file of fs.readdirSync(absCachePath)) { + if (!file.endsWith(".npm")) continue; + + const manifest = parseManifest(join(absCachePath, file), registryUrl); + expect(manifest.versions.length).toBeGreaterThan(0); + } +} + // Make it easier to run some node tests. Object.defineProperty(globalThis, "gc", { value: Bun.gc,
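
For reference, the intended call pattern for the new harness helper is sketched below. This is a minimal illustration, not taken from the patch itself: it assumes the helper is re-exported from the shared "harness" module (as other helpers are) and that the `packageDir`, `registryUrl`, `runBunInstall`, and `env` fixtures from bun-install-registry.test.ts are in scope.

// sketch only — assumes the registry-test fixtures (packageDir, registryUrl, runBunInstall, env)
// and the "harness" import specifier used by the other bun test files
import { test } from "bun:test";
import { join } from "path";
import { assertManifestsPopulated } from "harness";

test("cached manifests parse after install (sketch)", async () => {
  // run an install against the local test registry
  await runBunInstall(env, packageDir);

  // every *.npm manifest written to the cache should parse and report at least one version
  assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl());
});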