diff --git a/build.zig b/build.zig index 88f2e6f05d..5a3eb7868a 100644 --- a/build.zig +++ b/build.zig @@ -1,7 +1,7 @@ const std = @import("std"); const pathRel = std.fs.path.relative; const builtin = @import("builtin"); -const Wyhash = @import("./src/wyhash.zig").Wyhash; +const Wyhash11 = @import("./src/wyhash.zig").Wyhash11; const zig_version = builtin.zig_version; @@ -84,7 +84,7 @@ const BunBuildOptions = struct { pub fn updateRuntime(this: *BunBuildOptions) anyerror!void { if (std.fs.cwd().openFile("src/runtime.out.js", .{ .mode = .read_only })) |file| { defer file.close(); - const runtime_hash = Wyhash.hash( + const runtime_hash = Wyhash11.hash( 0, try file.readToEndAlloc(std.heap.page_allocator, try file.getEndPos()), ); @@ -97,7 +97,7 @@ const BunBuildOptions = struct { if (std.fs.cwd().openFile("src/fallback.out.js", .{ .mode = .read_only })) |file| { defer file.close(); - const fallback_hash = Wyhash.hash( + const fallback_hash = Wyhash11.hash( 0, try file.readToEndAlloc(std.heap.page_allocator, try file.getEndPos()), ); diff --git a/src/StaticHashMap.zig b/src/StaticHashMap.zig index 4506cbfb1c..8065dfb8d2 100644 --- a/src/StaticHashMap.zig +++ b/src/StaticHashMap.zig @@ -258,6 +258,18 @@ fn HashMapMixin( return self.hasContext(key, undefined); } + pub fn hasWithHash(self: *Self, key_hash: u64) bool { + assert(key_hash != Self.empty_hash); + + for (self.entries[key_hash >> self.shift ..]) |entry| { + if (entry.hash >= key_hash) { + return entry.hash == key_hash; + } + } + + return false; + } + pub fn hasContext(self: *Self, key: K, ctx: Context) bool { const hash = ctx.hash(key); assert(hash != Self.empty_hash); diff --git a/src/bun.zig b/src/bun.zig index 0d22a12491..4322a1f32f 100644 --- a/src/bun.zig +++ b/src/bun.zig @@ -1805,7 +1805,7 @@ pub const WTF = struct { pub const ArenaAllocator = @import("./ArenaAllocator.zig").ArenaAllocator; -pub const Wyhash = @import("./wyhash.zig").Wyhash; +pub const Wyhash11 = @import("./wyhash.zig").Wyhash11; pub 
const RegularExpression = @import("./bun.js/bindings/RegularExpression.zig").RegularExpression; pub inline fn assertComptime() void { diff --git a/src/bundler/entry_points.zig b/src/bundler/entry_points.zig index da3f548412..16dcdd3257 100644 --- a/src/bundler/entry_points.zig +++ b/src/bundler/entry_points.zig @@ -242,7 +242,7 @@ pub const MacroEntryPoint = struct { source: logger.Source = undefined, pub fn generateID(entry_path: string, function_name: string, buf: []u8, len: *u32) i32 { - var hasher = bun.Wyhash.init(0); + var hasher = bun.Wyhash11.init(0); hasher.update(js_ast.Macro.namespaceWithColon); hasher.update(entry_path); hasher.update(function_name); diff --git a/src/cli.zig b/src/cli.zig index b120458329..94d572de59 100644 --- a/src/cli.zig +++ b/src/cli.zig @@ -1223,7 +1223,7 @@ pub const Command = struct { } var next_arg = ((args_iter.next()) orelse return .AutoCommand); - while (next_arg[0] == '-' and !(next_arg.len > 1 and next_arg[1] == 'e')) { + while (next_arg.len > 0 and next_arg[0] == '-' and !(next_arg.len > 1 and next_arg[1] == 'e')) { next_arg = ((args_iter.next()) orelse return .AutoCommand); } diff --git a/src/cli/bunx_command.zig b/src/cli/bunx_command.zig index d6706c85d9..ad36d2e49b 100644 --- a/src/cli/bunx_command.zig +++ b/src/cli/bunx_command.zig @@ -243,7 +243,8 @@ pub const BunxCommand = struct { const package_name = maybe_package_name.?; - var requests_buf = bun.PackageManager.UpdateRequest.Array{}; + var requests_buf = bun.PackageManager.UpdateRequest.Array.initCapacity(ctx.allocator, 64) catch bun.outOfMemory(); + defer requests_buf.deinit(ctx.allocator); const update_requests = bun.PackageManager.UpdateRequest.parse( ctx.allocator, ctx.log, diff --git a/src/cli/package_manager_command.zig b/src/cli/package_manager_command.zig index 2924822cbf..5cfb09ce91 100644 --- a/src/cli/package_manager_command.zig +++ b/src/cli/package_manager_command.zig @@ -1,9 +1,12 @@ const std = @import("std"); +const Progress = std.Progress; const 
bun = @import("root").bun; const Global = bun.Global; const Output = bun.Output; const string = bun.string; const strings = bun.strings; +const log = bun.log; +const logger = bun.logger; const Command = @import("../cli.zig").Command; const Fs = @import("../fs.zig"); const Dependency = @import("../install/dependency.zig"); @@ -15,17 +18,22 @@ const Lockfile = @import("../install/lockfile.zig"); const NodeModulesFolder = Lockfile.Tree.NodeModulesFolder; const Path = @import("../resolver/resolve_path.zig"); const String = @import("../install/semver.zig").String; +const ArrayIdentityContext = bun.ArrayIdentityContext; +const DepIdSet = std.ArrayHashMapUnmanaged(DependencyID, void, ArrayIdentityContext, false); +const Environment = bun.Environment; fn handleLoadLockfileErrors(load_lockfile: Lockfile.LoadFromDiskResult, pm: *PackageManager) void { if (load_lockfile == .not_found) { - if (pm.options.log_level != .silent) - Output.prettyErrorln("Lockfile not found", .{}); + if (pm.options.log_level != .silent) { + Output.errGeneric("Lockfile not found", .{}); + } Global.exit(1); } if (load_lockfile == .err) { - if (pm.options.log_level != .silent) - Output.prettyErrorln("Error loading lockfile: {s}", .{@errorName(load_lockfile.err.value)}); + if (pm.options.log_level != .silent) { + Output.errGeneric("Error loading lockfile: {s}", .{@errorName(load_lockfile.err.value)}); + } Global.exit(1); } } @@ -86,16 +94,20 @@ pub const PackageManagerCommand = struct { Output.prettyln( \\bun pm: Package manager utilities \\ - \\ bun pm bin print the path to bin folder - \\ bun pm -g bin print the global path to bin folder - \\ bun pm ls list the dependency tree according to the current lockfile - \\ bun pm ls --all list the entire dependency tree according to the current lockfile - \\ bun pm hash generate & print the hash of the current lockfile - \\ bun pm hash-string print the string used to hash the lockfile - \\ bun pm hash-print print the hash stored in the current lockfile - \\ 
bun pm cache print the path to the cache folder - \\ bun pm cache rm clear the cache - \\ bun pm migrate migrate another package manager's lockfile without installing anything + \\ bun pm bin print the path to bin folder + \\ -g bin print the global path to bin folder + \\ bun pm ls list the dependency tree according to the current lockfile + \\ --all list the entire dependency tree according to the current lockfile + \\ bun pm hash generate & print the hash of the current lockfile + \\ bun pm hash-string print the string used to hash the lockfile + \\ bun pm hash-print print the hash stored in the current lockfile + \\ bun pm cache print the path to the cache folder + \\ bun pm cache rm clear the cache + \\ bun pm migrate migrate another package manager's lockfile without installing anything + \\ bun pm trust(ed) print current trusted and untrusted dependencies with scripts + \\ \ trust dependencies and run scripts + \\ --all trust all untrusted dependencies and run their scripts + \\ --default print the list of default trusted dependencies \\ \\Learn more about these at https://bun.sh/docs/cli/pm \\ @@ -155,7 +167,7 @@ pub const PackageManagerCommand = struct { const load_lockfile = pm.lockfile.loadFromDisk(ctx.allocator, ctx.log, "bun.lockb"); handleLoadLockfileErrors(load_lockfile, pm); - _ = try pm.lockfile.hasMetaHashChanged(false); + _ = try pm.lockfile.hasMetaHashChanged(false, pm.lockfile.packages.len); Output.flush(); Output.disableBuffering(); @@ -175,7 +187,7 @@ pub const PackageManagerCommand = struct { const load_lockfile = pm.lockfile.loadFromDisk(ctx.allocator, ctx.log, "bun.lockb"); handleLoadLockfileErrors(load_lockfile, pm); - _ = try pm.lockfile.hasMetaHashChanged(true); + _ = try pm.lockfile.hasMetaHashChanged(true, pm.lockfile.packages.len); Global.exit(0); } else if (strings.eqlComptime(subcommand, "cache")) { var dir: [bun.MAX_PATH_BYTES]u8 = undefined; @@ -234,6 +246,379 @@ pub const PackageManagerCommand = struct { } 
Output.writer().writeAll(outpath) catch {}; + Global.exit(0); + } else if (strings.eqlComptime(subcommand, "trusted") or (strings.eqlComptime(subcommand, "trust"))) { + + // do this before loading lockfile because you don't need a lockfile + // to see the default trusted dependencies + if (strings.leftHasAnyInRight(args, &.{"--default"})) { + Output.print("Default trusted dependencies ({d}):\n", .{Lockfile.default_trusted_dependencies_list.len}); + for (Lockfile.default_trusted_dependencies_list) |name| { + Output.pretty(" - {s}\n", .{name}); + } + + Global.exit(0); + } + + const load_lockfile = pm.lockfile.loadFromDisk(ctx.allocator, ctx.log, "bun.lockb"); + handleLoadLockfileErrors(load_lockfile, pm); + try pm.updateLockfileIfNeeded(load_lockfile); + const buf = pm.lockfile.buffers.string_bytes.items; + + if (args.len == 2) { + // no args, print information for trusted and untrusted dependencies with scripts. + const packages = pm.lockfile.packages.slice(); + const metas: []Lockfile.Package.Meta = packages.items(.meta); + const scripts: []Lockfile.Package.Scripts = packages.items(.scripts); + const resolutions: []Install.Resolution = packages.items(.resolution); + + var trusted_set: std.AutoArrayHashMapUnmanaged(u64, String) = .{}; + var untrusted_dep_ids: std.AutoArrayHashMapUnmanaged(DependencyID, void) = .{}; + defer untrusted_dep_ids.deinit(ctx.allocator); + + // loop through all dependencies, print all the trusted packages, and collect + // untrusted packages with lifecycle scripts + for (pm.lockfile.buffers.dependencies.items, 0..) 
|dep, i| { + const dep_id: DependencyID = @intCast(i); + const package_id = pm.lockfile.buffers.resolutions.items[dep_id]; + if (package_id == Install.invalid_package_id) continue; + + // called alias because a dependency name is not always the package name + const alias = dep.name.slice(buf); + + if (metas[package_id].hasInstallScript()) { + if (pm.lockfile.hasTrustedDependency(alias)) { + // can't put alias directly because it might be inline + try trusted_set.put(ctx.allocator, dep.name_hash, dep.name); + } else { + try untrusted_dep_ids.put(ctx.allocator, dep_id, {}); + } + } + } + + { + const Sorter = struct { + buf: string, + pub fn lessThan(this: @This(), rhs: String, lhs: String) bool { + return rhs.order(&lhs, this.buf, this.buf) == .lt; + } + }; + const aliases = trusted_set.values(); + std.sort.pdq(String, aliases, Sorter{ .buf = buf }, Sorter.lessThan); + + Output.pretty("Trusted dependencies ({d}):\n", .{aliases.len}); + for (aliases) |alias| { + Output.pretty(" - {s}\n", .{alias.slice(buf)}); + } else { + Output.pretty("\n", .{}); + } + + trusted_set.deinit(ctx.allocator); + } + + if (untrusted_dep_ids.count() == 0) { + Output.print("Untrusted dependencies (0):\n", .{}); + Global.exit(0); + } + + var untrusted_with_scripts: std.StringArrayHashMapUnmanaged(std.ArrayListUnmanaged(struct { + dep_id: DependencyID, + scripts_list: Lockfile.Package.Scripts.List, + })) = .{}; + defer untrusted_with_scripts.deinit(ctx.allocator); + + var tree_iterator = Lockfile.Tree.Iterator.init(pm.lockfile); + + const top_level_without_trailing_slash = strings.withoutTrailingSlash(Fs.FileSystem.instance.top_level_dir); + var abs_node_modules_path: std.ArrayListUnmanaged(u8) = .{}; + defer abs_node_modules_path.deinit(ctx.allocator); + try abs_node_modules_path.appendSlice(ctx.allocator, top_level_without_trailing_slash); + try abs_node_modules_path.append(ctx.allocator, std.fs.path.sep); + + while (tree_iterator.nextNodeModulesFolder(null)) |node_modules| { + // + 1 
because we want to keep the path separator + abs_node_modules_path.items.len = top_level_without_trailing_slash.len + 1; + try abs_node_modules_path.appendSlice(ctx.allocator, node_modules.relative_path); + + var node_modules_dir = bun.openDir(std.fs.cwd(), node_modules.relative_path) catch |err| { + if (err == error.ENOENT) continue; + return err; + }; + defer node_modules_dir.close(); + + for (node_modules.dependencies) |dep_id| { + if (untrusted_dep_ids.contains(dep_id)) { + const dep = pm.lockfile.buffers.dependencies.items[dep_id]; + const alias = dep.name.slice(buf); + const package_id = pm.lockfile.buffers.resolutions.items[dep_id]; + const resolution = &resolutions[package_id]; + var package_scripts = scripts[package_id]; + + if (try package_scripts.getList( + pm.log, + pm.lockfile, + node_modules_dir, + abs_node_modules_path.items, + alias, + resolution, + )) |scripts_list| { + if (scripts_list.items.len == 0) continue; + const key = try ctx.allocator.dupe(u8, alias); + const gop = try untrusted_with_scripts.getOrPut(ctx.allocator, key); + if (!gop.found_existing) { + gop.value_ptr.* = .{}; + } else { + ctx.allocator.free(key); + } + + try gop.value_ptr.append(ctx.allocator, .{ .dep_id = dep_id, .scripts_list = scripts_list }); + } + } + } + } + + if (untrusted_with_scripts.count() == 0) { + Output.print("Untrusted dependencies (0):\n", .{}); + Global.exit(0); + } + + const Sorter = struct { + pub fn lessThan(_: void, rhs: string, lhs: string) bool { + return std.mem.order(u8, rhs, lhs) == .lt; + } + }; + + const aliases = untrusted_with_scripts.keys(); + std.sort.pdq(string, aliases, {}, Sorter.lessThan); + try untrusted_with_scripts.reIndex(ctx.allocator); + + Output.print("Untrusted dependencies ({d}):\n", .{aliases.len}); + + for (aliases) |alias| { + const _entry = untrusted_with_scripts.get(alias); + + if (comptime bun.Environment.allow_assert) { + std.debug.assert(_entry != null); + } + + if (_entry) |entry| { + if (comptime 
bun.Environment.allow_assert) { + std.debug.assert(entry.items.len > 0); + } + + Output.pretty(" - {s}\n", .{alias}); + } + } + + Global.exit(0); + } + + // this isn't great, flags could be in this slice, but it works + const packages_to_trust = args[2..]; + const trust_all = strings.leftHasAnyInRight(args, &.{ "-a", "--all" }); + + const packages = pm.lockfile.packages.slice(); + const metas: []Lockfile.Package.Meta = packages.items(.meta); + const resolutions: []Install.Resolution = packages.items(.resolution); + const scripts: []Lockfile.Package.Scripts = packages.items(.scripts); + + var untrusted_dep_ids: DepIdSet = .{}; + defer untrusted_dep_ids.deinit(ctx.allocator); + + // .1 go through all installed dependencies and find untrusted ones with scripts + // from packages through cli, or all if --all. + // .2 iterate through node_modules folder and spawn lifecycle scripts for each + // untrusted dependency from step 1. + // .3 add the untrusted dependencies to package.json and lockfile.trusted_dependencies. + + for (pm.lockfile.buffers.dependencies.items, 0..) |dep, i| { + const dep_id: u32 = @intCast(i); + const package_id = pm.lockfile.buffers.resolutions.items[dep_id]; + if (package_id == Install.invalid_package_id) continue; + + const alias = dep.name.slice(buf); + + if (metas[package_id].hasInstallScript()) { + if (trust_all and !pm.lockfile.hasTrustedDependency(alias)) { + try untrusted_dep_ids.put(ctx.allocator, dep_id, {}); + continue; + } + + for (packages_to_trust) |package_name_from_cli| { + if (strings.eqlLong(package_name_from_cli, alias, true) and !pm.lockfile.hasTrustedDependency(alias)) { + try untrusted_dep_ids.put(ctx.allocator, dep_id, {}); + continue; + } + } + } + } + + if (untrusted_dep_ids.count() == 0) Global.exit(0); + + // instead of running them right away, we group scripts by depth in the node_modules + // file structure, then run in descending order. 
this ensures lifecycle scripts are run + // in the correct order as they would during a normal install + var tree_iter = Lockfile.Tree.Iterator.init(pm.lockfile); + + const top_level_without_trailing_slash = strings.withoutTrailingSlash(Fs.FileSystem.instance.top_level_dir); + var abs_node_modules_path: std.ArrayListUnmanaged(u8) = .{}; + defer abs_node_modules_path.deinit(ctx.allocator); + try abs_node_modules_path.appendSlice(ctx.allocator, top_level_without_trailing_slash); + try abs_node_modules_path.append(ctx.allocator, std.fs.path.sep); + + var package_names_to_add: std.StringArrayHashMapUnmanaged(void) = .{}; + var scripts_at_depth: std.AutoArrayHashMapUnmanaged(usize, std.ArrayListUnmanaged(Lockfile.Package.Scripts.List)) = .{}; + defer { + var iter = scripts_at_depth.iterator(); + while (iter.next()) |entry| { + for (entry.value_ptr.items) |item| { + item.deinit(ctx.allocator); + } + entry.value_ptr.deinit(ctx.allocator); + } + scripts_at_depth.deinit(ctx.allocator); + package_names_to_add.deinit(ctx.allocator); + } + + var scripts_count: usize = 0; + + while (tree_iter.nextNodeModulesFolder(null)) |node_modules| { + abs_node_modules_path.items.len = top_level_without_trailing_slash.len + 1; + try abs_node_modules_path.appendSlice(ctx.allocator, node_modules.relative_path); + + var node_modules_dir = bun.openDir(std.fs.cwd(), node_modules.relative_path) catch |err| { + if (err == error.ENOENT) continue; + return err; + }; + defer node_modules_dir.close(); + + for (node_modules.dependencies) |dep_id| { + if (untrusted_dep_ids.contains(dep_id)) { + const dep = pm.lockfile.buffers.dependencies.items[dep_id]; + const alias = dep.name.slice(buf); + const package_id = pm.lockfile.buffers.resolutions.items[dep_id]; + const resolution = &resolutions[package_id]; + var package_scripts = scripts[package_id]; + + if (try package_scripts.getList( + pm.log, + pm.lockfile, + node_modules_dir, + abs_node_modules_path.items, + alias, + resolution, + )) |scripts_list| { + 
const entry = try scripts_at_depth.getOrPut(ctx.allocator, node_modules.depth); + if (!entry.found_existing) { + entry.value_ptr.* = .{}; + } + scripts_count += scripts_list.total; + try entry.value_ptr.append(ctx.allocator, scripts_list); + try package_names_to_add.put(ctx.allocator, try ctx.allocator.dupe(u8, alias), {}); + } + } + } + } + + if (scripts_at_depth.count() == 0) Global.exit(0); + + var root_node: *Progress.Node = undefined; + var scripts_node: Progress.Node = undefined; + var progress = &pm.progress; + + if (pm.options.log_level.showProgress()) { + root_node = progress.start("", 0); + progress.supports_ansi_escape_codes = Output.enable_ansi_colors_stderr; + + scripts_node = root_node.start(PackageManager.ProgressStrings.script(), scripts_count); + pm.scripts_node = &scripts_node; + } + + var depth = scripts_at_depth.count(); + while (depth > 0) { + depth -= 1; + const _entry = scripts_at_depth.get(depth); + if (comptime bun.Environment.allow_assert) { + std.debug.assert(_entry != null); + } + if (_entry) |entry| { + for (entry.items) |scripts_list| { + switch (pm.options.log_level) { + inline else => |log_level| try pm.spawnPackageLifecycleScripts(ctx, scripts_list, log_level), + } + + if (pm.options.log_level.showProgress()) { + scripts_node.activate(); + progress.refresh(); + } + } + + while (pm.pending_lifecycle_script_tasks.load(.Monotonic) > 0) { + pm.uws_event_loop.tick(); + } + } + } + + if (pm.options.log_level.showProgress()) { + progress.root.end(); + progress.* = .{}; + } + + const package_json_contents = try pm.root_package_json_file.readToEndAlloc(ctx.allocator, try pm.root_package_json_file.getEndPos()); + defer ctx.allocator.free(package_json_contents); + + const package_json_source = logger.Source.initPathString(PackageManager.package_json_cwd, package_json_contents); + + var package_json = bun.JSON.ParseJSONUTF8(&package_json_source, ctx.log, ctx.allocator) catch |err| { + switch (Output.enable_ansi_colors) { + inline else => 
|enable_ansi_colors| ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), enable_ansi_colors) catch {}, + } + + if (err == error.ParserError and ctx.log.errors > 0) { + Output.prettyErrorln("error: Failed to parse package.json", .{}); + Global.crash(); + } + + Output.panic("{s} parsing package.json", .{ + @errorName(err), + }); + }; + + // now add the package names to lockfile.trustedDependencies and package.json `trustedDependencies` + const names_count = package_names_to_add.count(); + if (comptime Environment.allow_assert) { + std.debug.assert(names_count > 0); + } + + // could be null if these are the first packages to be trusted + if (names_count > 0 and pm.lockfile.trusted_dependencies == null) pm.lockfile.trusted_dependencies = .{}; + + const names = package_names_to_add.keys(); + + try Install.PackageManager.PackageJSONEditor.editTrustedDependencies(ctx.allocator, &package_json, names); + + for (names) |name| { + try pm.lockfile.trusted_dependencies.?.put(ctx.allocator, @truncate(String.Builder.stringHash(name)), {}); + } + + pm.lockfile.saveToDisk(pm.options.lockfile_path); + + var buffer_writer = try bun.js_printer.BufferWriter.init(ctx.allocator); + try buffer_writer.buffer.list.ensureTotalCapacity(ctx.allocator, package_json_contents.len + 1); + buffer_writer.append_newline = package_json_contents.len > 0 and package_json_contents[package_json_contents.len - 1] == '\n'; + var package_json_writer = bun.js_printer.BufferPrinter.init(buffer_writer); + + _ = bun.js_printer.printJSON(@TypeOf(&package_json_writer), &package_json_writer, package_json, &package_json_source) catch |err| { + Output.prettyErrorln("package.json failed to write due to error {s}", .{@errorName(err)}); + Global.crash(); + }; + + const new_package_json_contents = package_json_writer.ctx.writtenWithoutTrailingZero(); + + try pm.root_package_json_file.pwriteAll(new_package_json_contents, 0); + std.os.ftruncate(pm.root_package_json_file.handle, new_package_json_contents.len) 
catch {}; + pm.root_package_json_file.close(); + Global.exit(0); } else if (strings.eqlComptime(subcommand, "ls")) { const load_lockfile = pm.lockfile.loadFromDisk(ctx.allocator, ctx.log, "bun.lockb"); @@ -244,6 +629,8 @@ pub const PackageManagerCommand = struct { const lockfile = load_lockfile.ok.lockfile; var iterator = Lockfile.Tree.Iterator.init(lockfile); + var max_depth: usize = 0; + var directories = std.ArrayList(NodeModulesFolder).init(ctx.allocator); defer directories.deinit(); while (iterator.nextNodeModulesFolder(null)) |node_modules| { @@ -255,17 +642,20 @@ pub const PackageManagerCommand = struct { const dependencies = try ctx.allocator.alloc(DependencyID, node_modules.dependencies.len); bun.copy(DependencyID, dependencies, node_modules.dependencies); + if (max_depth < node_modules.depth + 1) max_depth = node_modules.depth + 1; + try directories.append(.{ .relative_path = path[0..path_len :0], .dependencies = dependencies, .tree_id = node_modules.tree_id, + .depth = node_modules.depth, }); } const first_directory = directories.orderedRemove(0); - // TODO: find max depth beforehand - var more_packages = [_]bool{false} ** 16; + var more_packages = try ctx.allocator.alloc(bool, max_depth); + @memset(more_packages, false); if (first_directory.dependencies.len > 1) more_packages[0] = true; if (strings.leftHasAnyInRight(args, &.{ "-A", "-a", "--all" })) { @@ -355,7 +745,7 @@ fn printNodeModulesFolderStructure( depth: usize, directories: *std.ArrayList(NodeModulesFolder), lockfile: *Lockfile, - more_packages_: [16]bool, + more_packages_: []bool, ) !void { const allocator = lockfile.allocator; var more_packages = more_packages_; diff --git a/src/feature_flags.zig b/src/feature_flags.zig index df9db50a15..27a173f8b2 100644 --- a/src/feature_flags.zig +++ b/src/feature_flags.zig @@ -163,3 +163,5 @@ pub const concurrent_transpiler = !env.isWindows; pub const disable_auto_js_to_ts_in_node_modules = true; pub const runtime_transpiler_cache = true; + +pub const 
breaking_changes_1_1_0 = false; diff --git a/src/install/extract_tarball.zig b/src/install/extract_tarball.zig index db572ad47f..8e7a26f194 100644 --- a/src/install/extract_tarball.zig +++ b/src/install/extract_tarball.zig @@ -398,7 +398,8 @@ fn extract(this: *const ExtractTarball, tgz_bytes: []const u8) !Install.ExtractD if (switch (this.resolution.tag) { // TODO remove extracted files not matching any globs under "files" .github, .local_tarball, .remote_tarball => true, - else => this.package_manager.lockfile.trusted_dependencies.contains(@as(u32, @truncate(Semver.String.Builder.stringHash(name)))), + else => this.package_manager.lockfile.trusted_dependencies != null and + this.package_manager.lockfile.trusted_dependencies.?.contains(@truncate(Semver.String.Builder.stringHash(name))), }) { const json_file = final_dir.openFileZ("package.json", .{ .mode = .read_only }) catch |err| { this.package_manager.log.addErrorFmt( diff --git a/src/install/install.zig b/src/install/install.zig index 523dfd02db..4cb370af30 100644 --- a/src/install/install.zig +++ b/src/install/install.zig @@ -1,4 +1,5 @@ const bun = @import("root").bun; +const FeatureFlags = bun.FeatureFlags; const string = bun.string; const Output = bun.Output; const Global = bun.Global; @@ -181,6 +182,7 @@ pub const ExternalStringMap = extern struct { }; pub const PackageNameHash = u64; // Use String.Builder.stringHash to compute this +pub const TruncatedPackageNameHash = u32; // @truncate String.Builder.stringHash to compute this pub const Aligner = struct { pub fn write(comptime Type: type, comptime Writer: type, writer: Writer, pos: usize) !usize { @@ -564,7 +566,7 @@ const Task = struct { /// An ID that lets us register a callback without keeping the same pointer around pub const Id = struct { pub fn forNPMPackage(package_name: string, package_version: Semver.Version) u64 { - var hasher = bun.Wyhash.init(0); + var hasher = bun.Wyhash11.init(0); hasher.update(package_name); hasher.update("@"); 
hasher.update(std.mem.asBytes(&package_version)); @@ -572,28 +574,28 @@ const Task = struct { } pub fn forBinLink(package_id: PackageID) u64 { - const hash = bun.Wyhash.hash(0, std.mem.asBytes(&package_id)); + const hash = bun.Wyhash11.hash(0, std.mem.asBytes(&package_id)); return @as(u64, 1 << 61) | @as(u64, @as(u61, @truncate(hash))); } pub fn forManifest(name: string) u64 { - return @as(u64, 2 << 61) | @as(u64, @as(u61, @truncate(bun.Wyhash.hash(0, name)))); + return @as(u64, 2 << 61) | @as(u64, @as(u61, @truncate(bun.Wyhash11.hash(0, name)))); } pub fn forTarball(url: string) u64 { - var hasher = bun.Wyhash.init(0); + var hasher = bun.Wyhash11.init(0); hasher.update(url); return @as(u64, 3 << 61) | @as(u64, @as(u61, @truncate(hasher.final()))); } pub fn forGitClone(url: string) u64 { - var hasher = bun.Wyhash.init(0); + var hasher = bun.Wyhash11.init(0); hasher.update(url); return @as(u64, 4 << 61) | @as(u64, @as(u61, @truncate(hasher.final()))); } pub fn forGitCheckout(url: string, resolved: string) u64 { - var hasher = bun.Wyhash.init(0); + var hasher = bun.Wyhash11.init(0); hasher.update(url); hasher.update("@"); hasher.update(resolved); @@ -639,8 +641,7 @@ const Task = struct { }; switch (package_manifest) { - .cached => unreachable, - .fresh => |manifest| { + .fresh, .cached => |manifest| { this.status = Status.success; this.data = .{ .package_manifest = manifest }; return; @@ -855,6 +856,9 @@ pub const PackageInstall = struct { success: u32 = 0, skipped: u32 = 0, successfully_installed: ?Bitset = null, + + // deduplicated + packages_with_skipped_scripts_set: std.AutoArrayHashMapUnmanaged(TruncatedPackageNameHash, void) = .{}, }; pub const Method = enum { @@ -1083,14 +1087,10 @@ pub const PackageInstall = struct { return (this.err == error.FileNotFound or this.err == error.ENOENT) and this.step == .opening_cache_dir; } }, - pending: void, - skip: void, pub const Tag = enum { success, fail, - pending, - skip, }; }; @@ -2023,6 +2023,10 @@ pub const 
PackageManager = struct { // name hash from alias package name -> aliased package dependency version info known_npm_aliases: NpmAliasMap = .{}, + // During `installPackages` we learn exactly what dependencies from --trust + // actually have scripts to run, and we add them to this list + trusted_deps_to_add_to_package_json: std.ArrayListUnmanaged(string) = .{}, + const PreallocatedNetworkTasks = std.BoundedArray(NetworkTask, 1024); const NetworkTaskQueue = std.HashMapUnmanaged(u64, void, IdentityContext(u64), 80); pub var verbose_install = false; @@ -3552,6 +3556,21 @@ pub const PackageManager = struct { return &task.threadpool_task; } + pub fn updateLockfileIfNeeded( + manager: *PackageManager, + load_lockfile_result: Lockfile.LoadFromDiskResult, + ) !void { + if (load_lockfile_result == .ok and load_lockfile_result.ok.serializer_result.packages_need_update) { + const slice = manager.lockfile.packages.slice(); + for (slice.items(.meta)) |*meta| { + // these are possibly updated later, but need to make sure non are zero + meta.setHasInstallScript(false); + } + } + + return; + } + pub fn writeYarnLock(this: *PackageManager) !void { var printer = Lockfile.Printer{ .lockfile = this.lockfile, @@ -4538,11 +4557,24 @@ pub const PackageManager = struct { Global.crash(); }; + const has_scripts = package.scripts.hasAny() or brk: { + const dir = std.fs.path.dirname(data.json_path) orelse ""; + const binding_dot_gyp_path = Path.joinAbsStringZ( + dir, + &[_]string{"binding.gyp"}, + .auto, + ); + + break :brk Syscall.exists(binding_dot_gyp_path); + }; + + package.meta.setHasInstallScript(has_scripts); + package = manager.lockfile.appendPackage(package) catch unreachable; package_id.* = package.meta.id; if (package.dependencies.len > 0) { - manager.lockfile.scratch.dependency_list_queue.writeItem(package.dependencies) catch unreachable; + manager.lockfile.scratch.dependency_list_queue.writeItem(package.dependencies) catch bun.outOfMemory(); } return package; @@ -4576,11 +4608,24 
@@ pub const PackageManager = struct { Global.crash(); }; + const has_scripts = package.scripts.hasAny() or brk: { + const dir = std.fs.path.dirname(data.json_path) orelse ""; + const binding_dot_gyp_path = Path.joinAbsStringZ( + dir, + &[_]string{"binding.gyp"}, + .auto, + ); + + break :brk Syscall.exists(binding_dot_gyp_path); + }; + + package.meta.setHasInstallScript(has_scripts); + package = manager.lockfile.appendPackage(package) catch unreachable; package_id.* = package.meta.id; if (package.dependencies.len > 0) { - manager.lockfile.scratch.dependency_list_queue.writeItem(package.dependencies) catch unreachable; + manager.lockfile.scratch.dependency_list_queue.writeItem(package.dependencies) catch bun.outOfMemory(); } return package; @@ -4805,6 +4850,7 @@ pub const PackageManager = struct { }, } } + for (manager.package_json_updates) |*request| { if (strings.eql(request.name, name.slice())) { request.failed = true; @@ -5021,6 +5067,7 @@ pub const PackageManager = struct { continue; } const manifest = task.data.package_manifest; + _ = try manager.manifests.getOrPutValue(manager.allocator, manifest.pkg.name.hash, manifest); const dependency_list_entry = manager.task_queue.getEntry(task.id).?; @@ -5720,6 +5767,10 @@ pub const PackageManager = struct { this.do.run_scripts = false; } + if (cli.trusted) { + this.do.trust_dependencies_from_args = true; + } + this.local_package_features.optional_dependencies = !cli.omit.optional; const disable_progress_bar = default_disable_progress_bar or cli.no_progress; @@ -5796,6 +5847,7 @@ pub const PackageManager = struct { verify_integrity: bool = true, summary: bool = true, install_peer_dependencies: bool = true, + trust_dependencies_from_args: bool = false, }; pub const Enable = struct { @@ -5866,16 +5918,164 @@ pub const PackageManager = struct { } }; - const PackageJSONEditor = struct { + pub const PackageJSONEditor = struct { + const Expr = JSAst.Expr; + const G = JSAst.G; + const E = JSAst.E; + + const 
trusted_dependencies_string = "trustedDependencies"; + + pub const EditOptions = struct { + exact_versions: bool = false, + add_trusted_dependencies: bool = false, + }; + + pub fn editTrustedDependencies(allocator: std.mem.Allocator, package_json: *Expr, names_to_add: []string) !void { + var len = names_to_add.len; + + var original_trusted_dependencies = brk: { + if (package_json.asProperty(trusted_dependencies_string)) |query| { + if (query.expr.data == .e_array) { + break :brk query.expr.data.e_array.*; + } + } + break :brk E.Array{}; + }; + + for (names_to_add, 0..) |name, i| { + for (original_trusted_dependencies.items.slice()) |item| { + if (item.data == .e_string) { + if (item.data.e_string.eql(string, name)) { + const temp = names_to_add[i]; + names_to_add[i] = names_to_add[len - 1]; + names_to_add[len - 1] = temp; + len -= 1; + break; + } + } + } + } + + var trusted_dependencies: []Expr = &[_]Expr{}; + if (package_json.asProperty(trusted_dependencies_string)) |query| { + if (query.expr.data == .e_array) { + trusted_dependencies = query.expr.data.e_array.items.slice(); + } + } + + const trusted_dependencies_to_add = len; + const new_trusted_deps = brk: { + var deps = try allocator.alloc(Expr, trusted_dependencies.len + trusted_dependencies_to_add); + @memcpy(deps[0..trusted_dependencies.len], trusted_dependencies); + @memset(deps[trusted_dependencies.len..], Expr.empty); + + for (names_to_add[0..len]) |name| { + if (comptime Environment.allow_assert) { + var has_missing = false; + for (deps) |dep| { + if (dep.data == .e_missing) has_missing = true; + } + std.debug.assert(has_missing); + } + + var i = deps.len; + while (i > 0) { + i -= 1; + if (deps[i].data == .e_missing) { + deps[i] = try Expr.init( + E.String, + E.String{ + .data = name, + }, + logger.Loc.Empty, + ).clone(allocator); + break; + } + } + } + + if (comptime Environment.allow_assert) { + for (deps) |dep| std.debug.assert(dep.data != .e_missing); + } + + break :brk deps; + }; + + var 
needs_new_trusted_dependencies_list = true; + const trusted_dependencies_array: Expr = brk: { + if (package_json.asProperty(trusted_dependencies_string)) |query| { + if (query.expr.data == .e_array) { + needs_new_trusted_dependencies_list = false; + break :brk query.expr; + } + } + + break :brk Expr.init( + E.Array, + E.Array{ + .items = JSAst.ExprNodeList.init(new_trusted_deps), + }, + logger.Loc.Empty, + ); + }; + + if (trusted_dependencies_to_add > 0 and new_trusted_deps.len > 0) { + trusted_dependencies_array.data.e_array.items = JSAst.ExprNodeList.init(new_trusted_deps); + trusted_dependencies_array.data.e_array.alphabetizeStrings(); + } + + if (package_json.data != .e_object or package_json.data.e_object.properties.len == 0) { + var root_properties = try allocator.alloc(JSAst.G.Property, 1); + root_properties[0] = JSAst.G.Property{ + .key = Expr.init( + E.String, + E.String{ + .data = trusted_dependencies_string, + }, + logger.Loc.Empty, + ), + .value = trusted_dependencies_array, + }; + + package_json.* = Expr.init( + E.Object, + E.Object{ + .properties = JSAst.G.Property.List.init(root_properties), + }, + logger.Loc.Empty, + ); + } else if (needs_new_trusted_dependencies_list) { + var root_properties = try allocator.alloc(G.Property, package_json.data.e_object.properties.len + 1); + @memcpy(root_properties[0..package_json.data.e_object.properties.len], package_json.data.e_object.properties.slice()); + root_properties[root_properties.len - 1] = .{ + .key = Expr.init( + E.String, + E.String{ + .data = trusted_dependencies_string, + }, + logger.Loc.Empty, + ), + .value = trusted_dependencies_array, + }; + package_json.* = Expr.init( + E.Object, + E.Object{ + .properties = JSAst.G.Property.List.init(root_properties), + }, + logger.Loc.Empty, + ); + } + } + + /// edits dependencies and trusted dependencies + /// if options.add_trusted_dependencies is true, gets list from PackageManager.trusted_deps_to_add_to_package_json pub fn edit( allocator: 
std.mem.Allocator, updates: []UpdateRequest, current_package_json: *JSAst.Expr, dependency_list: string, - exact_versions: bool, + options: EditOptions, ) !void { - const G = JSAst.G; - var remaining = updates.len; var replacing: usize = 0; @@ -5885,34 +6085,60 @@ pub const PackageManager = struct { // 3. There is a "dependencies" (or equivalent list), and the package name exists in multiple lists ast_modifier: { // Try to use the existing spot in the dependencies list if possible - for (updates) |*request| { - inline for ([_]string{ "dependencies", "devDependencies", "optionalDependencies" }) |list| { - if (current_package_json.asProperty(list)) |query| { - if (query.expr.data == .e_object) { - if (query.expr.asProperty( - if (request.is_aliased) - request.name - else - request.version.literal.slice(request.version_buf), - )) |value| { - if (value.expr.data == .e_string) { - if (!request.resolved_name.isEmpty() and strings.eql(list, dependency_list)) { - replacing += 1; - } else { - request.e_string = value.expr.data.e_string; - remaining -= 1; - } + { + var original_trusted_dependencies = brk: { + if (!options.add_trusted_dependencies) break :brk E.Array{}; + if (current_package_json.asProperty(trusted_dependencies_string)) |query| { + if (query.expr.data == .e_array) { + // not modifying + break :brk query.expr.data.e_array.*; + } + } + break :brk E.Array{}; + }; + + if (options.add_trusted_dependencies) { + for (PackageManager.instance.trusted_deps_to_add_to_package_json.items, 0..) 
|trusted_package_name, i| { + for (original_trusted_dependencies.items.slice()) |item| { + if (item.data == .e_string) { + if (item.data.e_string.eql(string, trusted_package_name)) { + allocator.free(PackageManager.instance.trusted_deps_to_add_to_package_json.swapRemove(i)); + break; } - break; - } else { - if (request.version.tag == .github or request.version.tag == .git) { - for (query.expr.data.e_object.properties.slice()) |item| { - if (item.value) |v| { - const url = request.version.literal.slice(request.version_buf); - if (v.data == .e_string and v.data.e_string.eql(string, url)) { - request.e_string = v.data.e_string; - remaining -= 1; - break; + } + } + } + } + + for (updates) |*request| { + inline for ([_]string{ "dependencies", "devDependencies", "optionalDependencies" }) |list| { + if (current_package_json.asProperty(list)) |query| { + if (query.expr.data == .e_object) { + if (query.expr.asProperty( + if (request.is_aliased) + request.name + else + request.version.literal.slice(request.version_buf), + )) |value| { + if (value.expr.data == .e_string) { + if (!request.resolved_name.isEmpty() and strings.eqlLong(list, dependency_list, true)) { + replacing += 1; + } else { + request.e_string = value.expr.data.e_string; + remaining -= 1; + } + } + break; + } else { + if (request.version.tag == .github or request.version.tag == .git) { + for (query.expr.data.e_object.properties.slice()) |item| { + if (item.value) |v| { + const url = request.version.literal.slice(request.version_buf); + if (v.data == .e_string and v.data.e_string.eql(string, url)) { + request.e_string = v.data.e_string; + remaining -= 1; + break; + } } } } @@ -5937,6 +6163,55 @@ pub const PackageManager = struct { bun.copy(G.Property, new_dependencies, dependencies); @memset(new_dependencies[dependencies.len..], G.Property{}); + var trusted_dependencies: []Expr = &[_]Expr{}; + if (options.add_trusted_dependencies) { + if (current_package_json.asProperty(trusted_dependencies_string)) |query| { + 
if (query.expr.data == .e_array) { + trusted_dependencies = query.expr.data.e_array.items.slice(); + } + } + } + + const trusted_dependencies_to_add = PackageManager.instance.trusted_deps_to_add_to_package_json.items.len; + const new_trusted_deps = brk: { + if (!options.add_trusted_dependencies or trusted_dependencies_to_add == 0) break :brk &[_]Expr{}; + + var deps = try allocator.alloc(Expr, trusted_dependencies.len + trusted_dependencies_to_add); + @memcpy(deps[0..trusted_dependencies.len], trusted_dependencies); + @memset(deps[trusted_dependencies.len..], Expr.empty); + + for (PackageManager.instance.trusted_deps_to_add_to_package_json.items) |package_name| { + if (comptime Environment.allow_assert) { + var has_missing = false; + for (deps) |dep| { + if (dep.data == .e_missing) has_missing = true; + } + std.debug.assert(has_missing); + } + + var i = deps.len; + while (i > 0) { + i -= 1; + if (deps[i].data == .e_missing) { + deps[i] = try Expr.init( + E.String, + E.String{ + .data = package_name, + }, + logger.Loc.Empty, + ).clone(allocator); + break; + } + } + } + + if (comptime Environment.allow_assert) { + for (deps) |dep| std.debug.assert(dep.data != .e_missing); + } + + break :brk deps; + }; + outer: for (updates) |*request| { if (request.e_string != null) continue; defer if (comptime Environment.allow_assert) std.debug.assert(request.e_string != null); @@ -6033,8 +6308,37 @@ pub const PackageManager = struct { if (new_dependencies.len > 1) dependencies_object.data.e_object.alphabetizeProperties(); + var needs_new_trusted_dependencies_list = true; + const trusted_dependencies_array: Expr = brk: { + if (!options.add_trusted_dependencies or trusted_dependencies_to_add == 0) { + needs_new_trusted_dependencies_list = false; + break :brk Expr.empty; + } + if (current_package_json.asProperty(trusted_dependencies_string)) |query| { + if (query.expr.data == .e_array) { + needs_new_trusted_dependencies_list = false; + break :brk query.expr; + } + } + + break :brk 
Expr.init( + E.Array, + E.Array{ + .items = JSAst.ExprNodeList.init(new_trusted_deps), + }, + logger.Loc.Empty, + ); + }; + + if (options.add_trusted_dependencies and trusted_dependencies_to_add > 0) { + trusted_dependencies_array.data.e_array.items = JSAst.ExprNodeList.init(new_trusted_deps); + if (new_trusted_deps.len > 1) { + trusted_dependencies_array.data.e_array.alphabetizeStrings(); + } + } + if (current_package_json.data != .e_object or current_package_json.data.e_object.properties.len == 0) { - var root_properties = try allocator.alloc(JSAst.G.Property, 1); + var root_properties = try allocator.alloc(JSAst.G.Property, if (options.add_trusted_dependencies) 2 else 1); root_properties[0] = JSAst.G.Property{ .key = JSAst.Expr.init( JSAst.E.String, @@ -6045,44 +6349,102 @@ pub const PackageManager = struct { ), .value = dependencies_object, }; - current_package_json.* = JSAst.Expr.init(JSAst.E.Object, JSAst.E.Object{ .properties = JSAst.G.Property.List.init(root_properties) }, logger.Loc.Empty); - } else if (needs_new_dependency_list) { - var root_properties = try allocator.alloc(JSAst.G.Property, current_package_json.data.e_object.properties.len + 1); - bun.copy(JSAst.G.Property, root_properties, current_package_json.data.e_object.properties.slice()); - root_properties[root_properties.len - 1] = .{ - .key = JSAst.Expr.init( - JSAst.E.String, - JSAst.E.String{ - .data = dependency_list, - }, - logger.Loc.Empty, - ), - .value = dependencies_object, - }; + + if (options.add_trusted_dependencies) { + root_properties[1] = JSAst.G.Property{ + .key = Expr.init( + E.String, + E.String{ + .data = trusted_dependencies_string, + }, + logger.Loc.Empty, + ), + .value = trusted_dependencies_array, + }; + } + current_package_json.* = JSAst.Expr.init( JSAst.E.Object, - JSAst.E.Object{ - .properties = JSAst.G.Property.List.init(root_properties), - }, + JSAst.E.Object{ .properties = JSAst.G.Property.List.init(root_properties) }, logger.Loc.Empty, ); + } else { + if 
(needs_new_dependency_list and needs_new_trusted_dependencies_list) { + var root_properties = try allocator.alloc(G.Property, current_package_json.data.e_object.properties.len + 2); + @memcpy(root_properties[0..current_package_json.data.e_object.properties.len], current_package_json.data.e_object.properties.slice()); + root_properties[root_properties.len - 2] = .{ + .key = Expr.init(E.String, E.String{ + .data = dependency_list, + }, logger.Loc.Empty), + .value = dependencies_object, + }; + root_properties[root_properties.len - 1] = .{ + .key = Expr.init( + E.String, + E.String{ + .data = trusted_dependencies_string, + }, + logger.Loc.Empty, + ), + .value = trusted_dependencies_array, + }; + current_package_json.* = Expr.init( + E.Object, + E.Object{ + .properties = G.Property.List.init(root_properties), + }, + logger.Loc.Empty, + ); + } else if (needs_new_dependency_list or needs_new_trusted_dependencies_list) { + var root_properties = try allocator.alloc(JSAst.G.Property, current_package_json.data.e_object.properties.len + 1); + @memcpy(root_properties[0..current_package_json.data.e_object.properties.len], current_package_json.data.e_object.properties.slice()); + root_properties[root_properties.len - 1] = .{ + .key = JSAst.Expr.init( + JSAst.E.String, + JSAst.E.String{ + .data = if (needs_new_dependency_list) dependency_list else trusted_dependencies_string, + }, + logger.Loc.Empty, + ), + .value = if (needs_new_dependency_list) dependencies_object else trusted_dependencies_array, + }; + current_package_json.* = JSAst.Expr.init( + JSAst.E.Object, + JSAst.E.Object{ + .properties = JSAst.G.Property.List.init(root_properties), + }, + logger.Loc.Empty, + ); + } } } for (updates) |*request| { if (request.e_string) |e_string| { e_string.data = switch (request.resolution.tag) { - .npm => if (request.version.tag == .dist_tag and request.version.literal.isEmpty()) - switch (exact_versions) { - false => std.fmt.allocPrint(allocator, "^{}", .{ - 
request.resolution.value.npm.version.fmt(request.version_buf), - }) catch unreachable, - true => std.fmt.allocPrint(allocator, "{}", .{ - request.resolution.value.npm.version.fmt(request.version_buf), - }) catch unreachable, + .npm => brk: { + if (comptime FeatureFlags.breaking_changes_1_1_0) { + if (request.version.tag == .dist_tag) { + const fmt = if (options.exact_versions) "{}" else "^{}"; + break :brk try std.fmt.allocPrint(allocator, fmt, .{ + request.resolution.value.npm.version.fmt(request.version_buf), + }); + } + break :brk null; + } else { + break :brk if (request.version.tag == .dist_tag and request.version.literal.isEmpty()) + switch (options.exact_versions) { + false => std.fmt.allocPrint(allocator, "^{}", .{ + request.resolution.value.npm.version.fmt(request.version_buf), + }) catch unreachable, + true => std.fmt.allocPrint(allocator, "{}", .{ + request.resolution.value.npm.version.fmt(request.version_buf), + }) catch unreachable, + } + else + null; } - else - null, + }, .uninitialized => switch (request.version.tag) { .uninitialized => try allocator.dupe(u8, latest), else => null, @@ -6878,6 +7240,7 @@ pub const PackageManager = struct { clap.parseParam("--no-summary Don't print a summary") catch unreachable, clap.parseParam("--no-verify Skip verifying integrity of newly downloaded packages") catch unreachable, clap.parseParam("--ignore-scripts Skip lifecycle scripts in the project's package.json (dependency scripts are never run)") catch unreachable, + clap.parseParam("--trust Add to trustedDependencies in the project's package.json and install the package(s)") catch unreachable, clap.parseParam("-g, --global Install globally") catch unreachable, clap.parseParam("--cwd Set a specific cwd") catch unreachable, clap.parseParam("--backend Platform-specific optimizations for installing dependencies. 
" ++ platform_specific_backend_label) catch unreachable, @@ -6950,6 +7313,7 @@ pub const PackageManager = struct { no_progress: bool = false, no_verify: bool = false, ignore_scripts: bool = false, + trusted: bool = false, no_summary: bool = false, link_native_bins: []const string = &[_]string{}, @@ -7170,6 +7534,7 @@ pub const PackageManager = struct { cli.silent = args.flag("--silent"); cli.verbose = args.flag("--verbose") or Output.is_verbose; cli.ignore_scripts = args.flag("--ignore-scripts"); + cli.trusted = args.flag("--trust"); cli.no_summary = args.flag("--no-summary"); // link and unlink default to not saving, all others default to @@ -7260,7 +7625,7 @@ pub const PackageManager = struct { // This must be cloned to handle when the AST store resets e_string: ?*JSAst.E.String = null, - pub const Array = std.BoundedArray(UpdateRequest, 64); + pub const Array = std.ArrayListUnmanaged(UpdateRequest); pub inline fn matches(this: PackageManager.UpdateRequest, dependency: Dependency, string_buf: []const u8) bool { return this.name_hash == if (this.name.len == 0) @@ -7269,6 +7634,20 @@ pub const PackageManager = struct { dependency.name_hash; } + /// It is incorrect to call this function before Lockfile.cleanWithLogger() because + /// resolved_name should be populated if possible. + /// + /// `this` needs to be a pointer! If `this` is a copy and the name returned from + /// resolved_name is inlined, you will return a pointer to stack memory. 
+ pub fn getResolvedName(this: *UpdateRequest) string { + return if (this.is_aliased) + this.name + else if (this.resolved_name.isEmpty()) + this.version.literal.slice(this.version_buf) + else + this.resolved_name.slice(this.version_buf); + } + pub fn parse( allocator: std.mem.Allocator, log: *logger.Log, @@ -7357,13 +7736,13 @@ pub const PackageManager = struct { request.name_hash = String.Builder.stringHash(version.literal.slice(input)); } - for (update_requests.constSlice()) |*prev| { + for (update_requests.items) |*prev| { if (prev.name_hash == request.name_hash and request.name.len == prev.name.len) continue :outer; } - update_requests.append(request) catch break; + update_requests.append(allocator, request) catch bun.outOfMemory(); } - return update_requests.slice(); + return update_requests.items; } }; @@ -7409,7 +7788,8 @@ pub const PackageManager = struct { comptime op: Lockfile.Package.Diff.Op, comptime log_level: Options.LogLevel, ) !void { - var update_requests = try UpdateRequest.Array.init(0); + var update_requests = UpdateRequest.Array.initCapacity(manager.allocator, 64) catch bun.outOfMemory(); + defer update_requests.deinit(manager.allocator); if (manager.options.positionals.len <= 1) { const examples_to_print: [3]string = undefined; @@ -7583,7 +7963,9 @@ pub const PackageManager = struct { updates, ¤t_package_json, dependency_list, - manager.options.enable.exact_versions, + .{ + .exact_versions = manager.options.enable.exact_versions, + }, ); manager.package_json_updates = updates; }, @@ -7646,7 +8028,10 @@ pub const PackageManager = struct { updates, ¤t_package_json, dependency_list, - manager.options.enable.exact_versions, + .{ + .exact_versions = manager.options.enable.exact_versions, + .add_trusted_dependencies = manager.options.do.trust_dependencies_from_args, + }, ); var buffer_writer_two = try JSPrinter.BufferWriter.init(ctx.allocator); try buffer_writer_two.buffer.list.ensureTotalCapacity(ctx.allocator, new_package_json_source.len + 1); @@ 
-7733,9 +8118,9 @@ pub const PackageManager = struct { } } - var cwd_buf: [bun.MAX_PATH_BYTES]u8 = undefined; - var package_json_cwd_buf: [bun.MAX_PATH_BYTES]u8 = undefined; - var package_json_cwd: string = ""; + var cwd_buf: bun.PathBuffer = undefined; + var package_json_cwd_buf: bun.PathBuffer = undefined; + pub var package_json_cwd: string = ""; pub inline fn install(ctx: Command.Context) !void { var manager = try init(ctx, .install); @@ -7810,6 +8195,8 @@ pub const PackageManager = struct { tree_id: Lockfile.Tree.Id, }) = .{}, + trusted_dependencies_from_update_requests: std.AutoArrayHashMapUnmanaged(TruncatedPackageNameHash, void), + /// Increments the number of installed packages for a tree id and runs available scripts /// if the tree is finished. pub fn incrementTreeInstallCount(this: *PackageInstaller, tree_id: Lockfile.Tree.Id, comptime log_level: Options.LogLevel) void { @@ -7849,7 +8236,7 @@ pub const PackageManager = struct { while (i > 0) { i -= 1; const entry = this.pending_lifecycle_scripts.items[i]; - const name = entry.list.first().package_name; + const name = entry.list.package_name; const tree_id = entry.tree_id; if (this.canRunScripts(tree_id)) { _ = this.pending_lifecycle_scripts.swapRemove(i); @@ -7885,7 +8272,7 @@ pub const PackageManager = struct { return bun.todo(@src(), {}); } for (this.pending_lifecycle_scripts.items) |entry| { - const package_name = entry.list.first().package_name; + const package_name = entry.list.package_name; while (LifecycleScriptSubprocess.alive_count.load(.Monotonic) >= this.manager.options.max_concurrent_lifecycle_scripts) { if (PackageManager.verbose_install) { if (PackageManager.hasEnoughTimePassedBetweenWaitingMessages()) Output.prettyErrorln("[PackageManager] waiting for {d} scripts\n", .{LifecycleScriptSubprocess.alive_count.load(.Monotonic)}); @@ -7960,6 +8347,7 @@ pub const PackageManager = struct { allocator.free(this.tree_install_counts); this.tree_ids_to_trees_the_id_depends_on.deinit(allocator); 
this.node_modules_folder_path.deinit(); + this.trusted_dependencies_from_update_requests.deinit(allocator); } /// Call when you mutate the length of `lockfile.packages` @@ -8139,7 +8527,13 @@ pub const PackageManager = struct { installer.cache_dir = directory; } }, - else => return, + else => { + if (comptime Environment.allow_assert) { + @panic("bad"); + } + this.incrementTreeInstallCount(this.current_tree_id, log_level); + return; + }, } const needs_install = this.force_install or this.skip_verify_installed_version_number or !installer.verify(resolution, buf); @@ -8204,14 +8598,37 @@ pub const PackageManager = struct { } } - if (resolution.tag == .workspace or this.lockfile.hasTrustedDependency(alias)) { - this.enqueuePackageScriptsToLockfile( + const name_hash: TruncatedPackageNameHash = @truncate(this.lockfile.buffers.dependencies.items[dependency_id].name_hash); + const is_trusted, const is_trusted_through_update_request = brk: { + if (this.trusted_dependencies_from_update_requests.contains(name_hash)) break :brk .{ true, true }; + if (this.lockfile.hasTrustedDependency(alias)) break :brk .{ true, false }; + break :brk .{ false, false }; + }; + + if (resolution.tag == .workspace or is_trusted) { + if (this.enqueueLifecycleScripts( alias, log_level, package_id, - destination_dir_subpath, resolution, - ); + )) { + if (is_trusted_through_update_request) { + this.manager.trusted_deps_to_add_to_package_json.append( + this.manager.allocator, + this.manager.allocator.dupe(u8, alias) catch bun.outOfMemory(), + ) catch bun.outOfMemory(); + + if (this.lockfile.trusted_dependencies == null) this.lockfile.trusted_dependencies = .{}; + this.lockfile.trusted_dependencies.?.put(this.manager.allocator, name_hash, {}) catch bun.outOfMemory(); + } + } + } + + if (resolution.tag != .workspace and !is_trusted and this.lockfile.packages.get(package_id).meta.hasInstallScript()) { + if (comptime log_level.isVerbose()) { + Output.prettyError("Blocked scripts for: {s}@{}\n", .{ alias, 
resolution.fmt(this.lockfile.buffers.string_bytes.items) }); + } + this.summary.packages_with_skipped_scripts_set.put(this.manager.allocator, name_hash, {}) catch bun.outOfMemory(); } this.incrementTreeInstallCount(this.current_tree_id, log_level); @@ -8279,189 +8696,164 @@ pub const PackageManager = struct { this.summary.fail += 1; }, } - } else if (cause.err == error.DanglingSymlink) { - Output.prettyErrorln( - "error: {s} \"link:{s}\" not found (try running 'bun link' in the intended package's folder)", - .{ @errorName(cause.err), this.names[package_id].slice(buf) }, - ); - this.summary.fail += 1; - } else if (cause.err == error.AccessDenied) { - // there are two states this can happen - // - Access Denied because node_modules/ is unwritable - // - Access Denied because this specific package is unwritable - // in the case of the former, the logs are extremely noisy, so we - // will exit early, otherwise set a flag to not re-stat - const Singleton = struct { - var node_modules_is_ok = false; - }; - if (!Singleton.node_modules_is_ok) { - const stat = bun.sys.fstat(bun.toFD(this.node_modules_folder.fd)).unwrap() catch |err| { - Output.err("EACCES", "Permission denied while installing {s}", .{ - this.names[package_id].slice(buf), - }); - if (Environment.isDebug) { - Output.err(err, "Failed to stat node_modules", .{}); - } - Global.exit(1); - }; - - const is_writable = if (Environment.isWindows or stat.uid == bun.C.getuid()) - stat.mode & bun.S.IWUSR > 0 - else if (stat.gid == bun.C.getgid()) - stat.mode & bun.S.IWGRP > 0 - else - stat.mode & bun.S.IWOTH > 0; - - if (!is_writable) { - Output.err("EACCES", "Permission denied while writing packages into node_modules.", .{}); - Global.exit(1); - } - Singleton.node_modules_is_ok = true; - } - - Output.err("EACCES", "Permission denied while installing {s}", .{ - this.names[package_id].slice(buf), - }); - - this.summary.fail += 1; } else { - Output.prettyErrorln( - "error: {s} installing {s}", - .{ @errorName(cause.err), 
this.names[package_id].slice(buf) }, - ); - this.summary.fail += 1; + // even if the package failed to install, we still need to increment the install + // counter for this tree + this.incrementTreeInstallCount(this.current_tree_id, log_level); + if (cause.err == error.DanglingSymlink) { + Output.prettyErrorln( + "error: {s} \"link:{s}\" not found (try running 'bun link' in the intended package's folder)", + .{ @errorName(cause.err), this.names[package_id].slice(buf) }, + ); + this.summary.fail += 1; + } else if (cause.err == error.AccessDenied) { + // there are two states this can happen + // - Access Denied because node_modules/ is unwritable + // - Access Denied because this specific package is unwritable + // in the case of the former, the logs are extremely noisy, so we + // will exit early, otherwise set a flag to not re-stat + const Singleton = struct { + var node_modules_is_ok = false; + }; + if (!Singleton.node_modules_is_ok) { + const stat = bun.sys.fstat(bun.toFD(this.node_modules_folder.fd)).unwrap() catch |err| { + Output.err("EACCES", "Permission denied while installing {s}", .{ + this.names[package_id].slice(buf), + }); + if (Environment.isDebug) { + Output.err(err, "Failed to stat node_modules", .{}); + } + Global.exit(1); + }; + + const is_writable = if (Environment.isWindows or stat.uid == bun.C.getuid()) + stat.mode & bun.S.IWUSR > 0 + else if (stat.gid == bun.C.getgid()) + stat.mode & bun.S.IWGRP > 0 + else + stat.mode & bun.S.IWOTH > 0; + + if (!is_writable) { + Output.err("EACCES", "Permission denied while writing packages into node_modules.", .{}); + Global.exit(1); + } + Singleton.node_modules_is_ok = true; + } + + Output.err("EACCES", "Permission denied while installing {s}", .{ + this.names[package_id].slice(buf), + }); + + this.summary.fail += 1; + } else { + Output.prettyErrorln( + "error: {s} installing {s}", + .{ @errorName(cause.err), this.names[package_id].slice(buf) }, + ); + this.summary.fail += 1; + } } }, - else => {}, } } else { 
- if (this.manager.summary.new_trusted_dependencies.contains(@truncate(String.Builder.stringHash(alias)))) { - // these are packages that are installed but haven't run lifecycle scripts because they weren't - // in `trustedDependencies` - this.enqueuePackageScriptsToLockfile( + const name_hash: TruncatedPackageNameHash = @truncate(this.lockfile.buffers.dependencies.items[dependency_id].name_hash); + const is_trusted, const is_trusted_through_update_request, const add_to_lockfile = brk: { + // trusted through a --trust dependency. need to enqueue scripts, write to package.json, and add to lockfile + if (this.trusted_dependencies_from_update_requests.contains(name_hash)) break :brk .{ true, true, true }; + + if (this.manager.summary.added_trusted_dependencies.get(name_hash)) |should_add_to_lockfile| { + // is a new trusted dependency. need to enqueue scripts and maybe add to lockfile + break :brk .{ true, false, should_add_to_lockfile }; + } + break :brk .{ false, false, false }; + }; + + if (is_trusted) { + if (this.enqueueLifecycleScripts( alias, log_level, package_id, - destination_dir_subpath, resolution, - ); - } + )) { + if (is_trusted_through_update_request) { + this.manager.trusted_deps_to_add_to_package_json.append( + this.manager.allocator, + this.manager.allocator.dupe(u8, alias) catch bun.outOfMemory(), + ) catch bun.outOfMemory(); + } - this.incrementTreeInstallCount(this.current_tree_id, log_level); + if (add_to_lockfile) { + if (this.lockfile.trusted_dependencies == null) this.lockfile.trusted_dependencies = .{}; + this.lockfile.trusted_dependencies.?.put(this.manager.allocator, name_hash, {}) catch bun.outOfMemory(); + } + } + } } } - fn enqueuePackageScriptsToLockfile( + // returns true if scripts are enqueued + fn enqueueLifecycleScripts( this: *PackageInstaller, folder_name: string, comptime log_level: Options.LogLevel, package_id: PackageID, - destination_dir_subpath: [:0]const u8, resolution: *const Resolution, - ) void { - const buf = 
this.lockfile.buffers.string_bytes.items; + ) bool { var scripts: Package.Scripts = this.lockfile.packages.items(.scripts)[package_id]; - var path_buf_to_use: [bun.MAX_PATH_BYTES * 2]u8 = undefined; + const scripts_list = scripts.getList( + this.manager.log, + this.lockfile, + this.node_modules_folder, + this.node_modules_folder_path.items, + folder_name, + resolution, + ) catch |err| { + if (comptime log_level != .silent) { + const fmt = "\nerror: failed to enqueue lifecycle scripts for {s}: {s}\n"; + const args = .{ folder_name, @errorName(err) }; - if (scripts.hasAny()) { - const add_node_gyp_rebuild_script = if (this.lockfile.hasTrustedDependency(folder_name) and - scripts.install.isEmpty() and - scripts.preinstall.isEmpty()) - brk: { - const binding_dot_gyp_path = Path.joinAbsStringZ( - this.node_modules_folder_path.items, - &[_]string{ folder_name, "binding.gyp" }, - .auto, + if (comptime log_level.showProgress()) { + switch (Output.enable_ansi_colors) { + inline else => |enable_ansi_colors| { + this.progress.log(comptime Output.prettyFmt(fmt, enable_ansi_colors), args); + }, + } + } else { + Output.prettyErrorln(fmt, args); + } + } + + if (this.manager.options.enable.fail_early) { + Global.exit(1); + } + + Output.flush(); + this.summary.fail += 1; + return false; + }; + + if (scripts_list == null) return false; + + if (this.manager.options.do.run_scripts) { + this.manager.total_scripts += scripts_list.?.total; + if (this.manager.scripts_node) |scripts_node| { + this.manager.setNodeName( + scripts_node, + scripts_list.?.package_name, + PackageManager.ProgressStrings.script_emoji, + true, ); - - break :brk Syscall.exists(binding_dot_gyp_path); - } else false; - - const cwd = Path.joinAbsStringBufZTrailingSlash( - this.node_modules_folder_path.items, - &path_buf_to_use, - &[_]string{destination_dir_subpath}, - .auto, - ); - - if (scripts.enqueue( - this.lockfile, - buf, - cwd, - folder_name, - resolution.tag, - add_node_gyp_rebuild_script, - )) |scripts_list| { 
- if (this.manager.options.do.run_scripts) { - this.manager.total_scripts += scripts_list.total; - if (this.manager.scripts_node) |scripts_node| { - this.manager.setNodeName( - scripts_node, - scripts_list.items[scripts_list.first_index].?.package_name, - PackageManager.ProgressStrings.script_emoji, - true, - ); - scripts_node.setEstimatedTotalItems(scripts_node.unprotected_estimated_total_items + scripts_list.total); - } - this.pending_lifecycle_scripts.append(this.manager.allocator, .{ - .list = scripts_list, - .tree_id = this.current_tree_id, - }) catch bun.outOfMemory(); - } + scripts_node.setEstimatedTotalItems(scripts_node.unprotected_estimated_total_items + scripts_list.?.total); } - } else if (!scripts.filled) { - const scripts_list = scripts.enqueueFromPackageJSON( - this.manager.log, - this.lockfile, - this.node_modules_folder, - this.node_modules_folder_path.items, - destination_dir_subpath, - folder_name, - resolution, - ) catch |err| { - if (comptime log_level != .silent) { - const fmt = "\nerror: failed to parse life-cycle scripts for {s}: {s}\n"; - const args = .{ folder_name, @errorName(err) }; + this.pending_lifecycle_scripts.append(this.manager.allocator, .{ + .list = scripts_list.?, + .tree_id = this.current_tree_id, + }) catch bun.outOfMemory(); - if (comptime log_level.showProgress()) { - switch (Output.enable_ansi_colors) { - inline else => |enable_ansi_colors| { - this.progress.log(comptime Output.prettyFmt(fmt, enable_ansi_colors), args); - }, - } - } else { - Output.prettyErrorln(fmt, args); - } - } - - if (this.manager.options.enable.fail_early) { - Global.exit(1); - } - - Output.flush(); - this.summary.fail += 1; - return; - }; - - if (this.manager.options.do.run_scripts) { - if (scripts_list) |list| { - this.manager.total_scripts += list.total; - if (this.manager.scripts_node) |scripts_node| { - this.manager.setNodeName( - scripts_node, - list.items[list.first_index].?.package_name, - PackageManager.ProgressStrings.script_emoji, - true, 
- ); - scripts_node.setEstimatedTotalItems(scripts_node.unprotected_estimated_total_items + list.total); - } - this.pending_lifecycle_scripts.append(this.manager.allocator, .{ - .list = list, - .tree_id = this.current_tree_id, - }) catch bun.outOfMemory(); - } - } + return true; } + + return false; } pub fn installPackage( @@ -8605,6 +8997,28 @@ pub const PackageManager = struct { } } + fn addDependencyAndDependenciesToSet( + name_hash_set: *std.AutoArrayHashMapUnmanaged(TruncatedPackageNameHash, void), + lockfile: *Lockfile, + dep_name_hash: PackageNameHash, + dependencies_slice: Lockfile.DependencySlice, + ) void { + name_hash_set.put(lockfile.allocator, @truncate(dep_name_hash), {}) catch bun.outOfMemory(); + + const begin = dependencies_slice.off; + const end = begin +| dependencies_slice.len; + var dep_id = begin; + while (dep_id < end) : (dep_id += 1) { + const dep = lockfile.buffers.dependencies.items[dep_id]; + + const package_id = lockfile.buffers.resolutions.items[dep_id]; + if (package_id == invalid_package_id) continue; + + const dependency_slice = lockfile.packages.items(.dependencies)[package_id]; + addDependencyAndDependenciesToSet(name_hash_set, lockfile, dep.name_hash, dependency_slice); + } + } + pub fn enqueueTarballForReading( this: *PackageManager, dependency_id: DependencyID, @@ -8790,6 +9204,32 @@ pub const PackageManager = struct { }; }; + const trusted_dependencies_from_update_requests: std.AutoArrayHashMapUnmanaged(TruncatedPackageNameHash, void) = trusted_deps: { + + // find all deps originating from --trust packages from cli + var set: std.AutoArrayHashMapUnmanaged(TruncatedPackageNameHash, void) = .{}; + if (this.options.do.trust_dependencies_from_args and this.lockfile.packages.len > 0) { + const root_deps = this.lockfile.packages.items(.dependencies)[0]; + var dep_id = root_deps.off; + const end = dep_id +| root_deps.len; + while (dep_id < end) : (dep_id += 1) { + const root_dep = this.lockfile.buffers.dependencies.items[dep_id]; + 
for (this.package_json_updates) |request| { + if (request.matches(root_dep, this.lockfile.buffers.string_bytes.items)) { + const package_id = this.lockfile.buffers.resolutions.items[dep_id]; + if (package_id == invalid_package_id) continue; + + const dependency_slice = this.lockfile.packages.items(.dependencies)[package_id]; + addDependencyAndDependenciesToSet(&set, this.lockfile, root_dep.name_hash, dependency_slice); + break; + } + } + } + } + + break :trusted_deps set; + }; + break :brk PackageInstaller{ .manager = this, .options = &this.options, @@ -8823,6 +9263,7 @@ pub const PackageManager = struct { .tree_ids_to_trees_the_id_depends_on = tree_ids_to_trees_the_id_depends_on, .completed_trees = completed_trees, .tree_install_counts = tree_install_counts, + .trusted_dependencies_from_update_requests = trusted_dependencies_from_update_requests, }; }; @@ -9003,6 +9444,41 @@ pub const PackageManager = struct { manager.downloads_node = null; } + pub fn loadRootLifecycleScripts(this: *PackageManager, root_package: Package) void { + const binding_dot_gyp_path = Path.joinAbsStringZ( + Fs.FileSystem.instance.top_level_dir, + &[_]string{"binding.gyp"}, + .auto, + ); + if (comptime Environment.allow_assert) { + std.debug.assert(root_package.scripts.filled); + } + if (root_package.scripts.hasAny()) { + const add_node_gyp_rebuild_script = root_package.scripts.install.isEmpty() and root_package.scripts.preinstall.isEmpty() and Syscall.exists(binding_dot_gyp_path); + + this.root_lifecycle_scripts = root_package.scripts.createList( + this.lockfile, + this.lockfile.buffers.string_bytes.items, + strings.withoutTrailingSlash(FileSystem.instance.top_level_dir), + root_package.name.slice(this.lockfile.buffers.string_bytes.items), + .root, + add_node_gyp_rebuild_script, + ); + } else { + if (Syscall.exists(binding_dot_gyp_path)) { + // no scripts exist but auto node gyp script needs to be added + this.root_lifecycle_scripts = root_package.scripts.createList( + this.lockfile, + 
this.lockfile.buffers.string_bytes.items, + strings.withoutTrailingSlash(FileSystem.instance.top_level_dir), + root_package.name.slice(this.lockfile.buffers.string_bytes.items), + .root, + true, + ); + } + } + } + fn installWithManager( manager: *PackageManager, ctx: Command.Context, @@ -9020,6 +9496,8 @@ pub const PackageManager = struct { else .{ .not_found = {} }; + try manager.updateLockfileIfNeeded(load_lockfile_result); + var root = Lockfile.Package{}; var needs_new_lockfile = load_lockfile_result != .ok or (load_lockfile_result.ok.lockfile.buffers.dependencies.items.len == 0 and manager.package_json_updates.len > 0); @@ -9108,11 +9586,7 @@ pub const PackageManager = struct { mapping, ); - had_any_diffs = had_any_diffs or manager.summary.hasDiffs(); - - if (manager.summary.new_trusted_dependencies.count() > 0) { - needs_new_lockfile = true; - } + had_any_diffs = manager.summary.hasDiffs(); if (had_any_diffs) { var builder_ = manager.lockfile.stringBuilder(); @@ -9366,16 +9840,93 @@ pub const PackageManager = struct { manager.lockfile.verifyResolutions(manager.options.local_package_features, manager.options.remote_package_features, log_level); } - const did_meta_hash_change = try manager.lockfile.hasMetaHashChanged( - PackageManager.verbose_install or manager.options.do.print_meta_hash_string, - ); + { + // append scripts to lockfile before generating new metahash + manager.loadRootLifecycleScripts(root); - const should_save_lockfile = did_meta_hash_change or had_any_diffs or needs_new_lockfile or manager.package_json_updates.len > 0; + if (manager.root_lifecycle_scripts) |root_scripts| { + root_scripts.appendToLockfile(manager.lockfile); + } + + const packages = manager.lockfile.packages.slice(); + for (packages.items(.resolution), packages.items(.meta), packages.items(.scripts)) |resolution, meta, scripts| { + if (resolution.tag == .workspace) { + if (meta.hasInstallScript()) { + if (scripts.hasAny()) { + const first_index, _, const entries = 
scripts.getScriptEntries( + manager.lockfile, + manager.lockfile.buffers.string_bytes.items, + .workspace, + false, + ); + + if (comptime Environment.allow_assert) { + std.debug.assert(first_index != -1); + } + + if (first_index != -1) { + inline for (entries, 0..) |maybe_entry, i| { + if (maybe_entry) |entry| { + @field(manager.lockfile.scripts, Lockfile.Scripts.names[i]).append( + manager.lockfile.allocator, + entry, + ) catch bun.outOfMemory(); + } + } + } + } else { + const first_index, _, const entries = scripts.getScriptEntries( + manager.lockfile, + manager.lockfile.buffers.string_bytes.items, + .workspace, + true, + ); + + if (comptime Environment.allow_assert) { + std.debug.assert(first_index != -1); + } + + inline for (entries, 0..) |maybe_entry, i| { + if (maybe_entry) |entry| { + @field(manager.lockfile.scripts, Lockfile.Scripts.names[i]).append( + manager.lockfile.allocator, + entry, + ) catch bun.outOfMemory(); + } + } + } + } + } + } + } if (manager.options.global) { try manager.setupGlobalDir(&ctx); } + const packages_len_before_install = manager.lockfile.packages.len; + + var install_summary = PackageInstall.Summary{}; + if (manager.options.do.install_packages) { + install_summary = try manager.installPackages( + ctx, + log_level, + ); + } + + const did_meta_hash_change = try manager.lockfile.hasMetaHashChanged( + PackageManager.verbose_install or manager.options.do.print_meta_hash_string, + @min(packages_len_before_install, manager.lockfile.packages.len), + ); + + const should_save_lockfile = did_meta_hash_change or + had_any_diffs or + needs_new_lockfile or + + // this will handle new trusted dependencies added through --trust + manager.package_json_updates.len > 0 or + (load_lockfile_result == .ok and load_lockfile_result.ok.serializer_result.packages_need_update); + if (did_meta_hash_change and manager.options.enable.frozen_lockfile) { if (comptime log_level != .silent) { Output.prettyErrorln("error: lockfile had changes, but lockfile is 
frozen", .{}); @@ -9427,7 +9978,7 @@ pub const PackageManager = struct { manager.lockfile.saveToDisk(manager.options.lockfile_path); if (comptime Environment.allow_assert) { - if (manager.lockfile.hasMetaHashChanged(false) catch false) { + if (manager.lockfile.hasMetaHashChanged(false, packages_len_before_install) catch false) { Output.panic("Lockfile metahash non-deterministic after saving", .{}); } } @@ -9443,44 +9994,6 @@ pub const PackageManager = struct { } } - const binding_dot_gyp_path = Path.joinAbsStringZ( - Fs.FileSystem.instance.top_level_dir, - &[_]string{"binding.gyp"}, - .auto, - ); - if (root.scripts.hasAny()) { - const add_node_gyp_rebuild_script = root.scripts.install.isEmpty() and root.scripts.preinstall.isEmpty() and Syscall.exists(binding_dot_gyp_path); - - manager.root_lifecycle_scripts = root.scripts.enqueue( - manager.lockfile, - manager.lockfile.buffers.string_bytes.items, - strings.withoutTrailingSlash(FileSystem.instance.top_level_dir), - root.name.slice(manager.lockfile.buffers.string_bytes.items), - .root, - add_node_gyp_rebuild_script, - ); - } else { - if (Syscall.exists(binding_dot_gyp_path)) { - // no scripts exist but auto node gyp script needs to be added - manager.root_lifecycle_scripts = root.scripts.enqueue( - manager.lockfile, - manager.lockfile.buffers.string_bytes.items, - strings.withoutTrailingSlash(FileSystem.instance.top_level_dir), - root.name.slice(manager.lockfile.buffers.string_bytes.items), - .root, - true, - ); - } - } - - var install_summary = PackageInstall.Summary{}; - if (manager.options.do.install_packages) { - install_summary = try manager.installPackages( - ctx, - log_level, - ); - } - try manager.log.printForLogLevel(Output.errorWriter()); if (manager.log.hasErrors()) Global.crash(); @@ -9524,8 +10037,6 @@ pub const PackageManager = struct { }, } - manager.lifecycle_script_time_log.printAndDeinit(manager.lockfile.allocator); - if (!did_meta_hash_change) { manager.summary.remove = 0; manager.summary.add = 0; 
@@ -9544,7 +10055,7 @@ pub const PackageManager = struct { Output.pretty(" {d} package{s} installed ", .{ pkgs_installed, if (pkgs_installed == 1) "" else "s" }); Output.printStartEndStdout(ctx.start_time, std.time.nanoTimestamp()); printed_timestamp = true; - Output.pretty("\n", .{}); + printSkippedScripts(install_summary); if (manager.summary.remove > 0) { Output.pretty(" Removed: {d}\n", .{manager.summary.remove}); @@ -9559,7 +10070,7 @@ pub const PackageManager = struct { Output.pretty(" {d} package{s} removed ", .{ manager.summary.remove, if (manager.summary.remove == 1) "" else "s" }); Output.printStartEndStdout(ctx.start_time, std.time.nanoTimestamp()); printed_timestamp = true; - Output.pretty("\n", .{}); + printSkippedScripts(install_summary); } else if (install_summary.skipped > 0 and install_summary.fail == 0 and manager.package_json_updates.len == 0) { const count = @as(PackageID, @truncate(manager.lockfile.packages.len)); if (count != install_summary.skipped) { @@ -9571,7 +10082,7 @@ pub const PackageManager = struct { }); Output.printStartEndStdout(ctx.start_time, std.time.nanoTimestamp()); printed_timestamp = true; - Output.pretty("\n", .{}); + printSkippedScripts(install_summary); } else { Output.pretty(" Done! Checked {d} package{s} (no changes) ", .{ install_summary.skipped, @@ -9579,7 +10090,7 @@ pub const PackageManager = struct { }); Output.printStartEndStdout(ctx.start_time, std.time.nanoTimestamp()); printed_timestamp = true; - Output.pretty("\n", .{}); + printSkippedScripts(install_summary); } } @@ -9603,6 +10114,18 @@ pub const PackageManager = struct { Output.flush(); } + fn printSkippedScripts(summary: PackageInstall.Summary) void { + const count = summary.packages_with_skipped_scripts_set.count(); + if (count > 0) { + Output.prettyln("\n\n Skipped ~{d} script{s}. 
Run `bun pm trusted` for details.\n", .{ + count, + if (count > 1) "s" else "", + }); + } else { + Output.pretty("\n", .{}); + } + } + pub fn spawnPackageLifecycleScripts( this: *PackageManager, ctx: Command.Context, @@ -9625,7 +10148,7 @@ pub const PackageManager = struct { try this.ensureTempNodeGypScript(); - const cwd = list.first().cwd; + const cwd = list.cwd; const this_bundler = try this.configureEnvForScripts(ctx, log_level); const original_path = this_bundler.env.get("PATH") orelse ""; diff --git a/src/install/lifecycle_script_runner.zig b/src/install/lifecycle_script_runner.zig index 20578c1af3..ea62ce845c 100644 --- a/src/install/lifecycle_script_runner.zig +++ b/src/install/lifecycle_script_runner.zig @@ -14,7 +14,7 @@ const Timer = std.time.Timer; pub const LifecycleScriptSubprocess = struct { package_name: []const u8, - scripts: [6]?Lockfile.Scripts.Entry, + scripts: Lockfile.Package.Scripts.List, current_script_index: u8 = 0, finished_fds: u8 = 0, @@ -158,14 +158,14 @@ pub const LifecycleScriptSubprocess = struct { errdefer _ = alive_count.fetchSub(1, .Monotonic); const manager = this.manager; - const original_script = this.scripts[next_script_index].?; - const cwd = original_script.cwd; + const original_script = this.scripts.items[next_script_index].?; + const cwd = this.scripts.cwd; const env = manager.env; if (manager.scripts_node) |scripts_node| { manager.setNodeName( scripts_node, - original_script.package_name, + this.package_name, PackageManager.ProgressStrings.script_emoji, true, ); @@ -175,7 +175,6 @@ pub const LifecycleScriptSubprocess = struct { } } - this.package_name = original_script.package_name; this.current_script_index = next_script_index; this.waitpid_result = null; this.finished_fds = 0; @@ -478,7 +477,7 @@ pub const LifecycleScriptSubprocess = struct { } for (this.current_script_index + 1..Lockfile.Scripts.names.len) |new_script_index| { - if (this.scripts[new_script_index] != null) { + if (this.scripts.items[new_script_index] != 
null) { this.resetPolls(); this.spawnNextScript(@intCast(new_script_index)) catch |err| { Output.errGeneric("Failed to run script {s} due to error {s}", .{ @@ -576,13 +575,14 @@ pub const LifecycleScriptSubprocess = struct { comptime log_level: PackageManager.Options.LogLevel, ) !void { var lifecycle_subprocess = try manager.allocator.create(LifecycleScriptSubprocess); - lifecycle_subprocess.scripts = list.items; + lifecycle_subprocess.scripts = list; lifecycle_subprocess.manager = manager; lifecycle_subprocess.envp = envp; + lifecycle_subprocess.package_name = list.package_name; if (comptime log_level.isVerbose()) { Output.prettyErrorln("[LifecycleScriptSubprocess] Starting scripts for \"{s}\"", .{ - list.first().package_name, + list.package_name, }); } diff --git a/src/install/lockfile.zig b/src/install/lockfile.zig index 1043dcd778..6263d4e196 100644 --- a/src/install/lockfile.zig +++ b/src/install/lockfile.zig @@ -1,6 +1,7 @@ const std = @import("std"); const Allocator = std.mem.Allocator; const bun = @import("root").bun; +const FeatureFlags = bun.FeatureFlags; const string = bun.string; const Output = bun.Output; const Global = bun.Global; @@ -78,6 +79,7 @@ const Origin = Install.Origin; const PackageID = Install.PackageID; const PackageInstall = Install.PackageInstall; const PackageNameHash = Install.PackageNameHash; +const TruncatedPackageNameHash = Install.TruncatedPackageNameHash; const Resolution = @import("./resolution.zig").Resolution; const Crypto = @import("../sha.zig").Hashers; const PackageJSON = @import("../resolver/package_json.zig").PackageJSON; @@ -86,7 +88,7 @@ const StaticHashMap = @import("../StaticHashMap.zig").StaticHashMap; const MetaHash = [std.crypto.hash.sha2.Sha512256.digest_length]u8; const zero_hash = std.mem.zeroes(MetaHash); pub const NameHashMap = std.ArrayHashMapUnmanaged(PackageNameHash, String, ArrayIdentityContext.U64, false); -pub const NameHashSet = std.ArrayHashMapUnmanaged(u32, void, ArrayIdentityContext, false); +pub 
const TrustedDependenciesSet = std.ArrayHashMapUnmanaged(TruncatedPackageNameHash, void, ArrayIdentityContext, false); pub const VersionHashMap = std.ArrayHashMapUnmanaged(PackageNameHash, Semver.Version, ArrayIdentityContext.U64, false); const assertNoUninitializedPadding = @import("./padding_checker.zig").assertNoUninitializedPadding; @@ -111,22 +113,18 @@ scripts: Scripts = .{}, workspace_paths: NameHashMap = .{}, workspace_versions: VersionHashMap = .{}, -trusted_dependencies: NameHashSet = .{}, +/// Optional because `trustedDependencies` in package.json might be an +/// empty list or it might not exist +trusted_dependencies: ?TrustedDependenciesSet = null, overrides: OverrideMap = .{}, const Stream = std.io.FixedBufferStream([]u8); pub const default_filename = "bun.lockb"; -pub fn hasTrustedDependencies(this: *const Lockfile) bool { - return this.trusted_dependencies.count() > 0; -} - pub const Scripts = struct { const MAX_PARALLEL_PROCESSES = 10; pub const Entry = struct { - cwd: string, script: string, - package_name: string, }; pub const Entries = std.ArrayListUnmanaged(Entry); @@ -167,7 +165,6 @@ pub const Scripts = struct { inline for (Scripts.names) |hook| { const list = &@field(this, hook); for (list.items) |entry| { - allocator.free(entry.cwd); allocator.free(entry.script); } list.deinit(allocator); @@ -188,6 +185,7 @@ pub const LoadFromDiskResult = union(enum) { ok: struct { lockfile: *Lockfile, was_migrated: bool = false, + serializer_result: Serializer.SerializerLoadResult, }, pub const Step = enum { open_file, read_file, parse_file, migrating }; @@ -228,12 +226,12 @@ pub fn loadFromBytes(this: *Lockfile, buf: []u8, allocator: Allocator, log: *log this.format = FormatVersion.current; this.scripts = .{}; - this.trusted_dependencies = .{}; + this.trusted_dependencies = null; this.workspace_paths = .{}; this.workspace_versions = .{}; this.overrides = .{}; - Lockfile.Serializer.load(this, &stream, allocator, log) catch |err| { + const load_result = 
Lockfile.Serializer.load(this, &stream, allocator, log) catch |err| { return LoadFromDiskResult{ .err = .{ .step = .parse_file, .value = err } }; }; @@ -241,7 +239,12 @@ pub fn loadFromBytes(this: *Lockfile, buf: []u8, allocator: Allocator, log: *log this.verifyData() catch @panic("lockfile data is corrupt"); } - return LoadFromDiskResult{ .ok = .{ .lockfile = this } }; + return LoadFromDiskResult{ + .ok = .{ + .lockfile = this, + .serializer_result = load_result, + }, + }; } pub const InstallResult = struct { @@ -296,6 +299,15 @@ pub const Tree = struct { relative_path: stringZ, dependencies: []const DependencyID, tree_id: Tree.Id, + + /// depth of the node_modules folder in the tree + /// + /// 0 (./node_modules) + /// / \ + /// 1 1 + /// / + /// 2 + depth: usize, }; pub const Iterator = struct { @@ -304,7 +316,7 @@ pub const Tree = struct { dependencies: []const Dependency, resolutions: []const PackageID, tree_id: Id, - path_buf: [bun.MAX_PATH_BYTES]u8 = undefined, + path_buf: bun.PathBuffer = undefined, path_buf_len: usize = 0, last_parent: Id = invalid_id, string_buf: string, @@ -313,7 +325,7 @@ pub const Tree = struct { depth_stack: [(bun.MAX_PATH_BYTES / "node_modules".len) + 1]Id = undefined, pub fn init(lockfile: *const Lockfile) Iterator { - return .{ + var iter = Iterator{ .trees = lockfile.buffers.trees.items, .tree_id = 0, .dependency_ids = lockfile.buffers.hoisted_dependencies.items, @@ -321,6 +333,8 @@ pub const Tree = struct { .resolutions = lockfile.buffers.resolutions.items, .string_buf = lockfile.buffers.string_bytes.items, }; + @memcpy(iter.path_buf[0.."node_modules".len], "node_modules"); + return iter; } pub fn reload(this: *Iterator, lockfile: *const Lockfile) void { @@ -331,6 +345,10 @@ pub const Tree = struct { this.string_buf = lockfile.buffers.string_bytes.items; } + pub fn reset(this: *Iterator) void { + this.tree_id = 0; + } + pub fn nextNodeModulesFolder(this: *Iterator, completed_trees: ?*Bitset) ?NodeModulesFolder { if (this.tree_id 
>= this.trees.len) return null; @@ -343,13 +361,9 @@ pub const Tree = struct { } const tree = this.trees[this.tree_id]; - const string_buf = this.string_buf; + var depth: usize = 0; { - - // For now, the dumb way - // (the smart way is avoiding this copy) - this.path_buf[0.."node_modules".len].* = "node_modules".*; var parent_id = tree.id; var path_written: usize = "node_modules".len; this.depth_stack[0] = 0; @@ -362,16 +376,17 @@ pub const Tree = struct { depth_buf_len += 1; } depth_buf_len -= 1; + depth = depth_buf_len; while (depth_buf_len > 0) : (depth_buf_len -= 1) { this.path_buf[path_written] = std.fs.path.sep; path_written += 1; const tree_id = this.depth_stack[depth_buf_len]; - const name = this.dependencies[this.trees[tree_id].dependency_id].name.slice(string_buf); - bun.copy(u8, this.path_buf[path_written..], name); + const name = this.dependencies[this.trees[tree_id].dependency_id].name.slice(this.string_buf); + @memcpy(this.path_buf[path_written..][0..name.len], name); path_written += name.len; - this.path_buf[path_written..][0.."/node_modules".len].* = (std.fs.path.sep_str ++ "node_modules").*; + @memcpy(this.path_buf[path_written..][0.."/node_modules".len], std.fs.path.sep_str ++ "node_modules"); path_written += "/node_modules".len; } } @@ -385,6 +400,7 @@ pub const Tree = struct { .relative_path = relative_path, .dependencies = tree.dependencies.get(this.dependency_ids), .tree_id = tree.id, + .depth = depth, }; } }; @@ -853,6 +869,10 @@ pub fn cleanWithLogger( // This is where we update it in the lockfile from "latest" to "^17.0.2" try cloner.flush(); + new.trusted_dependencies = old_trusted_dependencies; + new.scripts = old_scripts; + new.meta_hash = old.meta_hash; + // Don't allow invalid memory to happen if (updates.len > 0) { const slice = new.packages.slice(); @@ -878,9 +898,6 @@ pub fn cleanWithLogger( } } } - new.trusted_dependencies = old_trusted_dependencies; - new.scripts = old_scripts; - new.meta_hash = old.meta_hash; return new; } @@ 
-1538,7 +1555,7 @@ pub const Printer = struct { }; }; -pub fn verifyData(this: *Lockfile) !void { +pub fn verifyData(this: *const Lockfile) !void { std.debug.assert(this.format == Lockfile.FormatVersion.current); var i: usize = 0; while (i < this.packages.len) : (i += 1) { @@ -1681,7 +1698,7 @@ pub fn saveToDisk(this: *Lockfile, filename: stringZ) void { }; } -pub fn rootPackage(this: *Lockfile) ?Lockfile.Package { +pub fn rootPackage(this: *const Lockfile) ?Lockfile.Package { if (this.packages.len == 0) { return null; } @@ -1689,11 +1706,11 @@ pub fn rootPackage(this: *Lockfile) ?Lockfile.Package { return this.packages.get(0); } -pub inline fn str(this: *Lockfile, slicable: anytype) string { +pub inline fn str(this: *const Lockfile, slicable: anytype) string { return strWithType(this, @TypeOf(slicable), slicable); } -inline fn strWithType(this: *Lockfile, comptime Type: type, slicable: Type) string { +inline fn strWithType(this: *const Lockfile, comptime Type: type, slicable: Type) string { if (comptime Type == String) { @compileError("str must be a *const String. 
Otherwise it is a pointer to a temporary which is undefined behavior"); } @@ -1715,7 +1732,7 @@ pub fn initEmpty(this: *Lockfile, allocator: Allocator) !void { .allocator = allocator, .scratch = Scratch.init(allocator), .scripts = .{}, - .trusted_dependencies = .{}, + .trusted_dependencies = null, .workspace_paths = .{}, .workspace_versions = .{}, }; @@ -1811,7 +1828,7 @@ pub fn getOrPutID(this: *Lockfile, id: PackageID, name_hash: PackageNameHash) !v } pub fn appendPackage(this: *Lockfile, package_: Lockfile.Package) !Lockfile.Package { - const id = @as(PackageID, @truncate(this.packages.len)); + const id: PackageID = @truncate(this.packages.len); return try appendPackageWithID(this, package_, id); } @@ -2339,7 +2356,7 @@ pub const Package = extern struct { /// was not resolved resolutions: PackageIDSlice = .{}, - meta: Meta = .{}, + meta: Meta = Meta.init(), bin: Bin = .{}, /// If any of these scripts run, they will run in order: @@ -2364,6 +2381,8 @@ pub const Package = extern struct { items: [Lockfile.Scripts.names.len]?Lockfile.Scripts.Entry, first_index: u8, total: u8, + cwd: string, + package_name: string, pub fn first(this: Package.Scripts.List) Lockfile.Scripts.Entry { if (comptime Environment.allow_assert) { @@ -2371,6 +2390,25 @@ pub const Package = extern struct { } return this.items[this.first_index].?; } + + pub fn deinit(this: Package.Scripts.List, allocator: std.mem.Allocator) void { + for (this.items) |maybe_item| { + if (maybe_item) |item| { + allocator.free(item.script); + } + } + + allocator.free(this.cwd); + } + + pub fn appendToLockfile(this: Package.Scripts.List, lockfile: *Lockfile) void { + inline for (this.items, 0..) 
|maybe_script, i| { + if (maybe_script) |script| { + debug("enqueue({s}, {s}) in {s}", .{ "prepare", this.package_name, this.cwd }); + @field(lockfile.scripts, Lockfile.Scripts.names[i]).append(lockfile.allocator, script) catch bun.outOfMemory(); + } + } + } }; pub fn clone(this: *const Package.Scripts, buf: []const u8, comptime Builder: type, builder: Builder) Package.Scripts { @@ -2397,16 +2435,15 @@ pub const Package = extern struct { return false; } - pub fn enqueue( + pub fn getScriptEntries( this: *const Package.Scripts, lockfile: *Lockfile, - lockfile_buf: []const u8, - _cwd: string, - package_name: string, + lockfile_buf: string, resolution_tag: Resolution.Tag, add_node_gyp_rebuild_script: bool, - ) ?Package.Scripts.List { - var cwd: ?string = null; + // return: first_index, total, entries + ) struct { i8, u8, [Lockfile.Scripts.names.len]?Lockfile.Scripts.Entry } { + const allocator = lockfile.allocator; var script_index: u8 = 0; var first_script_index: i8 = -1; var scripts: [6]?Lockfile.Scripts.Entry = .{null} ** 6; @@ -2416,33 +2453,21 @@ pub const Package = extern struct { { script_index += 1; const entry: Lockfile.Scripts.Entry = .{ - .cwd = cwd orelse brk: { - cwd = lockfile.allocator.dupe(u8, _cwd) catch unreachable; - break :brk cwd.?; - }, - .script = lockfile.allocator.dupe(u8, "node-gyp rebuild") catch unreachable, - .package_name = package_name, + .script = allocator.dupe(u8, "node-gyp rebuild") catch unreachable, }; if (first_script_index == -1) first_script_index = @intCast(script_index); scripts[script_index] = entry; script_index += 1; - lockfile.scripts.install.append(lockfile.allocator, entry) catch unreachable; counter += 1; } // missing install and preinstall, only need to check postinstall if (!this.postinstall.isEmpty()) { const entry: Lockfile.Scripts.Entry = .{ - .cwd = cwd orelse brk: { - cwd = lockfile.allocator.dupe(u8, _cwd) catch unreachable; - break :brk cwd.?; - }, - .script = lockfile.allocator.dupe(u8, 
this.preinstall.slice(lockfile_buf)) catch unreachable, - .package_name = package_name, + .script = allocator.dupe(u8, this.preinstall.slice(lockfile_buf)) catch unreachable, }; if (first_script_index == -1) first_script_index = @intCast(script_index); scripts[script_index] = entry; - lockfile.scripts.postinstall.append(lockfile.allocator, entry) catch unreachable; counter += 1; } script_index += 1; @@ -2456,18 +2481,11 @@ pub const Package = extern struct { inline for (install_scripts) |hook| { const script = @field(this, hook); if (!script.isEmpty()) { - debug("enqueue({s}, {s}) in {s}", .{ hook, package_name, _cwd }); const entry: Lockfile.Scripts.Entry = .{ - .cwd = cwd orelse brk: { - cwd = lockfile.allocator.dupe(u8, _cwd) catch unreachable; - break :brk cwd.?; - }, - .script = lockfile.allocator.dupe(u8, script.slice(lockfile_buf)) catch unreachable, - .package_name = package_name, + .script = allocator.dupe(u8, script.slice(lockfile_buf)) catch unreachable, }; if (first_script_index == -1) first_script_index = @intCast(script_index); scripts[script_index] = entry; - @field(lockfile.scripts, hook).append(lockfile.allocator, entry) catch unreachable; counter += 1; } script_index += 1; @@ -2485,18 +2503,11 @@ pub const Package = extern struct { inline for (prepare_scripts) |hook| { const script = @field(this, hook); if (!script.isEmpty()) { - debug("enqueue({s}, {s}) in {s}", .{ hook, package_name, _cwd }); const entry: Lockfile.Scripts.Entry = .{ - .cwd = cwd orelse brk: { - cwd = lockfile.allocator.dupe(u8, _cwd) catch unreachable; - break :brk cwd.?; - }, - .script = lockfile.allocator.dupe(u8, script.slice(lockfile_buf)) catch unreachable, - .package_name = package_name, + .script = allocator.dupe(u8, script.slice(lockfile_buf)) catch unreachable, }; if (first_script_index == -1) first_script_index = @intCast(script_index); scripts[script_index] = entry; - @field(lockfile.scripts, hook).append(lockfile.allocator, entry) catch unreachable; counter += 1; } 
script_index += 1; @@ -2505,19 +2516,11 @@ pub const Package = extern struct { .workspace => { script_index += 1; if (!this.prepare.isEmpty()) { - debug("enqueue({s}, {s}) in {s}", .{ "prepare", package_name, _cwd }); - const entry: Lockfile.Scripts.Entry = .{ - .cwd = cwd orelse brk: { - cwd = lockfile.allocator.dupe(u8, _cwd) catch unreachable; - break :brk cwd.?; - }, - .script = lockfile.allocator.dupe(u8, this.prepare.slice(lockfile_buf)) catch unreachable, - .package_name = package_name, + .script = allocator.dupe(u8, this.prepare.slice(lockfile_buf)) catch unreachable, }; if (first_script_index == -1) first_script_index = @intCast(script_index); scripts[script_index] = entry; - lockfile.scripts.prepare.append(lockfile.allocator, entry) catch unreachable; counter += 1; } script_index += 2; @@ -2525,11 +2528,27 @@ pub const Package = extern struct { else => {}, } - if (first_script_index != -1) { + return .{ first_script_index, counter, scripts }; + } + + pub fn createList( + this: *const Package.Scripts, + lockfile: *Lockfile, + lockfile_buf: []const u8, + cwd: string, + package_name: string, + resolution_tag: Resolution.Tag, + add_node_gyp_rebuild_script: bool, + ) ?Package.Scripts.List { + const allocator = lockfile.allocator; + const first_index, const total, const scripts = getScriptEntries(this, lockfile, lockfile_buf, resolution_tag, add_node_gyp_rebuild_script); + if (first_index != -1) { return .{ .items = scripts, - .first_index = @intCast(first_script_index), - .total = counter, + .first_index = @intCast(first_index), + .total = total, + .cwd = allocator.dupe(u8, cwd) catch bun.outOfMemory(), + .package_name = package_name, }; } @@ -2564,13 +2583,65 @@ pub const Package = extern struct { } } - pub fn enqueueFromPackageJSON( + pub fn getList( + this: *Package.Scripts, + log: *logger.Log, + lockfile: *Lockfile, + node_modules: std.fs.Dir, + abs_node_modules_path: string, + folder_name: string, + resolution: *const Resolution, + ) 
!?Package.Scripts.List { + var path_buf_to_use: [bun.MAX_PATH_BYTES * 2]u8 = undefined; + if (this.hasAny()) { + const add_node_gyp_rebuild_script = if (lockfile.hasTrustedDependency(folder_name) and + this.install.isEmpty() and + this.preinstall.isEmpty()) + brk: { + const binding_dot_gyp_path = Path.joinAbsStringZ( + abs_node_modules_path, + &[_]string{ folder_name, "binding.gyp" }, + .auto, + ); + + break :brk bun.sys.exists(binding_dot_gyp_path); + } else false; + + const cwd = Path.joinAbsStringBufZTrailingSlash( + abs_node_modules_path, + &path_buf_to_use, + &[_]string{folder_name}, + .auto, + ); + + return this.createList( + lockfile, + lockfile.buffers.string_bytes.items, + cwd, + folder_name, + resolution.tag, + add_node_gyp_rebuild_script, + ); + } else if (!this.filled) { + return this.createFromPackageJSON( + log, + lockfile, + node_modules, + abs_node_modules_path, + folder_name, + resolution, + ); + } + + return null; + } + + pub fn createFromPackageJSON( this: *Package.Scripts, log: *logger.Log, lockfile: *Lockfile, node_modules: std.fs.Dir, node_modules_path: string, - subpath: [:0]const u8, folder_name: string, resolution: *const Resolution, ) !?Package.Scripts.List { @@ -2585,7 +2656,7 @@ pub const Package = extern struct { const json = brk: { const json_src = brk2: { - const json_path = bun.path.joinZ([_]string{ subpath, "package.json" }, .auto); + const json_path = bun.path.joinZ([_]string{ folder_name, "package.json" }, .auto); const json_file_fd = try bun.sys.openat( bun.toFD(node_modules.fd), json_path, @@ -2616,11 +2687,9 @@ pub const Package = extern struct { Lockfile.Package.Scripts.parseCount(lockfile.allocator, &builder, json); try builder.allocate(); this.parseAlloc(lockfile.allocator, &builder, json); + this.filled = true; - const add_node_gyp_rebuild_script = if (lockfile.hasTrustedDependency(folder_name) and - this.install.isEmpty() and - this.preinstall.isEmpty()) - brk: { + const add_node_gyp_rebuild_script = if 
(this.install.isEmpty() and this.preinstall.isEmpty()) brk: { const binding_dot_gyp_path = Path.joinAbsStringZ( cwd, &[_]string{"binding.gyp"}, @@ -2630,7 +2699,7 @@ pub const Package = extern struct { break :brk bun.sys.exists(binding_dot_gyp_path); } else false; - return this.enqueue( + return this.createList( lockfile, tmp.buffers.string_bytes.items, cwd, @@ -3052,8 +3121,8 @@ pub const Package = extern struct { package.meta.arch = package_version.cpu; package.meta.os = package_version.os; - package.meta.integrity = package_version.integrity; + package.meta.setHasInstallScript(package_version.has_install_script); package.dependencies.off = @as(u32, @truncate(dependencies_list.items.len)); package.dependencies.len = total_dependencies_count; @@ -3086,7 +3155,10 @@ pub const Package = extern struct { update: u32 = 0, overrides_changed: bool = false, - new_trusted_dependencies: NameHashSet = .{}, + // bool for if this dependency should be added to lockfile trusted dependencies. + // it is false when the new trusted dependency is coming from the default list. 
+ added_trusted_dependencies: std.ArrayHashMapUnmanaged(TruncatedPackageNameHash, bool, ArrayIdentityContext, false) = .{}, + removed_trusted_dependencies: TrustedDependenciesSet = .{}, pub inline fn sum(this: *Summary, that: Summary) void { this.add += that.add; @@ -3095,7 +3167,9 @@ pub const Package = extern struct { } pub inline fn hasDiffs(this: Summary) bool { - return this.add > 0 or this.remove > 0 or this.update > 0 or this.overrides_changed; + return this.add > 0 or this.remove > 0 or this.update > 0 or this.overrides_changed or + this.added_trusted_dependencies.count() > 0 or + this.removed_trusted_dependencies.count() > 0; } }; @@ -3139,12 +3213,104 @@ pub const Package = extern struct { } } - { - var to_lockfile_itr = to_lockfile.trusted_dependencies.iterator(); - while (to_lockfile_itr.next()) |entry| { - if (!from_lockfile.trusted_dependencies.contains(entry.key_ptr.*)) { - try summary.new_trusted_dependencies.put(allocator, entry.key_ptr.*, {}); + trusted_dependencies: { + // trusted dependency diff + // + // situations: + // 1 - Both old lockfile and new lockfile use default trusted dependencies, no diffs + // 2 - Both exist, only diffs are from additions and removals + // + // 3 - Old lockfile has trusted dependencies, new lockfile does not. Added are dependencies + // from default list that didn't exist previously. We need to be careful not to add these + // to the new lockfile. Removed are dependencies from old list that + // don't exist in the default list. + // + // 4 - Old lockfile used the default list, new lockfile has trusted dependencies. Added + // are dependencies are all from the new lockfile. Removed is empty because the default + // list isn't appended to the lockfile. 
+ + // 1 + if (from_lockfile.trusted_dependencies == null and to_lockfile.trusted_dependencies == null) break :trusted_dependencies; + + // 2 + if (from_lockfile.trusted_dependencies != null and to_lockfile.trusted_dependencies != null) { + const from_trusted_dependencies = from_lockfile.trusted_dependencies.?; + const to_trusted_dependencies = to_lockfile.trusted_dependencies.?; + + { + // added + var to_trusted_iter = to_trusted_dependencies.iterator(); + while (to_trusted_iter.next()) |entry| { + const to_trusted = entry.key_ptr.*; + if (!from_trusted_dependencies.contains(to_trusted)) { + try summary.added_trusted_dependencies.put(allocator, to_trusted, true); + } + } } + + { + // removed + var from_trusted_iter = from_trusted_dependencies.iterator(); + while (from_trusted_iter.next()) |entry| { + const from_trusted = entry.key_ptr.*; + if (!to_trusted_dependencies.contains(from_trusted)) { + try summary.removed_trusted_dependencies.put(allocator, from_trusted, {}); + } + } + } + + break :trusted_dependencies; + } + + // 3 + if (from_lockfile.trusted_dependencies != null and to_lockfile.trusted_dependencies == null) { + const from_trusted_dependencies = from_lockfile.trusted_dependencies.?; + + { + // added + for (default_trusted_dependencies.entries) |entry| { + if (!from_trusted_dependencies.contains(@truncate(entry.hash))) { + // although this is a new trusted dependency, it is from the default + // list so it shouldn't be added to the lockfile + try summary.added_trusted_dependencies.put(allocator, @truncate(entry.hash), false); + } + } + } + + { + // removed + var from_trusted_iter = from_trusted_dependencies.iterator(); + while (from_trusted_iter.next()) |entry| { + const from_trusted = entry.key_ptr.*; + if (!default_trusted_dependencies.hasWithHash(@intCast(from_trusted))) { + try summary.removed_trusted_dependencies.put(allocator, from_trusted, {}); + } + } + } + + break :trusted_dependencies; + } + + // 4 + if (from_lockfile.trusted_dependencies == 
null and to_lockfile.trusted_dependencies != null) { + const to_trusted_dependencies = to_lockfile.trusted_dependencies.?; + + { + // add all to trusted dependencies, even if they exist in default because they weren't in the + // lockfile originally + var to_trusted_iter = to_trusted_dependencies.iterator(); + while (to_trusted_iter.next()) |entry| { + const to_trusted = entry.key_ptr.*; + try summary.added_trusted_dependencies.put(allocator, to_trusted, true); + } + } + + { + // removed + // none + } + + break :trusted_dependencies; } } @@ -3383,20 +3549,19 @@ pub const Package = extern struct { switch (dependency_version.tag) { .folder => { - dependency_version.value.folder = string_builder.append( - String, - Path.relative( + const relative = Path.relative( + FileSystem.instance.top_level_dir, + Path.joinAbsString( FileSystem.instance.top_level_dir, - Path.joinAbsString( - FileSystem.instance.top_level_dir, - &[_]string{ - source.path.name.dir, - dependency_version.value.folder.slice(buf), - }, - .auto, - ), + &[_]string{ + source.path.name.dir, + dependency_version.value.folder.slice(buf), + }, + .auto, ), ); + // if relative is empty, we are linking the package to itself + dependency_version.value.folder = string_builder.append(String, if (relative.len == 0) "." 
else relative); }, .npm => if (comptime tag != null) unreachable @@ -4173,7 +4338,8 @@ pub const Package = extern struct { if (json.asProperty("trustedDependencies")) |q| { switch (q.expr.data) { .e_array => |arr| { - try lockfile.trusted_dependencies.ensureUnusedCapacity(allocator, arr.items.len); + if (lockfile.trusted_dependencies == null) lockfile.trusted_dependencies = .{}; + try lockfile.trusted_dependencies.?.ensureUnusedCapacity(allocator, arr.items.len); for (arr.slice()) |item| { const name = item.asString(allocator) orelse { log.addErrorFmt(&source, q.loc, allocator, @@ -4184,7 +4350,7 @@ pub const Package = extern struct { , .{}) catch {}; return error.InvalidPackageJSON; }; - lockfile.trusted_dependencies.putAssumeCapacity(@as(u32, @truncate(String.Builder.stringHash(name))), {}); + lockfile.trusted_dependencies.?.putAssumeCapacity(@as(u32, @truncate(String.Builder.stringHash(name))), {}); } }, else => { @@ -4338,7 +4504,7 @@ pub const Package = extern struct { inline for (dependency_groups) |group| { if (group.behavior.isWorkspace()) { - var seen_workspace_names = NameHashSet{}; + var seen_workspace_names = TrustedDependenciesSet{}; defer seen_workspace_names.deinit(allocator); for (workspace_names.values(), workspace_names.keys()) |entry, path| { @@ -4538,7 +4704,7 @@ pub const Package = extern struct { pub const Meta = extern struct { // TODO: when we bump the lockfile version, we should reorder this to: - // id(32), arch(16), os(16), id(8), man_dir(8), integrity(72 align 8) + // id(32), arch(16), os(16), id(8), man_dir(8), __has_install_script(8), integrity(72 align 8) // should allow us to remove padding bytes // TODO: remove origin. it doesnt do anything and can be inferred from the resolution @@ -4553,7 +4719,20 @@ pub const Package = extern struct { man_dir: String = String{}, integrity: Integrity = Integrity{}, - _padding_integrity: [3]u8 = .{0} ** 3, + + /// Shouldn't be used directly. 
Use `Meta.hasInstallScript()` and + /// `Meta.setHasInstallScript()` instead. + /// + /// `.old` represents the value of this field before it was used + /// in the lockfile and should never be saved to a new lockfile. + /// There is a debug assert for this in `Lockfile.Package.Serializer.save()`. + __has_install_script: enum(u8) { + old, + false, + true, + }, + + _padding_integrity: [2]u8 = .{0} ** 2, /// Does the `cpu` arch and `os` match the requirements listed in the package? /// This is completely unrelated to "devDependencies", "peerDependencies", "optionalDependencies" etc @@ -4561,10 +4740,28 @@ pub const Package = extern struct { return !this.arch.isMatch() or !this.os.isMatch(); } + pub fn hasInstallScript(this: *const Meta) bool { + return this.__has_install_script == .true; + } + + pub fn setHasInstallScript(this: *Meta, has_script: bool) void { + this.__has_install_script = if (has_script) .true else .false; + } + + pub fn needsUpdate(this: *const Meta) bool { + return this.__has_install_script == .old; + } + pub fn count(this: *const Meta, buf: []const u8, comptime StringBuilderType: type, builder: StringBuilderType) void { builder.count(this.man_dir.slice(buf)); } + pub fn init() Meta { + return .{ + .__has_install_script = .false, + }; + } + pub fn clone(this: *const Meta, id: PackageID, buf: []const u8, comptime StringBuilderType: type, builder: StringBuilderType) Meta { return Meta{ .id = id, @@ -4573,6 +4770,7 @@ pub const Package = extern struct { .arch = this.arch, .os = this.os, .origin = this.origin, + .__has_install_script = this.__has_install_script, }; } }; @@ -4650,9 +4848,14 @@ pub const Package = extern struct { inline for (FieldsEnum.fields) |field| { const value = sliced.items(@field(Lockfile.Package.List.Field, field.name)); - if (comptime Environment.allow_assert) + if (comptime Environment.allow_assert) { debug("save(\"{s}\") = {d} bytes", .{ field.name, std.mem.sliceAsBytes(value).len }); - + if (comptime 
strings.eqlComptime(field.name, "meta")) { + for (value) |meta| { + std.debug.assert(meta.__has_install_script != .old); + } + } + } comptime assertNoUninitializedPadding(@TypeOf(value)); try writer.writeAll(std.mem.sliceAsBytes(value)); } @@ -4663,11 +4866,16 @@ pub const Package = extern struct { _ = stream.pwrite(std.mem.asBytes(&really_end_at), end_at); } + const PackagesLoadResult = struct { + list: Lockfile.Package.List, + needs_update: bool = false, + }; + pub fn load( stream: *Stream, end: usize, allocator: Allocator, - ) !Lockfile.Package.List { + ) !PackagesLoadResult { var reader = stream.reader(); const list_len = try reader.readInt(u64, .little); @@ -4704,6 +4912,7 @@ pub const Package = extern struct { list.len = list_len; var sliced = list.slice(); + var needs_update = false; inline for (FieldsEnum.fields) |field| { const value = sliced.items(@field(Lockfile.Package.List.Field, field.name)); @@ -4713,6 +4922,11 @@ pub const Package = extern struct { if (end_pos <= end_at) { @memcpy(bytes, stream.buffer[stream.pos..][0..bytes.len]); stream.pos = end_pos; + if (comptime strings.eqlComptime(field.name, "meta")) { + if (value.len != 0 and value[0].needsUpdate()) { + needs_update = true; + } + } } else if (comptime strings.eqlComptime(field.name, "scripts")) { @memset(bytes, 0); } else { @@ -4720,7 +4934,10 @@ pub const Package = extern struct { } } - return list; + return .{ + .list = list, + .needs_update = needs_update, + }; } }; }; @@ -4730,7 +4947,9 @@ pub fn deinit(this: *Lockfile) void { this.packages.deinit(this.allocator); this.string_pool.deinit(); this.scripts.deinit(this.allocator); - this.trusted_dependencies.deinit(this.allocator); + if (this.trusted_dependencies) |*trusted_dependencies| { + trusted_dependencies.deinit(this.allocator); + } this.workspace_paths.deinit(this.allocator); this.workspace_versions.deinit(this.allocator); this.overrides.deinit(this.allocator); @@ -5036,14 +5255,18 @@ pub const Serializer = struct { pub const version 
= "bun-lockfile-format-v0\n"; const header_bytes: string = "#!/usr/bin/env bun\n" ++ version; - const has_workspace_package_ids_tag: u64 = @bitCast([_]u8{ 'w', 'O', 'r', 'K', 's', 'P', 'a', 'C' }); - const has_trusted_dependencies_tag: u64 = @bitCast([_]u8{ 't', 'R', 'u', 'S', 't', 'E', 'D', 'd' }); - const has_overrides_tag: u64 = @bitCast([_]u8{ 'o', 'V', 'e', 'R', 'r', 'i', 'D', 's' }); + const has_workspace_package_ids_tag: u64 = @bitCast(@as([8]u8, "wOrKsPaC".*)); + const has_trusted_dependencies_tag: u64 = @bitCast(@as([8]u8, "tRuStEDd".*)); + const has_empty_trusted_dependencies_tag: u64 = @bitCast(@as([8]u8, "eMpTrUsT".*)); + const has_overrides_tag: u64 = @bitCast(@as([8]u8, "oVeRriDs".*)); pub fn save(this: *Lockfile, bytes: *std.ArrayList(u8), total_size: *usize, end_pos: *usize) !void { - var old_package_list = this.packages; + + // we clone packages with the z_allocator to make sure bytes are zeroed. + // TODO: investigate if we still need this now that we have `padding_checker.zig` + var old_packages_list = this.packages; this.packages = try this.packages.clone(z_allocator); - old_package_list.deinit(this.allocator); + old_packages_list.deinit(this.allocator); var writer = bytes.writer(); try writer.writeAll(header_bytes); @@ -5117,17 +5340,21 @@ pub const Serializer = struct { ); } - if (this.trusted_dependencies.count() > 0) { - try writer.writeAll(std.mem.asBytes(&has_trusted_dependencies_tag)); + if (this.trusted_dependencies) |trusted_dependencies| { + if (trusted_dependencies.count() > 0) { + try writer.writeAll(std.mem.asBytes(&has_trusted_dependencies_tag)); - try Lockfile.Buffers.writeArray( - StreamType, - stream, - @TypeOf(writer), - writer, - []u32, - this.trusted_dependencies.keys(), - ); + try Lockfile.Buffers.writeArray( + StreamType, + stream, + @TypeOf(writer), + writer, + []u32, + trusted_dependencies.keys(), + ); + } else { + try writer.writeAll(std.mem.asBytes(&has_empty_trusted_dependencies_tag)); + } } if 
(this.overrides.map.count() > 0) { @@ -5162,12 +5389,18 @@ pub const Serializer = struct { try writer.writeAll(&alignment_bytes_to_repeat_buffer); } + + pub const SerializerLoadResult = struct { + packages_need_update: bool = false, + }; + pub fn load( lockfile: *Lockfile, stream: *Stream, allocator: Allocator, log: *logger.Log, - ) !void { + ) !SerializerLoadResult { + var res = SerializerLoadResult{}; var reader = stream.reader(); var header_buf_: [header_bytes.len]u8 = undefined; const header_buf = header_buf_[0..try reader.readAll(&header_buf_)]; @@ -5191,11 +5424,15 @@ pub const Serializer = struct { return error.@"Lockfile is missing data"; } - lockfile.packages = try Lockfile.Package.Serializer.load( + const packages_load_result = try Lockfile.Package.Serializer.load( stream, total_buffer_size, allocator, ); + + lockfile.packages = packages_load_result.list; + res.packages_need_update = packages_load_result.needs_update; + lockfile.buffers = try Lockfile.Buffers.load(stream, allocator, log); if ((try stream.reader().readInt(u64, .little)) != 0) { return error.@"Lockfile is malformed (expected 0 at the end)"; @@ -5269,9 +5506,10 @@ pub const Serializer = struct { { const remaining_in_buffer = total_buffer_size -| stream.pos; - if (remaining_in_buffer > 8 and total_buffer_size <= stream.buffer.len) { + // >= because `has_empty_trusted_dependencies_tag` is tag only + if (remaining_in_buffer >= 8 and total_buffer_size <= stream.buffer.len) { const next_num = try reader.readInt(u64, .little); - if (next_num == has_trusted_dependencies_tag) { + if (remaining_in_buffer > 8 and next_num == has_trusted_dependencies_tag) { var trusted_dependencies_hashes = try Lockfile.Buffers.readArray( stream, allocator, @@ -5279,11 +5517,15 @@ pub const Serializer = struct { ); defer trusted_dependencies_hashes.deinit(allocator); - try lockfile.trusted_dependencies.ensureTotalCapacity(allocator, trusted_dependencies_hashes.items.len); + lockfile.trusted_dependencies = .{}; + try 
lockfile.trusted_dependencies.?.ensureTotalCapacity(allocator, trusted_dependencies_hashes.items.len); - lockfile.trusted_dependencies.entries.len = trusted_dependencies_hashes.items.len; - @memcpy(lockfile.trusted_dependencies.keys(), trusted_dependencies_hashes.items); - try lockfile.trusted_dependencies.reIndex(allocator); + lockfile.trusted_dependencies.?.entries.len = trusted_dependencies_hashes.items.len; + @memcpy(lockfile.trusted_dependencies.?.keys(), trusted_dependencies_hashes.items); + try lockfile.trusted_dependencies.?.reIndex(allocator); + } else if (next_num == has_empty_trusted_dependencies_tag) { + // trusted dependencies exists in package.json but is an empty array. + lockfile.trusted_dependencies = .{}; } else { stream.pos -= 8; } @@ -5357,24 +5599,25 @@ pub const Serializer = struct { if (comptime Environment.allow_assert) std.debug.assert(stream.pos == total_buffer_size); // const end = try reader.readInt(u64, .little); + return res; } }; -pub fn hasMetaHashChanged(this: *Lockfile, print_name_version_string: bool) !bool { +pub fn hasMetaHashChanged(this: *Lockfile, print_name_version_string: bool, packages_len: usize) !bool { const previous_meta_hash = this.meta_hash; - this.meta_hash = try this.generateMetaHash(print_name_version_string); + this.meta_hash = try this.generateMetaHash(print_name_version_string, packages_len); return !strings.eqlLong(&previous_meta_hash, &this.meta_hash, false); } -pub fn generateMetaHash(this: *Lockfile, print_name_version_string: bool) !MetaHash { - if (this.packages.len <= 1) +pub fn generateMetaHash(this: *Lockfile, print_name_version_string: bool, packages_len: usize) !MetaHash { + if (packages_len <= 1) return zero_hash; var string_builder = GlobalStringBuilder{}; defer string_builder.deinit(this.allocator); - const names: []const String = this.packages.items(.name); - const resolutions: []const Resolution = this.packages.items(.resolution); + const names: []const String = 
this.packages.items(.name)[0..packages_len]; + const resolutions: []const Resolution = this.packages.items(.resolution)[0..packages_len]; const bytes = this.buffers.string_bytes.items; - var alphabetized_names = try this.allocator.alloc(PackageID, this.packages.len -| 1); + var alphabetized_names = try this.allocator.alloc(PackageID, packages_len -| 1); defer this.allocator.free(alphabetized_names); const hash_prefix = "\n-- BEGIN SHA512/256(`${alphabetize(name)}@${order(version)}`) --\n"; @@ -5383,7 +5626,7 @@ pub fn generateMetaHash(this: *Lockfile, print_name_version_string: bool) !MetaH { var i: usize = 1; - while (i + 16 < this.packages.len) : (i += 16) { + while (i + 16 < packages_len) : (i += 16) { comptime var j: usize = 0; inline while (j < 16) : (j += 1) { alphabetized_names[(i + j) - 1] = @as(PackageID, @truncate((i + j))); @@ -5391,7 +5634,7 @@ pub fn generateMetaHash(this: *Lockfile, print_name_version_string: bool) !MetaH } } - while (i < this.packages.len) : (i += 1) { + while (i < packages_len) : (i += 1) { alphabetized_names[i - 1] = @as(PackageID, @truncate(i)); string_builder.fmtCount("{s}@{}\n", .{ names[i].slice(bytes), resolutions[i].fmt(bytes) }); } @@ -5405,7 +5648,7 @@ pub fn generateMetaHash(this: *Lockfile, print_name_version_string: bool) !MetaH const scripts = @field(this.scripts, field_name); for (scripts.items) |script| { if (script.script.len > 0) { - string_builder.fmtCount("{s}@{s}: {s}\n", .{ field_name, script.cwd, script.script }); + string_builder.fmtCount("{s}: {s}\n", .{ field_name, script.script }); has_scripts = true; } } @@ -5441,7 +5684,7 @@ pub fn generateMetaHash(this: *Lockfile, print_name_version_string: bool) !MetaH const scripts = @field(this.scripts, field_name); for (scripts.items) |script| { if (script.script.len > 0) { - _ = string_builder.fmt("{s}@{s}: {s}\n", .{ field_name, script.cwd, script.script }); + _ = string_builder.fmt("{s}: {s}\n", .{ field_name, script.script }); } } } @@ -5497,35 +5740,80 @@ pub fn 
resolve(this: *Lockfile, package_name: []const u8, version: Dependency.Ve return null; } -/// The default list of trusted dependencies is a static hashmap -const default_trusted_dependencies = brk: { - const max_values = 512; - - var map: StaticHashMap([]const u8, u0, std.hash_map.StringContext, max_values) = .{}; +const max_default_trusted_dependencies = 512; +pub const default_trusted_dependencies_list: []string = brk: { // This file contains a list of dependencies that Bun runs `postinstall` on by default. const data = @embedFile("./default-trusted-dependencies.txt"); - @setEvalBranchQuota(99999); - + @setEvalBranchQuota(999999); + var buf: [max_default_trusted_dependencies]string = undefined; + var i: usize = 0; var iter = std.mem.tokenizeAny(u8, data, " \n\t"); while (iter.next()) |dep| { - if (map.len == max_values) { - @compileError("default-trusted-dependencies.txt is too large, please increase 'max_values' in lockfile.zig"); + buf[i] = dep; + i += 1; + } + + const Sorter = struct { + pub fn lessThan(_: void, lhs: string, rhs: string) bool { + return std.mem.order(u8, lhs, rhs) == .lt; } - map.putAssumeCapacity(dep, 0); + }; + + const names = buf[0..i]; + + // alphabetical so we don't need to sort in `bun pm trusted --default` + std.sort.pdq(string, names, {}, Sorter.lessThan); + + break :brk names; +}; + +/// The default list of trusted dependencies is a static hashmap +const default_trusted_dependencies = brk: { + const StringHashContext = struct { + pub fn hash(_: @This(), s: []const u8) u64 { + @setEvalBranchQuota(999999); + // truncate to u32 because Lockfile.trustedDependencies uses the same u32 string hash + return @intCast(@as(u32, @truncate(String.Builder.stringHash(s)))); + } + pub fn eql(_: @This(), a: []const u8, b: []const u8) bool { + @setEvalBranchQuota(999999); + return std.mem.eql(u8, a, b); + } + }; + + var map: StaticHashMap([]const u8, void, StringHashContext, max_default_trusted_dependencies) = .{}; + + for 
(default_trusted_dependencies_list) |dep| { + if (map.len == max_default_trusted_dependencies) { + @compileError("default-trusted-dependencies.txt is too large, please increase 'max_default_trusted_dependencies' in lockfile.zig"); + } + + // just in case there's duplicates from truncating + if (map.has(dep)) @compileError("Duplicate hash due to u64 -> u32 truncation"); + + map.putAssumeCapacity(dep, {}); } break :brk ↦ }; pub fn hasTrustedDependency(this: *Lockfile, name: []const u8) bool { - if (this.hasTrustedDependencies()) { - const hash = @as(u32, @truncate(String.Builder.stringHash(name))); - return this.trusted_dependencies.contains(hash) or default_trusted_dependencies.has(name); - } + if (comptime FeatureFlags.breaking_changes_1_1_0) { + if (this.trusted_dependencies) |trusted_dependencies| { + const hash = @as(u32, @truncate(String.Builder.stringHash(name))); + return trusted_dependencies.contains(hash); + } - // always search through default trusted dependencies - return default_trusted_dependencies.has(name); + return default_trusted_dependencies.has(name); + } else { + if (this.trusted_dependencies) |trusted_dependencies| { + const hash = @as(u32, @truncate(String.Builder.stringHash(name))); + return trusted_dependencies.contains(hash) or default_trusted_dependencies.has(name); + } + + return default_trusted_dependencies.has(name); + } } pub fn jsonStringifyDependency(this: *const Lockfile, w: anytype, dep: Dependency, res: ?PackageID) !void { diff --git a/src/install/migration.zig b/src/install/migration.zig index 8c404bce59..fa8e8d0ad1 100644 --- a/src/install/migration.zig +++ b/src/install/migration.zig @@ -487,6 +487,14 @@ pub fn migrateNPMLockfile(this: *Lockfile, allocator: Allocator, log: *logger.Lo .man_dir = String{}, + .__has_install_script = if (pkg.get("hasInstallScript")) |has_install_script_expr| brk: { + if (has_install_script_expr.data != .e_boolean) return error.InvalidNPMLockfile; + break :brk if 
(has_install_script_expr.data.e_boolean.value) + .true + else + .false; + } else .false, + .integrity = if (pkg.get("integrity")) |integrity| try Integrity.parse( integrity.asString(this.allocator) orelse @@ -1016,9 +1024,15 @@ pub fn migrateNPMLockfile(this: *Lockfile, allocator: Allocator, log: *logger.Lo try this.verifyData(); } - this.meta_hash = try this.generateMetaHash(false); + this.meta_hash = try this.generateMetaHash(false, this.packages.len); - return LoadFromDiskResult{ .ok = .{ .lockfile = this, .was_migrated = true } }; + return LoadFromDiskResult{ + .ok = .{ + .lockfile = this, + .was_migrated = true, + .serializer_result = .{}, + }, + }; } fn packageNameFromPath(pkg_path: []const u8) []const u8 { diff --git a/src/install/npm.zig b/src/install/npm.zig index 685e2880cb..3a3d4d29fd 100644 --- a/src/install/npm.zig +++ b/src/install/npm.zig @@ -667,7 +667,7 @@ pub const PackageManifest = struct { } pub fn save(this: *const PackageManifest, tmpdir: std.fs.Dir, cache_dir: std.fs.Dir) !void { - const file_id = bun.Wyhash.hash(0, this.name()); + const file_id = bun.Wyhash11.hash(0, this.name()); var dest_path_buf: [512 + 64]u8 = undefined; var out_path_buf: ["-18446744073709551615".len + ".npm".len + 1]u8 = undefined; var dest_path_stream = std.io.fixedBufferStream(&dest_path_buf); @@ -684,7 +684,7 @@ pub const PackageManifest = struct { } pub fn load(allocator: std.mem.Allocator, cache_dir: std.fs.Dir, package_name: string) !?PackageManifest { - const file_id = bun.Wyhash.hash(0, package_name); + const file_id = bun.Wyhash11.hash(0, package_name); var file_path_buf: [512 + 64]u8 = undefined; const hex_fmt = bun.fmt.hexIntLower(file_id); const file_path = try std.fmt.bufPrintZ(&file_path_buf, "{any}.npm", .{hex_fmt}); @@ -1404,8 +1404,8 @@ pub const PackageManifest = struct { var this_names = dependency_names[0..count]; var this_versions = dependency_values[0..count]; - var name_hasher = bun.Wyhash.init(0); - var version_hasher = bun.Wyhash.init(0); + var 
name_hasher = bun.Wyhash11.init(0); + var version_hasher = bun.Wyhash11.init(0); const is_peer = comptime strings.eqlComptime(pair.prop, "peerDependencies"); diff --git a/src/install/resolution.zig b/src/install/resolution.zig index bc8f14eed4..e5bc2ad25b 100644 --- a/src/install/resolution.zig +++ b/src/install/resolution.zig @@ -28,6 +28,10 @@ pub const Resolution = extern struct { return this.tag.isGit(); } + pub fn isLocal(this: *const Resolution) bool { + return this.tag.isLocal(); + } + pub fn order( lhs: *const Resolution, rhs: *const Resolution, @@ -324,5 +328,9 @@ pub const Resolution = extern struct { pub fn isGit(this: Tag) bool { return this == .git or this == .github or this == .gitlab; } + + pub fn isLocal(this: Tag) bool { + return this == .local_tarball or this == .folder or this == .symlink or this == .workspace or this == .root; + } }; }; diff --git a/src/install/resolvers/folder_resolver.zig b/src/install/resolvers/folder_resolver.zig index f64bd404a3..9dccaf6280 100644 --- a/src/install/resolvers/folder_resolver.zig +++ b/src/install/resolvers/folder_resolver.zig @@ -1,6 +1,8 @@ const std = @import("std"); const PackageID = @import("../install.zig").PackageID; const Lockfile = @import("../install.zig").Lockfile; +const initializeStore = @import("../install.zig").initializeStore; +const json_parser = bun.JSON; const PackageManager = @import("../install.zig").PackageManager; const Npm = @import("../npm.zig"); const logger = @import("root").bun.logger; @@ -107,7 +109,10 @@ pub const FolderResolution = union(Tag) { var abs: string = ""; var rel: string = ""; // We consider it valid if there is a package.json in the folder - const normalized = std.mem.trimRight(u8, normalize(non_normalized_path), std.fs.path.sep_str); + const normalized = if (non_normalized_path.len == 1 and non_normalized_path[0] == '.') + non_normalized_path + else + std.mem.trimRight(u8, normalize(non_normalized_path), std.fs.path.sep_str); if (strings.startsWithChar(normalized, 
'.')) { var tempcat: [bun.MAX_PATH_BYTES]u8 = undefined; @@ -184,6 +189,18 @@ pub const FolderResolution = union(Tag) { features, ); + const has_scripts = package.scripts.hasAny() or brk: { + const dir = std.fs.path.dirname(abs) orelse ""; + const binding_dot_gyp_path = bun.path.joinAbsStringZ( + dir, + &[_]string{"binding.gyp"}, + .auto, + ); + break :brk bun.sys.exists(binding_dot_gyp_path); + }; + + package.meta.setHasInstallScript(has_scripts); + if (manager.lockfile.getPackageID(package.name_hash, version, &package.resolution)) |existing_id| { package.meta.id = existing_id; manager.lockfile.packages.set(existing_id, package); diff --git a/src/install/semver.zig b/src/install/semver.zig index 009b73d7f8..a4325a4d31 100644 --- a/src/install/semver.zig +++ b/src/install/semver.zig @@ -300,7 +300,7 @@ pub const String = extern struct { pub const StringPool = std.HashMap(u64, String, IdentityContext(u64), 80); pub inline fn stringHash(buf: []const u8) u64 { - return bun.Wyhash.hash(0, buf); + return bun.Wyhash11.hash(0, buf); } pub inline fn count(this: *Builder, slice_: string) void { @@ -578,7 +578,7 @@ pub const SlicedString = struct { std.debug.assert(@intFromPtr(this.buf.ptr) <= @intFromPtr(this.slice.ptr) and ((@intFromPtr(this.slice.ptr) + this.slice.len) <= (@intFromPtr(this.buf.ptr) + this.buf.len))); } - return ExternalString.init(this.buf, this.slice, bun.Wyhash.hash(0, this.slice)); + return ExternalString.init(this.buf, this.slice, bun.Wyhash11.hash(0, this.slice)); } pub inline fn value(this: SlicedString) String { diff --git a/src/js_ast.zig b/src/js_ast.zig index 93e532a144..aa6a8ffc10 100644 --- a/src/js_ast.zig +++ b/src/js_ast.zig @@ -1458,6 +1458,22 @@ pub const E = struct { return array; } + + /// Assumes each item in the array is a string + pub fn alphabetizeStrings(this: *Array) void { + if (comptime Environment.allow_assert) { + for (this.items.slice()) |item| { + std.debug.assert(item.data == .e_string); + } + } + std.sort.pdq(Expr, 
this.items.slice(), {}, Sorter.isLessThan); + } + + const Sorter = struct { + pub fn isLessThan(ctx: void, lhs: Expr, rhs: Expr) bool { + return strings.cmpStringsAsc(ctx, lhs.data.e_string.data, rhs.data.e_string.data); + } + }; }; pub const Unary = struct { @@ -2070,7 +2086,13 @@ pub const E = struct { return null; } + /// Assumes each key in the property is a string pub fn alphabetizeProperties(this: *Object) void { + if (comptime Environment.allow_assert) { + for (this.properties.slice()) |prop| { + std.debug.assert(prop.key.?.data == .e_string); + } + } std.sort.pdq(G.Property, this.properties.slice(), {}, Sorter.isLessThan); } @@ -3038,6 +3060,8 @@ pub const Expr = struct { loc: logger.Loc, data: Data, + pub const empty = Expr{ .data = .{ .e_missing = E.Missing{} }, .loc = logger.Loc.Empty }; + pub fn isAnonymousNamed(expr: Expr) bool { return switch (expr.data) { .e_arrow => true, diff --git a/src/resolver/resolver.zig b/src/resolver/resolver.zig index bb5a242a3a..d73230dfcc 100644 --- a/src/resolver/resolver.zig +++ b/src/resolver/resolver.zig @@ -2218,6 +2218,7 @@ pub const Resolver = struct { ) catch |err| { return .{ .failure = err }; }; + package.meta.setHasInstallScript(package.scripts.hasAny()); package = pm.lockfile.appendPackage(package) catch |err| { return .{ .failure = err }; }; @@ -2232,6 +2233,7 @@ pub const Resolver = struct { .value = .{ .root = {} }, }, }; + package.meta.setHasInstallScript(package.scripts.hasAny()); package = pm.lockfile.appendPackage(package) catch |err| { return .{ .failure = err }; }; diff --git a/src/wyhash.zig b/src/wyhash.zig index 95a25d2cab..34a69f89cd 100644 --- a/src/wyhash.zig +++ b/src/wyhash.zig @@ -135,21 +135,21 @@ const WyhashStateless = struct { /// Fast non-cryptographic 64bit hash function. 
/// See https://github.com/wangyi-fudan/wyhash -pub const Wyhash = struct { +pub const Wyhash11 = struct { state: WyhashStateless, buf: [32]u8, buf_len: usize, - pub fn init(seed: u64) Wyhash { - return Wyhash{ + pub fn init(seed: u64) Wyhash11 { + return Wyhash11{ .state = WyhashStateless.init(seed), .buf = undefined, .buf_len = 0, }; } - pub fn update(self: *Wyhash, b: []const u8) void { + pub fn update(self: *Wyhash11, b: []const u8) void { var off: usize = 0; if (self.buf_len != 0 and self.buf_len + b.len >= 32) { @@ -167,7 +167,7 @@ pub const Wyhash = struct { self.buf_len += @as(u8, @intCast(b[off + aligned_len ..].len)); } - pub fn final(self: *Wyhash) u64 { + pub fn final(self: *Wyhash11) u64 { const rem_key = self.buf[0..self.buf_len]; return self.state.final(rem_key); diff --git a/test/cli/install/bun-install.test.ts b/test/cli/install/bun-install.test.ts index 9038957ce2..fb6db756d1 100644 --- a/test/cli/install/bun-install.test.ts +++ b/test/cli/install/bun-install.test.ts @@ -6287,7 +6287,10 @@ it("should handle trustedDependencies", async () => { " + bar@bar", " + moo@moo", "", - " 2 packages installed", + expect.stringContaining(" 2 packages installed"), + "", + " Skipped ~1 script. Run `bun pm trusted` for details.", + "", ]); expect(await exited).toBe(0); expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([".cache", "bar", "moo"]); diff --git a/test/cli/install/registry/bun-install-registry.test.ts b/test/cli/install/registry/bun-install-registry.test.ts index 6051ab4c68..0125fee597 100644 --- a/test/cli/install/registry/bun-install-registry.test.ts +++ b/test/cli/install/registry/bun-install-registry.test.ts @@ -409,6 +409,9 @@ test("it should correctly link binaries after deleting node_modules", async () = " + what-bin@1.0.0", "", expect.stringContaining("3 packages installed"), + "", + " Skipped ~1 script. 
Run `bun pm trusted` for details.", + "", ]); expect(await exited).toBe(0); @@ -434,6 +437,9 @@ test("it should correctly link binaries after deleting node_modules", async () = " + what-bin@1.0.0", "", expect.stringContaining("3 packages installed"), + "", + " Skipped ~1 script. Run `bun pm trusted` for details.", + "", ]); expect(await exited).toBe(0); }); @@ -574,6 +580,9 @@ test("it should install with missing bun.lockb, node_modules, and/or cache", asy " + what-bin@1.0.0", "", expect.stringContaining("19 packages installed"), + "", + " Skipped ~1 script. Run `bun pm trusted` for details.", + "", ]); expect(await exited).toBe(0); @@ -610,6 +619,9 @@ test("it should install with missing bun.lockb, node_modules, and/or cache", asy " + what-bin@1.0.0", "", expect.stringContaining("19 packages installed"), + "", + " Skipped ~1 script. Run `bun pm trusted` for details.", + "", ]); expect(await exited).toBe(0); @@ -1635,6 +1647,9 @@ test("missing package on reinstall, some with binaries", async () => { " + what-bin@1.0.0", "", expect.stringContaining("19 packages installed"), + "", + " Skipped ~1 script. Run `bun pm trusted` for details.", + "", ]); expect(await exited).toBe(0); @@ -1878,7 +1893,7 @@ for (const forceWaiterThread of [false, true]) { expect(await file(join(depDir, "install.txt")).text()).toBe("install!"); expect(await file(join(depDir, "postinstall.txt")).text()).toBe("postinstall!"); expect(await file(join(depDir, "prepare.txt")).text()).toBe("prepare!"); - }, 10_000); + }, 20_000); test("workspace lifecycle scripts", async () => { await writeFile( @@ -2054,7 +2069,10 @@ for (const forceWaiterThread of [false, true]) { "", " + all-lifecycle-scripts@1.0.0", "", - " 1 package installed", + expect.stringContaining("1 package installed"), + "", + " Skipped ~1 script. 
Run `bun pm trusted` for details.", + "", ]); const depDir = join(packageDir, "node_modules", "all-lifecycle-scripts"); @@ -2514,6 +2532,9 @@ for (const forceWaiterThread of [false, true]) { " + binding-gyp-scripts@1.5.0", "", expect.stringContaining("2 packages installed"), + "", + " Skipped ~1 script. Run `bun pm trusted` for details.", + "", ]); expect(await exited).toBe(0); expect(await exists(join(packageDir, "node_modules", "binding-gyp-scripts", "build.node"))).toBeFalse(); @@ -2700,6 +2721,9 @@ for (const forceWaiterThread of [false, true]) { " + lifecycle-install-test@github:dylan-conway/lifecycle-install-test#3ba6af5", "", expect.stringContaining("1 package installed"), + "", + " Skipped ~1 script. Run `bun pm trusted` for details.", + "", ]); expect(await exited).toBe(0); expect(await exists(join(packageDir, "node_modules", "lifecycle-install-test", "preprepare.txt"))).toBeFalse(); @@ -2863,6 +2887,9 @@ for (const forceWaiterThread of [false, true]) { " + what-bin@1.5.0", "", expect.stringContaining("3 packages installed"), + "", + " Skipped ~1 script. 
Run `bun pm trusted` for details.", + "", ]); expect(await exited).toBe(0); expect(await file(join(packageDir, "node_modules", ".bin", "what-bin")).text()).toContain("what-bin@1.5.0"); @@ -2987,9 +3014,781 @@ for (const forceWaiterThread of [false, true]) { expect(err).not.toContain("error:"); expect(err).toContain("v"); }); + + test.todo("default trusted dependencies should not be used of trustedDependencies is populated", async () => { + await writeFile( + join(packageDir, "package.json"), + JSON.stringify({ + name: "foo", + version: "1.2.3", + dependencies: { + "uses-what-bin": "1.0.0", + // fake electron package because it's in the default trustedDependencies list + "electron": "1.0.0", + }, + }), + ); + + var { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env, + }); + + // electron lifecycle scripts should run, uses-what-bin scripts should not run + var err = await new Response(stderr).text(); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + var out = await new Response(stdout).text(); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + "", + " + electron@1.0.0", + " + uses-what-bin@1.0.0", + "", + expect.stringContaining("3 packages installed"), + ]); + expect(await exited).toBe(0); + expect(await exists(join(packageDir, "node_modules", "uses-what-bin", "what-bin.txt"))).toBeFalse(); + expect(await exists(join(packageDir, "node_modules", "electron", "preinstall.txt"))).toBeTrue(); + + await rm(join(packageDir, "node_modules"), { recursive: true, force: true }); + await rm(join(packageDir, "bun.lockb")); + + await writeFile( + join(packageDir, "package.json"), + JSON.stringify({ + name: "foo", + version: "1.2.3", + dependencies: { + "uses-what-bin": "1.0.0", + "electron": "1.0.0", + }, + trustedDependencies: ["uses-what-bin"], + }), + ); + + // now uses-what-bin 
scripts should run and electron scripts should not run. + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env, + })); + + err = await Bun.readableStreamToText(stderr); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + out = await Bun.readableStreamToText(stdout); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + "", + " + electron@1.0.0", + " + uses-what-bin@1.0.0", + "", + expect.stringContaining("3 packages installed"), + ]); + expect(await exited).toBe(0); + + expect(await exists(join(packageDir, "node_modules", "uses-what-bin", "what-bin.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "node_modules", "electron", "preinstall.txt"))).toBeFalse(); + }); + + test.todo("does not run any scripts if trustedDependencies is an empty list", async () => { + await writeFile( + join(packageDir, "package.json"), + JSON.stringify({ + name: "foo", + version: "1.2.3", + dependencies: { + "uses-what-bin": "1.0.0", + "electron": "1.0.0", + }, + trustedDependencies: [], + }), + ); + + var { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env, + }); + + const err = await Bun.readableStreamToText(stderr); + const out = await Bun.readableStreamToText(stdout); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + "", + " + electron@1.0.0", + " + uses-what-bin@1.0.0", + "", + expect.stringContaining("3 packages installed"), + ]); + expect(await exited).toBe(0); + expect(await exists(join(packageDir, "node_modules", "uses-what-bin", "what-bin.txt"))).toBeFalse(); + expect(await exists(join(packageDir, "node_modules", "electron", 
"preinstall.txt"))).toBeFalse(); + }); + + test.todo("will run default trustedDependencies after install that didn't include them", async () => { + await writeFile( + join(packageDir, "package.json"), + JSON.stringify({ + name: "foo", + version: "1.2.3", + dependencies: { + electron: "1.0.0", + }, + trustedDependencies: ["blah"], + }), + ); + + // first install does not run electron scripts + + var { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env, + }); + + var err = await Bun.readableStreamToText(stderr); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + var out = await Bun.readableStreamToText(stdout); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + "", + " + electron@1.0.0", + "", + expect.stringContaining("1 package installed"), + ]); + expect(await exited).toBe(0); + expect(await exists(join(packageDir, "node_modules", "electron", "preinstall.txt"))).toBeFalse(); + + await writeFile( + join(packageDir, "package.json"), + JSON.stringify({ + name: "foo", + version: "1.2.3", + dependencies: { + electron: "1.0.0", + }, + }), + ); + + // The electron scripts should run now because it's in default trusted dependencies. 
+ + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env, + })); + + err = await Bun.readableStreamToText(stderr); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + out = await Bun.readableStreamToText(stdout); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + "", + "Checked 1 install across 2 packages (no changes)", + ]); + expect(await exited).toBe(0); + expect(await exists(join(packageDir, "node_modules", "electron", "preinstall.txt"))).toBeTrue(); + }); + + describe("--trust", async () => { + const trustTests = [ + { + label: "only name", + packageJson: { + name: "foo", + }, + }, + { + label: "empty dependencies", + packageJson: { + name: "foo", + dependencies: {}, + }, + }, + { + label: "populated dependencies", + packageJson: { + name: "foo", + dependencies: { + "uses-what-bin": "1.0.0", + }, + }, + }, + + { + label: "empty trustedDependencies", + packageJson: { + name: "foo", + trustedDependencies: [], + }, + }, + + { + label: "populated dependencies, empty trustedDependencies", + packageJson: { + name: "foo", + dependencies: { + "uses-what-bin": "1.0.0", + }, + trustedDependencies: [], + }, + }, + + { + label: "populated dependencies and trustedDependencies", + packageJson: { + name: "foo", + dependencies: { + "uses-what-bin": "1.0.0", + }, + trustedDependencies: ["uses-what-bin"], + }, + }, + + { + label: "empty dependencies and trustedDependencies", + packageJson: { + name: "foo", + dependencies: {}, + trustedDependencies: [], + }, + }, + ]; + for (const { label, packageJson } of trustTests) { + test(label, async () => { + await writeFile(join(packageDir, "package.json"), JSON.stringify(packageJson)); + + let { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "i", "--trust", "uses-what-bin@1.0.0"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + 
stdin: "pipe", + env, + }); + + let err = await Bun.readableStreamToText(stderr); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(err).not.toContain("warn:"); + let out = await Bun.readableStreamToText(stdout); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + "", + " installed uses-what-bin@1.0.0", + "", + " 2 packages installed", + ]); + expect(await exited).toBe(0); + expect(await exists(join(packageDir, "node_modules", "uses-what-bin", "what-bin.txt"))).toBeTrue(); + expect(await file(join(packageDir, "package.json")).json()).toEqual({ + name: "foo", + dependencies: { + "uses-what-bin": "1.0.0", + }, + trustedDependencies: ["uses-what-bin"], + }); + + // another install should not error with json SyntaxError + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "i"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + stdin: "pipe", + env, + })); + + err = await Bun.readableStreamToText(stderr); + expect(err).not.toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(err).not.toContain("warn:"); + out = await Bun.readableStreamToText(stdout); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + "", + "Checked 2 installs across 3 packages (no changes)", + ]); + expect(await exited).toBe(0); + }); + } + describe("packages without lifecycle scripts", async () => { + test("initial install", async () => { + await writeFile( + join(packageDir, "package.json"), + JSON.stringify({ + name: "foo", + }), + ); + + const { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "i", "--trust", "no-deps@1.0.0"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + stdin: "pipe", + env, + }); + + const err = await Bun.readableStreamToText(stderr); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + 
expect(err).not.toContain("error:"); + expect(err).not.toContain("warn:"); + const out = await Bun.readableStreamToText(stdout); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + "", + " installed no-deps@1.0.0", + "", + expect.stringContaining("1 package installed"), + ]); + expect(await exited).toBe(0); + expect(await exists(join(packageDir, "node_modules", "no-deps"))).toBeTrue(); + expect(await file(join(packageDir, "package.json")).json()).toEqual({ + name: "foo", + dependencies: { + "no-deps": "1.0.0", + }, + }); + }); + test("already installed", async () => { + await writeFile( + join(packageDir, "package.json"), + JSON.stringify({ + name: "foo", + }), + ); + let { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "i", "no-deps"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + stdin: "pipe", + env, + }); + + let err = await Bun.readableStreamToText(stderr); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(err).not.toContain("warn:"); + let out = await Bun.readableStreamToText(stdout); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + "", + " installed no-deps@2.0.0", + "", + expect.stringContaining("1 package installed"), + ]); + expect(await exited).toBe(0); + expect(await exists(join(packageDir, "node_modules", "no-deps"))).toBeTrue(); + expect(await file(join(packageDir, "package.json")).json()).toEqual({ + name: "foo", + dependencies: { + "no-deps": "^2.0.0", + }, + }); + + // oops, I wanted to run the lifecycle scripts for no-deps, I'll install + // again with --trust. + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "i", "--trust", "no-deps"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + stdin: "pipe", + env, + })); + + // oh, I didn't realize no-deps doesn't have + // any lifecycle scripts. It shouldn't automatically add to + // trustedDependencies. 
+ + err = await Bun.readableStreamToText(stderr); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + out = await Bun.readableStreamToText(stdout); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + "", + " installed no-deps@2.0.0", + "", + expect.stringContaining("done"), + "", + ]); + expect(await exited).toBe(0); + expect(await exists(join(packageDir, "node_modules", "no-deps"))).toBeTrue(); + expect(await file(join(packageDir, "package.json")).json()).toEqual({ + name: "foo", + dependencies: { + "no-deps": "^2.0.0", + }, + }); + }); + }); + }); + describe("updating trustedDependencies", async () => { + test("existing trustedDependencies, unchanged trustedDependencies", async () => { + await writeFile( + join(packageDir, "package.json"), + JSON.stringify({ + name: "foo", + trustedDependencies: ["uses-what-bin"], + dependencies: { + "uses-what-bin": "1.0.0", + }, + }), + ); + + let { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "i"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + stdin: "pipe", + env, + }); + + let err = await Bun.readableStreamToText(stderr); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(err).not.toContain("warn:"); + let out = await Bun.readableStreamToText(stdout); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + "", + " + uses-what-bin@1.0.0", + "", + expect.stringContaining("2 packages installed"), + ]); + expect(await exited).toBe(0); + expect(await exists(join(packageDir, "node_modules", "uses-what-bin", "what-bin.txt"))).toBeTrue(); + expect(await file(join(packageDir, "package.json")).json()).toEqual({ + name: "foo", + dependencies: { + "uses-what-bin": "1.0.0", + }, + trustedDependencies: ["uses-what-bin"], + }); + + // no changes, lockfile shouldn't be saved + ({ stdout, stderr, exited } = spawn({ + 
cmd: [bunExe(), "i"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + stdin: "pipe", + env, + })); + + err = await Bun.readableStreamToText(stderr); + expect(err).not.toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(err).not.toContain("warn:"); + out = await Bun.readableStreamToText(stdout); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + "", + "Checked 2 installs across 3 packages (no changes)", + ]); + expect(await exited).toBe(0); + }); + test("existing trustedDependencies, removing trustedDependencies", async () => { + await writeFile( + join(packageDir, "package.json"), + JSON.stringify({ + name: "foo", + trustedDependencies: ["uses-what-bin"], + dependencies: { + "uses-what-bin": "1.0.0", + }, + }), + ); + + let { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "i"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + stdin: "pipe", + env, + }); + + let err = await Bun.readableStreamToText(stderr); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(err).not.toContain("warn:"); + let out = await Bun.readableStreamToText(stdout); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + "", + " + uses-what-bin@1.0.0", + "", + expect.stringContaining("2 packages installed"), + ]); + expect(await exited).toBe(0); + expect(await exists(join(packageDir, "node_modules", "uses-what-bin", "what-bin.txt"))).toBeTrue(); + expect(await file(join(packageDir, "package.json")).json()).toEqual({ + name: "foo", + dependencies: { + "uses-what-bin": "1.0.0", + }, + trustedDependencies: ["uses-what-bin"], + }); + + await writeFile( + join(packageDir, "package.json"), + JSON.stringify({ + name: "foo", + dependencies: { + "uses-what-bin": "1.0.0", + }, + }), + ); + + // this script should not run because uses-what-bin is no longer in trustedDependencies + await 
rm(join(packageDir, "node_modules", "uses-what-bin", "what-bin.txt"), { force: true }); + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "i"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + stdin: "pipe", + env, + })); + + err = await Bun.readableStreamToText(stderr); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(err).not.toContain("warn:"); + out = await Bun.readableStreamToText(stdout); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + "", + "Checked 2 installs across 3 packages (no changes)", + ]); + expect(await exited).toBe(0); + expect(await file(join(packageDir, "package.json")).json()).toEqual({ + name: "foo", + dependencies: { + "uses-what-bin": "1.0.0", + }, + }); + expect(await exists(join(packageDir, "node_modules", "uses-what-bin", "what-bin.txt"))).toBeFalse(); + }); + test("non-existent trustedDependencies, then adding it", async () => { + await writeFile( + join(packageDir, "package.json"), + JSON.stringify({ + name: "foo", + dependencies: { + "electron": "1.0.0", + }, + }), + ); + + let { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "i"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + stdin: "pipe", + env, + }); + + let err = await Bun.readableStreamToText(stderr); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(err).not.toContain("warn:"); + let out = await Bun.readableStreamToText(stdout); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + "", + " + electron@1.0.0", + "", + expect.stringContaining("1 package installed"), + ]); + expect(await exited).toBe(0); + expect(await exists(join(packageDir, "node_modules", "electron", "preinstall.txt"))).toBeTrue(); + expect(await file(join(packageDir, "package.json")).json()).toEqual({ + name: "foo", + dependencies: { + "electron": "1.0.0", + 
}, + }); + + await writeFile( + join(packageDir, "package.json"), + JSON.stringify({ + name: "foo", + trustedDependencies: ["electron"], + dependencies: { + "electron": "1.0.0", + }, + }), + ); + + await rm(join(packageDir, "node_modules", "electron", "preinstall.txt"), { force: true }); + + // lockfile should save even though there are no changes to trustedDependencies due to + // the default list + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "i"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + stdin: "pipe", + env, + })); + + err = await Bun.readableStreamToText(stderr); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(err).not.toContain("warn:"); + out = await Bun.readableStreamToText(stdout); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + "", + "Checked 1 install across 2 packages (no changes)", + ]); + expect(await exited).toBe(0); + expect(await exists(join(packageDir, "node_modules", "electron", "preinstall.txt"))).toBeTrue(); + }); + }); }); } +describe("pm trust", async () => { + test("--default", async () => { + await writeFile( + join(packageDir, "package.json"), + JSON.stringify({ + name: "foo", + }), + ); + + let { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "pm", "trusted", "--default"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + env, + }); + + let err = await Bun.readableStreamToText(stderr); + expect(err).not.toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(err).not.toContain("warn:"); + let out = await Bun.readableStreamToText(stdout); + expect(out).toContain("Default trusted dependencies"); + expect(await exited).toBe(0); + }); + describe("--all", async () => { + test("no dependencies", async () => { + await writeFile( + join(packageDir, "package.json"), + JSON.stringify({ + name: "foo", + }), + ); + + let { stdout, 
stderr, exited } = spawn({ + cmd: [bunExe(), "pm", "trusted", "--all"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + env, + }); + + let err = await Bun.readableStreamToText(stderr); + expect(err).toContain("error: Lockfile not found"); + let out = await Bun.readableStreamToText(stdout); + expect(out).toBeEmpty(); + expect(await exited).toBe(1); + }); + test("some dependencies, non with scripts", async () => { + await writeFile( + join(packageDir, "package.json"), + JSON.stringify({ + name: "foo", + dependencies: { + "uses-what-bin": "1.0.0", + }, + }), + ); + + let { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "i"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + env, + }); + + let err = await Bun.readableStreamToText(stderr); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(err).not.toContain("warn:"); + let out = await Bun.readableStreamToText(stdout); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + "", + " + uses-what-bin@1.0.0", + "", + expect.stringContaining("2 packages installed"), + "", + " Skipped ~1 script. 
Run `bun pm trusted` for details.", + "", + ]); + expect(await exited).toBe(0); + + expect(await exists(join(packageDir, "node_modules", "uses-what-bin", "what-bin.txt"))).toBeFalse(); + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "pm", "trust", "uses-what-bin"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + env, + })); + + err = await Bun.readableStreamToText(stderr); + expect(err).toBeEmpty(); + + out = await Bun.readableStreamToText(stdout); + expect(err).toBeEmpty(); + expect(await exited).toBe(0); + + expect(await exists(join(packageDir, "node_modules", "uses-what-bin", "what-bin.txt"))).toBeTrue(); + }); + }); +}); + test("it should be able to find binary in node_modules/.bin from parent directory of root package", async () => { await mkdir(join(packageDir, "node_modules", ".bin"), { recursive: true }); await mkdir(join(packageDir, "morePackageDir")); diff --git a/test/cli/install/registry/packages/electron/electron-1.0.0.tgz b/test/cli/install/registry/packages/electron/electron-1.0.0.tgz new file mode 100644 index 0000000000..7f20d5c9b6 Binary files /dev/null and b/test/cli/install/registry/packages/electron/electron-1.0.0.tgz differ diff --git a/test/cli/install/registry/packages/electron/package.json b/test/cli/install/registry/packages/electron/package.json new file mode 100644 index 0000000000..924c1bef2d --- /dev/null +++ b/test/cli/install/registry/packages/electron/package.json @@ -0,0 +1,41 @@ +{ + "name": "electron", + "versions": { + "1.0.0": { + "name": "electron", + "version": "1.0.0", + "scripts": { + "preinstall": "bun index.js" + }, + "_id": "electron@1.0.0", + "_nodeVersion": "21.6.0", + "_npmVersion": "10.2.4", + "dist": { + "integrity": "sha512-GkuwCdn6o8Krsxb3DIIqYP+TAi8Y5jYUadmseZ6nR2op2k5ssdKRYo4JjYDGopa1ACrGAcQuWViz/+vX/WjYnA==", + "shasum": "f1b8bc2c23cd7e4f1500669dfaf8757578d2e391", + "tarball": "http://localhost:4873/electron/-/electron-1.0.0.tgz" + }, + "contributors": [] + } + }, + "time": { + "modified": 
"2024-02-15T01:16:09.439Z", + "created": "2024-02-15T01:16:09.439Z", + "1.0.0": "2024-02-15T01:16:09.439Z" + }, + "users": {}, + "dist-tags": { + "latest": "1.0.0" + }, + "_uplinks": {}, + "_distfiles": {}, + "_attachments": { + "electron-1.0.0.tgz": { + "shasum": "f1b8bc2c23cd7e4f1500669dfaf8757578d2e391", + "version": "1.0.0" + } + }, + "_rev": "", + "_id": "electron", + "readme": "ERROR: No README data found!" +} \ No newline at end of file