fix(install): lifecycle script changes (#8943)

* empty trustedDependencies

* tests

* handle edge cases with default trusted dependencies

* could be zero length

* --trusted and skipped scripts

* resolver too

* second run --trusted

* --trust, better formatting

* more tests

* --trusted applies to dep deps, more tests

* progress

* fix build

* fix crash, make it look good, comments

* alphabetize, verbose log

* feature flag

* update lockfile

* update skipped text

* check update requests first

* be more careful with inline strings

* only with scripts

* fix tests, todo tests

* fix another test

* fix merge

* fix fix merge

* check binding.gyp for tarball and git resolutions

* remove dead code

* debug assert

* move newline printing

* use enum for `__has_install_script`

* oops

* clone packages

* Update src/install/install.zig

Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>

---------

Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
Author: Dylan Conway
Date:   2024-03-07 19:22:21 -08:00
Committed by: GitHub
Parent: a927340ce3
Commit: d37fbbd4e0
24 changed files with 2648 additions and 523 deletions
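
Illustrative usage of the `bun pm trust`/`trusted` subcommand added by this change (a sketch based on the help text in the diff below, not an exhaustive reference):

    bun pm trusted              print trusted and untrusted dependencies that have scripts
    bun pm trusted --default    print the list of default trusted dependencies
    bun pm trust <packages...>  trust the named packages and run their scripts
    bun pm trust --all          trust all untrusted dependencies and run their scripts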


@@ -1,9 +1,12 @@
const std = @import("std");
const Progress = std.Progress;
const bun = @import("root").bun;
const Global = bun.Global;
const Output = bun.Output;
const string = bun.string;
const strings = bun.strings;
const log = bun.log;
const logger = bun.logger;
const Command = @import("../cli.zig").Command;
const Fs = @import("../fs.zig");
const Dependency = @import("../install/dependency.zig");
@@ -15,17 +18,22 @@ const Lockfile = @import("../install/lockfile.zig");
const NodeModulesFolder = Lockfile.Tree.NodeModulesFolder;
const Path = @import("../resolver/resolve_path.zig");
const String = @import("../install/semver.zig").String;
const ArrayIdentityContext = bun.ArrayIdentityContext;
const DepIdSet = std.ArrayHashMapUnmanaged(DependencyID, void, ArrayIdentityContext, false);
const Environment = bun.Environment;
fn handleLoadLockfileErrors(load_lockfile: Lockfile.LoadFromDiskResult, pm: *PackageManager) void {
if (load_lockfile == .not_found) {
if (pm.options.log_level != .silent)
Output.prettyErrorln("Lockfile not found", .{});
if (pm.options.log_level != .silent) {
Output.errGeneric("Lockfile not found", .{});
}
Global.exit(1);
}
if (load_lockfile == .err) {
if (pm.options.log_level != .silent)
Output.prettyErrorln("Error loading lockfile: {s}", .{@errorName(load_lockfile.err.value)});
if (pm.options.log_level != .silent) {
Output.errGeneric("Error loading lockfile: {s}", .{@errorName(load_lockfile.err.value)});
}
Global.exit(1);
}
}
@@ -86,16 +94,20 @@ pub const PackageManagerCommand = struct {
Output.prettyln(
\\<b><blue>bun pm<r>: Package manager utilities
\\
\\ bun pm <b>bin<r> print the path to bin folder
\\ bun pm <b>-g bin<r> print the <b>global<r> path to bin folder
\\ bun pm <b>ls<r> list the dependency tree according to the current lockfile
\\ bun pm <b>ls<r> <cyan>--all<r> list the entire dependency tree according to the current lockfile
\\ bun pm <b>hash<r> generate & print the hash of the current lockfile
\\ bun pm <b>hash-string<r> print the string used to hash the lockfile
\\ bun pm <b>hash-print<r> print the hash stored in the current lockfile
\\ bun pm <b>cache<r> print the path to the cache folder
\\ bun pm <b>cache rm<r> clear the cache
\\ bun pm <b>migrate<r> migrate another package manager's lockfile without installing anything
\\ bun pm <b>bin<r> print the path to bin folder
\\ <d>└<r> <cyan>-g<r> bin print the <b>global<r> path to bin folder
\\ bun pm <b>ls<r> list the dependency tree according to the current lockfile
\\ <d>└<r> <cyan>--all<r> list the entire dependency tree according to the current lockfile
\\ bun pm <b>hash<r> generate & print the hash of the current lockfile
\\ bun pm <b>hash-string<r> print the string used to hash the lockfile
\\ bun pm <b>hash-print<r> print the hash stored in the current lockfile
\\ bun pm <b>cache<r> print the path to the cache folder
\\ bun pm <b>cache rm<r> clear the cache
\\ bun pm <b>migrate<r> migrate another package manager's lockfile without installing anything
\\ bun pm <b>trust(ed)<r> print current trusted and untrusted dependencies with scripts
\\ <d>├<r> \<packages, ...\> trust dependencies and run scripts
\\ <d>├<r> <cyan>--all<r> trust all untrusted dependencies and run their scripts
\\ <d>└<r> <cyan>--default<r> print the list of default trusted dependencies
\\
\\Learn more about these at <magenta>https://bun.sh/docs/cli/pm<r>
\\
@@ -155,7 +167,7 @@ pub const PackageManagerCommand = struct {
const load_lockfile = pm.lockfile.loadFromDisk(ctx.allocator, ctx.log, "bun.lockb");
handleLoadLockfileErrors(load_lockfile, pm);
_ = try pm.lockfile.hasMetaHashChanged(false);
_ = try pm.lockfile.hasMetaHashChanged(false, pm.lockfile.packages.len);
Output.flush();
Output.disableBuffering();
@@ -175,7 +187,7 @@ pub const PackageManagerCommand = struct {
const load_lockfile = pm.lockfile.loadFromDisk(ctx.allocator, ctx.log, "bun.lockb");
handleLoadLockfileErrors(load_lockfile, pm);
_ = try pm.lockfile.hasMetaHashChanged(true);
_ = try pm.lockfile.hasMetaHashChanged(true, pm.lockfile.packages.len);
Global.exit(0);
} else if (strings.eqlComptime(subcommand, "cache")) {
var dir: [bun.MAX_PATH_BYTES]u8 = undefined;
@@ -234,6 +246,379 @@ pub const PackageManagerCommand = struct {
}
Output.writer().writeAll(outpath) catch {};
Global.exit(0);
} else if (strings.eqlComptime(subcommand, "trusted") or (strings.eqlComptime(subcommand, "trust"))) {
// do this before loading lockfile because you don't need a lockfile
// to see the default trusted dependencies
if (strings.leftHasAnyInRight(args, &.{"--default"})) {
Output.print("Default trusted dependencies ({d}):\n", .{Lockfile.default_trusted_dependencies_list.len});
for (Lockfile.default_trusted_dependencies_list) |name| {
Output.pretty(" <d>-<r> {s}\n", .{name});
}
Global.exit(0);
}
const load_lockfile = pm.lockfile.loadFromDisk(ctx.allocator, ctx.log, "bun.lockb");
handleLoadLockfileErrors(load_lockfile, pm);
try pm.updateLockfileIfNeeded(load_lockfile);
const buf = pm.lockfile.buffers.string_bytes.items;
if (args.len == 2) {
// no args, print information for trusted and untrusted dependencies with scripts.
const packages = pm.lockfile.packages.slice();
const metas: []Lockfile.Package.Meta = packages.items(.meta);
const scripts: []Lockfile.Package.Scripts = packages.items(.scripts);
const resolutions: []Install.Resolution = packages.items(.resolution);
var trusted_set: std.AutoArrayHashMapUnmanaged(u64, String) = .{};
var untrusted_dep_ids: std.AutoArrayHashMapUnmanaged(DependencyID, void) = .{};
defer untrusted_dep_ids.deinit(ctx.allocator);
// loop through all dependencies, print all the trusted packages, and collect
// untrusted packages with lifecycle scripts
for (pm.lockfile.buffers.dependencies.items, 0..) |dep, i| {
const dep_id: DependencyID = @intCast(i);
const package_id = pm.lockfile.buffers.resolutions.items[dep_id];
if (package_id == Install.invalid_package_id) continue;
// called alias because a dependency name is not always the package name
const alias = dep.name.slice(buf);
if (metas[package_id].hasInstallScript()) {
if (pm.lockfile.hasTrustedDependency(alias)) {
// can't put alias directly because it might be inline
try trusted_set.put(ctx.allocator, dep.name_hash, dep.name);
} else {
try untrusted_dep_ids.put(ctx.allocator, dep_id, {});
}
}
}
{
const Sorter = struct {
buf: string,
pub fn lessThan(this: @This(), rhs: String, lhs: String) bool {
return rhs.order(&lhs, this.buf, this.buf) == .lt;
}
};
const aliases = trusted_set.values();
std.sort.pdq(String, aliases, Sorter{ .buf = buf }, Sorter.lessThan);
Output.pretty("Trusted dependencies ({d}):\n", .{aliases.len});
for (aliases) |alias| {
Output.pretty(" <d>-<r> {s}\n", .{alias.slice(buf)});
} else {
Output.pretty("\n", .{});
}
trusted_set.deinit(ctx.allocator);
}
if (untrusted_dep_ids.count() == 0) {
Output.print("Untrusted dependencies (0):\n", .{});
Global.exit(0);
}
var untrusted_with_scripts: std.StringArrayHashMapUnmanaged(std.ArrayListUnmanaged(struct {
dep_id: DependencyID,
scripts_list: Lockfile.Package.Scripts.List,
})) = .{};
defer untrusted_with_scripts.deinit(ctx.allocator);
var tree_iterator = Lockfile.Tree.Iterator.init(pm.lockfile);
const top_level_without_trailing_slash = strings.withoutTrailingSlash(Fs.FileSystem.instance.top_level_dir);
var abs_node_modules_path: std.ArrayListUnmanaged(u8) = .{};
defer abs_node_modules_path.deinit(ctx.allocator);
try abs_node_modules_path.appendSlice(ctx.allocator, top_level_without_trailing_slash);
try abs_node_modules_path.append(ctx.allocator, std.fs.path.sep);
while (tree_iterator.nextNodeModulesFolder(null)) |node_modules| {
// + 1 because we want to keep the path separator
abs_node_modules_path.items.len = top_level_without_trailing_slash.len + 1;
try abs_node_modules_path.appendSlice(ctx.allocator, node_modules.relative_path);
var node_modules_dir = bun.openDir(std.fs.cwd(), node_modules.relative_path) catch |err| {
if (err == error.ENOENT) continue;
return err;
};
defer node_modules_dir.close();
for (node_modules.dependencies) |dep_id| {
if (untrusted_dep_ids.contains(dep_id)) {
const dep = pm.lockfile.buffers.dependencies.items[dep_id];
const alias = dep.name.slice(buf);
const package_id = pm.lockfile.buffers.resolutions.items[dep_id];
const resolution = &resolutions[package_id];
var package_scripts = scripts[package_id];
if (try package_scripts.getList(
pm.log,
pm.lockfile,
node_modules_dir,
abs_node_modules_path.items,
alias,
resolution,
)) |scripts_list| {
if (scripts_list.items.len == 0) continue;
const key = try ctx.allocator.dupe(u8, alias);
const gop = try untrusted_with_scripts.getOrPut(ctx.allocator, key);
if (!gop.found_existing) {
gop.value_ptr.* = .{};
} else {
ctx.allocator.free(key);
}
try gop.value_ptr.append(ctx.allocator, .{ .dep_id = dep_id, .scripts_list = scripts_list });
}
}
}
}
if (untrusted_with_scripts.count() == 0) {
Output.print("Untrusted dependencies (0):\n", .{});
Global.exit(0);
}
const Sorter = struct {
pub fn lessThan(_: void, rhs: string, lhs: string) bool {
return std.mem.order(u8, rhs, lhs) == .lt;
}
};
const aliases = untrusted_with_scripts.keys();
std.sort.pdq(string, aliases, {}, Sorter.lessThan);
try untrusted_with_scripts.reIndex(ctx.allocator);
Output.print("Untrusted dependencies ({d}):\n", .{aliases.len});
for (aliases) |alias| {
const _entry = untrusted_with_scripts.get(alias);
if (comptime bun.Environment.allow_assert) {
std.debug.assert(_entry != null);
}
if (_entry) |entry| {
if (comptime bun.Environment.allow_assert) {
std.debug.assert(entry.items.len > 0);
}
Output.pretty(" <d>-<r> {s}\n", .{alias});
}
}
Global.exit(0);
}
// this isn't great, flags could be in this slice, but it works
const packages_to_trust = args[2..];
const trust_all = strings.leftHasAnyInRight(args, &.{ "-a", "--all" });
const packages = pm.lockfile.packages.slice();
const metas: []Lockfile.Package.Meta = packages.items(.meta);
const resolutions: []Install.Resolution = packages.items(.resolution);
const scripts: []Lockfile.Package.Scripts = packages.items(.scripts);
var untrusted_dep_ids: DepIdSet = .{};
defer untrusted_dep_ids.deinit(ctx.allocator);
// .1 go through all installed dependencies and find untrusted ones with scripts
// from packages through cli, or all if --all.
// .2 iterate through node_modules folder and spawn lifecycle scripts for each
// untrusted dependency from step 1.
// .3 add the untrusted dependencies to package.json and lockfile.trusted_dependencies.
for (pm.lockfile.buffers.dependencies.items, 0..) |dep, i| {
const dep_id: u32 = @intCast(i);
const package_id = pm.lockfile.buffers.resolutions.items[dep_id];
if (package_id == Install.invalid_package_id) continue;
const alias = dep.name.slice(buf);
if (metas[package_id].hasInstallScript()) {
if (trust_all and !pm.lockfile.hasTrustedDependency(alias)) {
try untrusted_dep_ids.put(ctx.allocator, dep_id, {});
continue;
}
for (packages_to_trust) |package_name_from_cli| {
if (strings.eqlLong(package_name_from_cli, alias, true) and !pm.lockfile.hasTrustedDependency(alias)) {
try untrusted_dep_ids.put(ctx.allocator, dep_id, {});
continue;
}
}
}
}
if (untrusted_dep_ids.count() == 0) Global.exit(0);
// instead of running them right away, we group scripts by depth in the node_modules
// file structure, then run in descending order. this ensures lifecycle scripts are run
// in the correct order as they would during a normal install
var tree_iter = Lockfile.Tree.Iterator.init(pm.lockfile);
const top_level_without_trailing_slash = strings.withoutTrailingSlash(Fs.FileSystem.instance.top_level_dir);
var abs_node_modules_path: std.ArrayListUnmanaged(u8) = .{};
defer abs_node_modules_path.deinit(ctx.allocator);
try abs_node_modules_path.appendSlice(ctx.allocator, top_level_without_trailing_slash);
try abs_node_modules_path.append(ctx.allocator, std.fs.path.sep);
var package_names_to_add: std.StringArrayHashMapUnmanaged(void) = .{};
var scripts_at_depth: std.AutoArrayHashMapUnmanaged(usize, std.ArrayListUnmanaged(Lockfile.Package.Scripts.List)) = .{};
defer {
var iter = scripts_at_depth.iterator();
while (iter.next()) |entry| {
for (entry.value_ptr.items) |item| {
item.deinit(ctx.allocator);
}
entry.value_ptr.deinit(ctx.allocator);
}
scripts_at_depth.deinit(ctx.allocator);
package_names_to_add.deinit(ctx.allocator);
}
var scripts_count: usize = 0;
while (tree_iter.nextNodeModulesFolder(null)) |node_modules| {
abs_node_modules_path.items.len = top_level_without_trailing_slash.len + 1;
try abs_node_modules_path.appendSlice(ctx.allocator, node_modules.relative_path);
var node_modules_dir = bun.openDir(std.fs.cwd(), node_modules.relative_path) catch |err| {
if (err == error.ENOENT) continue;
return err;
};
defer node_modules_dir.close();
for (node_modules.dependencies) |dep_id| {
if (untrusted_dep_ids.contains(dep_id)) {
const dep = pm.lockfile.buffers.dependencies.items[dep_id];
const alias = dep.name.slice(buf);
const package_id = pm.lockfile.buffers.resolutions.items[dep_id];
const resolution = &resolutions[package_id];
var package_scripts = scripts[package_id];
if (try package_scripts.getList(
pm.log,
pm.lockfile,
node_modules_dir,
abs_node_modules_path.items,
alias,
resolution,
)) |scripts_list| {
const entry = try scripts_at_depth.getOrPut(ctx.allocator, node_modules.depth);
if (!entry.found_existing) {
entry.value_ptr.* = .{};
}
scripts_count += scripts_list.total;
try entry.value_ptr.append(ctx.allocator, scripts_list);
try package_names_to_add.put(ctx.allocator, try ctx.allocator.dupe(u8, alias), {});
}
}
}
}
if (scripts_at_depth.count() == 0) Global.exit(0);
var root_node: *Progress.Node = undefined;
var scripts_node: Progress.Node = undefined;
var progress = &pm.progress;
if (pm.options.log_level.showProgress()) {
root_node = progress.start("", 0);
progress.supports_ansi_escape_codes = Output.enable_ansi_colors_stderr;
scripts_node = root_node.start(PackageManager.ProgressStrings.script(), scripts_count);
pm.scripts_node = &scripts_node;
}
var depth = scripts_at_depth.count();
while (depth > 0) {
depth -= 1;
const _entry = scripts_at_depth.get(depth);
if (comptime bun.Environment.allow_assert) {
std.debug.assert(_entry != null);
}
if (_entry) |entry| {
for (entry.items) |scripts_list| {
switch (pm.options.log_level) {
inline else => |log_level| try pm.spawnPackageLifecycleScripts(ctx, scripts_list, log_level),
}
if (pm.options.log_level.showProgress()) {
scripts_node.activate();
progress.refresh();
}
}
while (pm.pending_lifecycle_script_tasks.load(.Monotonic) > 0) {
pm.uws_event_loop.tick();
}
}
}
if (pm.options.log_level.showProgress()) {
progress.root.end();
progress.* = .{};
}
const package_json_contents = try pm.root_package_json_file.readToEndAlloc(ctx.allocator, try pm.root_package_json_file.getEndPos());
defer ctx.allocator.free(package_json_contents);
const package_json_source = logger.Source.initPathString(PackageManager.package_json_cwd, package_json_contents);
var package_json = bun.JSON.ParseJSONUTF8(&package_json_source, ctx.log, ctx.allocator) catch |err| {
switch (Output.enable_ansi_colors) {
inline else => |enable_ansi_colors| ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), enable_ansi_colors) catch {},
}
if (err == error.ParserError and ctx.log.errors > 0) {
Output.prettyErrorln("<red>error<r>: Failed to parse package.json", .{});
Global.crash();
}
Output.panic("<r><red>{s}<r> parsing package.json<r>", .{
@errorName(err),
});
};
// now add the package names to lockfile.trustedDependencies and package.json `trustedDependencies`
const names_count = package_names_to_add.count();
if (comptime Environment.allow_assert) {
std.debug.assert(names_count > 0);
}
// could be null if these are the first packages to be trusted
if (names_count > 0 and pm.lockfile.trusted_dependencies == null) pm.lockfile.trusted_dependencies = .{};
const names = package_names_to_add.keys();
try Install.PackageManager.PackageJSONEditor.editTrustedDependencies(ctx.allocator, &package_json, names);
for (names) |name| {
try pm.lockfile.trusted_dependencies.?.put(ctx.allocator, @truncate(String.Builder.stringHash(name)), {});
}
pm.lockfile.saveToDisk(pm.options.lockfile_path);
var buffer_writer = try bun.js_printer.BufferWriter.init(ctx.allocator);
try buffer_writer.buffer.list.ensureTotalCapacity(ctx.allocator, package_json_contents.len + 1);
buffer_writer.append_newline = package_json_contents.len > 0 and package_json_contents[package_json_contents.len - 1] == '\n';
var package_json_writer = bun.js_printer.BufferPrinter.init(buffer_writer);
_ = bun.js_printer.printJSON(@TypeOf(&package_json_writer), &package_json_writer, package_json, &package_json_source) catch |err| {
Output.prettyErrorln("package.json failed to write due to error {s}", .{@errorName(err)});
Global.crash();
};
const new_package_json_contents = package_json_writer.ctx.writtenWithoutTrailingZero();
try pm.root_package_json_file.pwriteAll(new_package_json_contents, 0);
std.os.ftruncate(pm.root_package_json_file.handle, new_package_json_contents.len) catch {};
pm.root_package_json_file.close();
Global.exit(0);
} else if (strings.eqlComptime(subcommand, "ls")) {
const load_lockfile = pm.lockfile.loadFromDisk(ctx.allocator, ctx.log, "bun.lockb");
@@ -244,6 +629,8 @@ pub const PackageManagerCommand = struct {
const lockfile = load_lockfile.ok.lockfile;
var iterator = Lockfile.Tree.Iterator.init(lockfile);
var max_depth: usize = 0;
var directories = std.ArrayList(NodeModulesFolder).init(ctx.allocator);
defer directories.deinit();
while (iterator.nextNodeModulesFolder(null)) |node_modules| {
@@ -255,17 +642,20 @@ pub const PackageManagerCommand = struct {
const dependencies = try ctx.allocator.alloc(DependencyID, node_modules.dependencies.len);
bun.copy(DependencyID, dependencies, node_modules.dependencies);
if (max_depth < node_modules.depth + 1) max_depth = node_modules.depth + 1;
try directories.append(.{
.relative_path = path[0..path_len :0],
.dependencies = dependencies,
.tree_id = node_modules.tree_id,
.depth = node_modules.depth,
});
}
const first_directory = directories.orderedRemove(0);
// TODO: find max depth beforehand
var more_packages = [_]bool{false} ** 16;
var more_packages = try ctx.allocator.alloc(bool, max_depth);
@memset(more_packages, false);
if (first_directory.dependencies.len > 1) more_packages[0] = true;
if (strings.leftHasAnyInRight(args, &.{ "-A", "-a", "--all" })) {
@@ -355,7 +745,7 @@ fn printNodeModulesFolderStructure(
depth: usize,
directories: *std.ArrayList(NodeModulesFolder),
lockfile: *Lockfile,
more_packages_: [16]bool,
more_packages_: []bool,
) !void {
const allocator = lockfile.allocator;
var more_packages = more_packages_;
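
The depth-ordered script execution added above follows the pattern sketched below: bucket each untrusted package's lifecycle scripts by the depth of its node_modules folder, then drain the buckets deepest-first so scripts run in the same order they would during a normal install. This is a minimal standalone sketch with hypothetical types and sample data, not the actual Bun/Lockfile API.

const std = @import("std");

const Script = struct { name: []const u8 };

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa.deinit();
    const allocator = gpa.allocator();

    // depth of node_modules folder -> scripts found at that depth
    var scripts_at_depth: std.AutoArrayHashMapUnmanaged(usize, std.ArrayListUnmanaged(Script)) = .{};
    defer {
        var it = scripts_at_depth.iterator();
        while (it.next()) |entry| entry.value_ptr.deinit(allocator);
        scripts_at_depth.deinit(allocator);
    }

    // Bucket scripts by depth (hypothetical sample data standing in for the tree iterator).
    const discovered = [_]struct { depth: usize, script: Script }{
        .{ .depth = 0, .script = .{ .name = "top-level-dep postinstall" } },
        .{ .depth = 1, .script = .{ .name = "nested-dep postinstall" } },
    };
    for (discovered) |found| {
        const entry = try scripts_at_depth.getOrPut(allocator, found.depth);
        if (!entry.found_existing) entry.value_ptr.* = .{};
        try entry.value_ptr.append(allocator, found.script);
    }

    // Drain deepest-first; depths are contiguous from 0, as with the tree iteration above.
    var depth = scripts_at_depth.count();
    while (depth > 0) {
        depth -= 1;
        if (scripts_at_depth.get(depth)) |list| {
            for (list.items) |script| {
                std.debug.print("run: {s}\n", .{script.name});
            }
        }
    }
}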