From 351e47355a90d6e7268487b7035bdf6375c97b15 Mon Sep 17 00:00:00 2001 From: Dylan Conway <35280289+dylan-conway@users.noreply.github.com> Date: Mon, 11 Dec 2023 22:08:25 -0800 Subject: [PATCH] add default `trustedDependencies` and run lifecycle scripts during installation (#7132) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * [install] allow parallel execution of `postinstall` scripts - fixes spurious exit code 42 from `spawn()` * postinstall to a pipe * feat(install): include top 500 packages as defaults for postinstall * windows newline handling* *i did not test it * stuff * cool * a * fix merge * set `has_trusted_dependencies` * fix a bunch of tests * fix merge * remove `PackageManager` * remove commented code * change to function * Update lockfile.zig * run scripts if added to `trustedDependencies` after install * packages without `resolved` properties * node-gyp scripts * node-gyp script in the root * another test * git deps run prepare scripts * fix merge * run lifecycle scripts during installation * Update lockfile.zig * always increment * 🏗️ * update tests * tickWIthoutIdle * const uws * loop forwards through trees * single buffer bitset list * tag.isGit * windows path separators * `bun.sys.read` and enable/disable buffering * fix test and waiter thread * waiter thread and tests * Update bun-install-registry.test.ts * workspace exclude `preprepare` and `postprepare` * Create esbuild.test.ts * make sure length is the same * remove deferred binlinks, add estrella test * test with another version * address some comments * remove .verdaccio-db.json * ooops * fix build * use `pid` to wait * dont register pid_poll when using waiter thread * stress test * free * fix failing tests * fix linux crash, snapshot stress test * oops * concurrent scripts * activate as soon as possible * test * delete stress test packages * remove unused packages * comment stress test and maybe fix segfault * delete snapshot * fix assertion * 
use cpu_count * 2 for default concurrent scripts * gear emoji * add --concurrent-scripts to docs * more docs --------- Co-authored-by: alexlamsl Co-authored-by: dave caruso Co-authored-by: Dylan Conway <33744874+MilesWright7@users.noreply.github.com> Co-authored-by: Jarred Sumner --- .gitignore | 2 + .vscode/launch.json | 16 +- docs/cli/bun-install.md | 3 + docs/cli/install.md | 9 + docs/install/index.md | 3 + src/StaticHashMap.zig | 756 +++++++++ src/api/schema.d.ts | 1 + src/api/schema.js | 10 + src/api/schema.peechy | 1 + src/api/schema.zig | 10 + src/async/posix_event_loop.zig | 52 + src/async/windows_event_loop.zig | 18 + src/bit_set.zig | 57 + src/bun.js/api/bun/subprocess.zig | 185 ++- src/bun.js/bindings/bindings.zig | 2 +- src/bun.js/node/node_fs.zig | 6 +- src/bun.zig | 1 + src/bunfig.zig | 8 + src/cli/bunx_command.zig | 9 +- src/cli/package_manager_command.zig | 3 +- src/cli/run_command.zig | 699 ++++++-- src/env_loader.zig | 8 +- src/install/default-trusted-dependencies.txt | 500 ++++++ src/install/install-scripts-allowlist.txt | 4 - src/install/install.zig | 950 ++++++++--- src/install/lockfile.zig | 301 +++- src/install/resolution.zig | 8 + src/sys.zig | 33 +- test/cli/install/bun-add.test.ts | 28 +- test/cli/install/bun-install.test.ts | 172 +- .../migration/complex-workspace.test.ts | 8 +- test/cli/install/overrides.test.ts | 8 +- test/cli/install/qux-0.0.2.tgz | Bin 283 -> 286 bytes .../registry/bun-install-registry.test.ts | 1416 +++++++++++++++-- .../registry/packages/.verdaccio-db.json | 1 - .../all-lifecycle-scripts-1.0.0.tgz | Bin 0 -> 536 bytes .../all-lifecycle-scripts/package.json | 46 + .../binding-gyp-scripts-1.5.0.tgz | Bin 0 -> 343 bytes .../packages/binding-gyp-scripts/package.json | 32 +- .../has-bin-entries/has-bin-entries-1.5.0.tgz | Bin 0 -> 751 bytes .../packages/has-bin-entries/package.json | 35 +- ...fecycle-failing-postinstall-slow-1.0.0.tgz | Bin 0 -> 203 bytes .../package.json | 41 + 
.../lifecycle-failing-postinstall-1.0.0.tgz | Bin 0 -> 193 bytes .../package.json | 41 + .../lifecycle-init-cwd-1.0.0.tgz | Bin 0 -> 315 bytes .../packages/lifecycle-init-cwd/package.json | 41 + .../lifecycle-postinstall-1.0.0.tgz | Bin 0 -> 335 bytes .../lifecycle-postinstall/package.json | 41 + .../packages/node-gyp/node-gyp-1.5.0.tgz | Bin 0 -> 428 bytes .../registry/packages/node-gyp/package.json | 28 +- .../one-dep-scripted-1.5.0.tgz | Bin 0 -> 340 bytes .../packages/one-dep-scripted/package.json | 31 +- .../packages/uses-what-bin-slow/package.json | 44 + .../uses-what-bin-slow-1.0.0.tgz | Bin 0 -> 202 bytes .../packages/with-postinstall-a/package.json | 45 + .../with-postinstall-a-1.0.0.tgz | Bin 0 -> 253 bytes .../packages/with-postinstall-b/package.json | 43 + .../with-postinstall-b-1.0.0.tgz | Bin 0 -> 247 bytes test/harness.ts | 25 - test/integration/esbuild/build-file.js | 2 + test/integration/esbuild/esbuild.test.ts | 205 +++ 62 files changed, 5302 insertions(+), 686 deletions(-) create mode 100644 src/StaticHashMap.zig create mode 100644 src/install/default-trusted-dependencies.txt delete mode 100644 src/install/install-scripts-allowlist.txt delete mode 100644 test/cli/install/registry/packages/.verdaccio-db.json create mode 100644 test/cli/install/registry/packages/all-lifecycle-scripts/all-lifecycle-scripts-1.0.0.tgz create mode 100644 test/cli/install/registry/packages/all-lifecycle-scripts/package.json create mode 100644 test/cli/install/registry/packages/binding-gyp-scripts/binding-gyp-scripts-1.5.0.tgz create mode 100644 test/cli/install/registry/packages/has-bin-entries/has-bin-entries-1.5.0.tgz create mode 100644 test/cli/install/registry/packages/lifecycle-failing-postinstall-slow/lifecycle-failing-postinstall-slow-1.0.0.tgz create mode 100644 test/cli/install/registry/packages/lifecycle-failing-postinstall-slow/package.json create mode 100644 
test/cli/install/registry/packages/lifecycle-failing-postinstall/lifecycle-failing-postinstall-1.0.0.tgz create mode 100644 test/cli/install/registry/packages/lifecycle-failing-postinstall/package.json create mode 100644 test/cli/install/registry/packages/lifecycle-init-cwd/lifecycle-init-cwd-1.0.0.tgz create mode 100644 test/cli/install/registry/packages/lifecycle-init-cwd/package.json create mode 100644 test/cli/install/registry/packages/lifecycle-postinstall/lifecycle-postinstall-1.0.0.tgz create mode 100644 test/cli/install/registry/packages/lifecycle-postinstall/package.json create mode 100644 test/cli/install/registry/packages/node-gyp/node-gyp-1.5.0.tgz create mode 100644 test/cli/install/registry/packages/one-dep-scripted/one-dep-scripted-1.5.0.tgz create mode 100644 test/cli/install/registry/packages/uses-what-bin-slow/package.json create mode 100644 test/cli/install/registry/packages/uses-what-bin-slow/uses-what-bin-slow-1.0.0.tgz create mode 100644 test/cli/install/registry/packages/with-postinstall-a/package.json create mode 100644 test/cli/install/registry/packages/with-postinstall-a/with-postinstall-a-1.0.0.tgz create mode 100644 test/cli/install/registry/packages/with-postinstall-b/package.json create mode 100644 test/cli/install/registry/packages/with-postinstall-b/with-postinstall-b-1.0.0.tgz create mode 100644 test/integration/esbuild/build-file.js create mode 100644 test/integration/esbuild/esbuild.test.ts diff --git a/.gitignore b/.gitignore index e7ce1a4349..78febd9761 100644 --- a/.gitignore +++ b/.gitignore @@ -160,3 +160,5 @@ x64 /build-*/ .vs + +**/.verdaccio-db.json \ No newline at end of file diff --git a/.vscode/launch.json b/.vscode/launch.json index ece9a9fb51..75d5094850 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -6,6 +6,20 @@ "version": "0.2.0", "configurations": [ + { + "type": "lldb", + "request": "launch", + "name": "sharp", + "program": "bun-debug", + "args": ["install", "sharp"], + // The cwd here must be the 
same as in CI. Or you will cause test failures that only happen in CI. + "cwd": "/tmp/scratchpad_20230911T213851", + "env": { + "FORCE_COLOR": "1", + "BUN_GARBAGE_COLLECTOR_LEVEL": "2" + }, + "console": "internalConsole" + }, { "type": "lldb", "request": "launch", @@ -307,7 +321,7 @@ "name": "bun install", "program": "bun-debug", "args": ["install"], - "cwd": "/Users/jarred/Build/worky", + "cwd": "${fileDirname}", "console": "internalConsole", "env": {} }, diff --git a/docs/cli/bun-install.md b/docs/cli/bun-install.md index c7dfcae5d5..20832cc53e 100644 --- a/docs/cli/bun-install.md +++ b/docs/cli/bun-install.md @@ -62,6 +62,9 @@ dev = true # Install peerDependencies (default: true) peer = true +# Max number of concurrent lifecycle scripts (default: (cpu count or GOMAXPROCS) x2) +concurrentScripts = 16 + # When using `bun install -g`, install packages here globalDir = "~/.bun/install/global" diff --git a/docs/cli/install.md b/docs/cli/install.md index e3607e55fa..932f05a574 100644 --- a/docs/cli/install.md +++ b/docs/cli/install.md @@ -60,6 +60,12 @@ To tell Bun to allow lifecycle scripts for a particular package, add the package Then re-install the package. Bun will read this field and run lifecycle scripts for `my-trusted-package`. +Lifecycle scripts will run in parallel during installation. To adjust the maximum number of concurrent scripts, use the `--concurrent-scripts` flag. The default is two times the reported cpu count or GOMAXPROCS. + +```bash +$ bun install --concurrent-scripts 5 +``` + ## Workspaces Bun supports `"workspaces"` in package.json. For complete documentation refer to [Package manager > Workspaces](/docs/install/workspaces). 
@@ -172,6 +178,9 @@ frozenLockfile = false # equivalent to `--dry-run` flag dryRun = false + +# equivalent to `--concurrent-scripts` flag +concurrentScripts = 16 # (cpu count or GOMAXPROCS) x2 ``` ## CI/CD diff --git a/docs/install/index.md b/docs/install/index.md index 8f0aaf3782..cdf41b9311 100644 --- a/docs/install/index.md +++ b/docs/install/index.md @@ -91,6 +91,9 @@ frozenLockfile = false # equivalent to `--dry-run` flag dryRun = false + +# equivalent to `--concurrent-scripts` flag +concurrentScripts = 16 # (cpu count or GOMAXPROCS) x2 ``` {% /details %} diff --git a/src/StaticHashMap.zig b/src/StaticHashMap.zig new file mode 100644 index 0000000000..e0cbb7cc6d --- /dev/null +++ b/src/StaticHashMap.zig @@ -0,0 +1,756 @@ +// https://github.com/lithdew/rheia/blob/162293d0f0e8d6572a8954c0add83f13f76b3cc6/hash_map.zig +// Apache License 2.0 +const std = @import("std"); + +const mem = std.mem; +const math = std.math; +const testing = std.testing; + +const assert = std.debug.assert; + +pub fn AutoHashMap(comptime K: type, comptime V: type, comptime max_load_percentage: comptime_int) type { + return HashMap(K, V, std.hash_map.AutoContext(K), max_load_percentage); +} + +pub fn AutoStaticHashMap(comptime K: type, comptime V: type, comptime capacity: comptime_int) type { + return StaticHashMap(K, V, std.hash_map.AutoContext(K), capacity); +} + +pub fn StaticHashMap(comptime K: type, comptime V: type, comptime Context: type, comptime capacity: usize) type { + assert(math.isPowerOfTwo(capacity)); + + const shift = 63 - math.log2_int(u64, capacity) + 1; + const overflow = capacity / 10 + (63 - @as(u64, shift) + 1) << 1; + + return struct { + const empty_hash = math.maxInt(u64); + + pub const Entry = struct { + hash: u64 = empty_hash, + key: K = std.mem.zeroes(K), + value: V = std.mem.zeroes(V), + + pub fn isEmpty(self: Entry) bool { + return self.hash == empty_hash; + } + + pub fn format(self: Entry, comptime layout: []const u8, options: std.fmt.FormatOptions, writer: 
anytype) !void { + _ = layout; + _ = options; + try std.fmt.format(writer, "(hash: {}, key: {}, value: {})", .{ self.hash, self.key, self.value }); + } + }; + + pub const GetOrPutResult = struct { + value_ptr: *V, + found_existing: bool, + }; + + const Self = @This(); + + entries: [capacity + overflow]Entry = [_]Entry{.{}} ** (capacity + overflow), + len: usize = 0, + shift: u6 = shift, + + // put_probe_count: usize = 0, + // get_probe_count: usize = 0, + // del_probe_count: usize = 0, + + pub usingnamespace HashMapMixin(Self, K, V, Context); + }; +} + +pub fn HashMap(comptime K: type, comptime V: type, comptime Context: type, comptime max_load_percentage: comptime_int) type { + return struct { + const empty_hash = math.maxInt(u64); + + pub const Entry = struct { + hash: u64 = empty_hash, + key: K = undefined, + value: V = undefined, + + pub fn isEmpty(self: Entry) bool { + return self.hash == empty_hash; + } + + pub fn format(self: Entry, comptime layout: []const u8, options: std.fmt.FormatOptions, writer: anytype) !void { + _ = layout; + _ = options; + try std.fmt.format(writer, "(hash: {}, key: {}, value: {})", .{ self.hash, self.key, self.value }); + } + }; + + pub const GetOrPutResult = struct { + value_ptr: *V, + found_existing: bool, + }; + + const Self = @This(); + + entries: [*]Entry, + len: usize = 0, + shift: u6, + + // put_probe_count: usize = 0, + // get_probe_count: usize = 0, + // del_probe_count: usize = 0, + + pub usingnamespace HashMapMixin(Self, K, V, Context); + + pub fn initCapacity(gpa: mem.Allocator, capacity: u64) !Self { + assert(math.isPowerOfTwo(capacity)); + + const shift = 63 - math.log2_int(u64, capacity) + 1; + const overflow = capacity / 10 + (63 - @as(u64, shift) + 1) << 1; + + const entries = try gpa.alloc(Entry, @as(usize, @intCast(capacity + overflow))); + @memset(entries, .{}); + + return Self{ + .entries = entries.ptr, + .shift = shift, + }; + } + + pub fn deinit(self: *Self, gpa: mem.Allocator) void { + gpa.free(self.slice()); 
+ } + + pub fn ensureUnusedCapacity(self: *Self, gpa: mem.Allocator, count: usize) !void { + try self.ensureTotalCapacity(gpa, self.len + count); + } + + pub fn ensureTotalCapacity(self: *Self, gpa: mem.Allocator, count: usize) !void { + while (true) { + const capacity = @as(u64, 1) << (63 - self.shift + 1); + if (count <= capacity * max_load_percentage / 100) { + break; + } + try self.grow(gpa); + } + } + + fn grow(self: *Self, gpa: mem.Allocator) !void { + const capacity = @as(u64, 1) << (63 - self.shift + 1); + const overflow = capacity / 10 + (63 - @as(usize, self.shift) + 1) << 1; + const end = self.entries + @as(usize, @intCast(capacity + overflow)); + + var map = try Self.initCapacity(gpa, @as(usize, @intCast(capacity * 2))); + var src = self.entries; + var dst = map.entries; + + while (src != end) { + const entry = src[0]; + + const i = if (!entry.isEmpty()) entry.hash >> map.shift else 0; + const p = map.entries + i; + + dst = if (@intFromPtr(p) >= @intFromPtr(dst)) p else dst; + dst[0] = entry; + + src += 1; + dst += 1; + } + + self.deinit(gpa); + self.entries = map.entries; + self.shift = map.shift; + } + + pub fn put(self: *Self, gpa: mem.Allocator, key: K, value: V) !void { + try self.putContext(gpa, key, value, undefined); + } + + pub fn putContext(self: *Self, gpa: mem.Allocator, key: K, value: V, ctx: Context) !void { + try self.ensureUnusedCapacity(gpa, 1); + self.putAssumeCapacityContext(key, value, ctx); + } + + pub fn getOrPut(self: *Self, gpa: mem.Allocator, key: K) !GetOrPutResult { + return try self.getOrPutContext(gpa, key, undefined); + } + + pub fn getOrPutContext(self: *Self, gpa: mem.Allocator, key: K, ctx: Context) !GetOrPutResult { + try self.ensureUnusedCapacity(gpa, 1); + return self.getOrPutAssumeCapacityContext(key, ctx); + } + }; +} + +fn HashMapMixin( + comptime Self: type, + comptime K: type, + comptime V: type, + comptime Context: type, +) type { + return struct { + pub fn clearRetainingCapacity(self: *Self) void { + 
@memset(self.slice(), .{}); + self.len = 0; + } + + pub fn slice(self: *Self) []Self.Entry { + const capacity = @as(u64, 1) << (63 - self.shift + 1); + const overflow = capacity / 10 + (63 - @as(usize, self.shift) + 1) << 1; + return self.entries[0..@as(usize, @intCast(capacity + overflow))]; + } + + pub fn putAssumeCapacity(self: *Self, key: K, value: V) void { + self.putAssumeCapacityContext(key, value, undefined); + } + + pub fn putAssumeCapacityContext(self: *Self, key: K, value: V, ctx: Context) void { + const result = self.getOrPutAssumeCapacityContext(key, ctx); + if (!result.found_existing) result.value_ptr.* = value; + } + + pub fn getOrPutAssumeCapacity(self: *Self, key: K) Self.GetOrPutResult { + return self.getOrPutAssumeCapacityContext(key, undefined); + } + + pub fn getOrPutAssumeCapacityContext(self: *Self, key: K, ctx: Context) Self.GetOrPutResult { + var it: Self.Entry = .{ .hash = ctx.hash(key), .key = key, .value = undefined }; + var i = it.hash >> self.shift; + + assert(it.hash != Self.empty_hash); + + var inserted_at: ?usize = null; + while (true) : (i += 1) { + const entry = self.entries[i]; + if (entry.hash >= it.hash) { + if (ctx.eql(entry.key, key)) { + return .{ .found_existing = true, .value_ptr = &self.entries[i].value }; + } + self.entries[i] = it; + if (entry.isEmpty()) { + self.len += 1; + return .{ .found_existing = false, .value_ptr = &self.entries[inserted_at orelse i].value }; + } + if (inserted_at == null) { + inserted_at = i; + } + it = entry; + } + // self.put_probe_count += 1; + } + } + + pub fn get(self: *Self, key: K) ?V { + return self.getContext(key, undefined); + } + + pub fn getContext(self: *Self, key: K, ctx: Context) ?V { + const hash = ctx.hash(key); + assert(hash != Self.empty_hash); + + var i = hash >> self.shift; + while (true) : (i += 1) { + const entry = self.entries[i]; + if (entry.hash >= hash) { + if (!ctx.eql(entry.key, key)) { + return null; + } + return entry.value; + } + // self.get_probe_count += 1; + } 
+ } + + pub fn has(self: *Self, key: K) bool { + return self.hasContext(key, undefined); + } + + pub fn hasContext(self: *Self, key: K, ctx: Context) bool { + const hash = ctx.hash(key); + assert(hash != Self.empty_hash); + + var i = hash >> self.shift; + while (true) : (i += 1) { + const entry = self.entries[i]; + if (entry.hash >= hash) { + if (!ctx.eql(entry.key, key)) { + return false; + } + return true; + } + // self.get_probe_count += 1; + } + } + + pub fn delete(self: *Self, key: K) ?V { + return self.deleteContext(key, undefined); + } + + pub fn deleteContext(self: *Self, key: K, ctx: Context) ?V { + const hash = ctx.hash(key); + assert(hash != Self.empty_hash); + + var i = hash >> self.shift; + while (true) : (i += 1) { + const entry = self.entries[i]; + if (entry.hash >= hash) { + if (!ctx.eql(entry.key, key)) { + return null; + } + break; + } + // self.del_probe_count += 1; + } + + const value = self.entries[i].value; + + while (true) : (i += 1) { + const j = self.entries[i + 1].hash >> self.shift; + if (i < j or self.entries[i + 1].isEmpty()) { + break; + } + self.entries[i] = self.entries[i + 1]; + // self.del_probe_count += 1; + } + self.entries[i] = .{}; + self.len -= 1; + + return value; + } + }; +} + +pub fn SortedHashMap(comptime V: type, comptime max_load_percentage: comptime_int) type { + return struct { + const empty_hash: [32]u8 = [_]u8{0xFF} ** 32; + + pub const Entry = struct { + hash: [32]u8 = empty_hash, + value: V = undefined, + + pub fn isEmpty(self: Entry) bool { + return cmp(self.hash, empty_hash) == .eq; + } + + pub fn format(self: Entry, comptime layout: []const u8, options: std.fmt.FormatOptions, writer: anytype) !void { + _ = layout; + _ = options; + try std.fmt.format(writer, "(hash: {}, value: {})", .{ std.fmt.fmtSliceHexLower(mem.asBytes(&self.hash)), self.value }); + } + }; + + const Self = @This(); + + entries: [*]Entry, + len: usize = 0, + shift: u6, + + // put_probe_count: usize = 0, + // get_probe_count: usize = 0, + // 
del_probe_count: usize = 0, + + pub fn init(gpa: mem.Allocator) !Self { + return Self.initCapacity(gpa, 16); + } + + pub fn initCapacity(gpa: mem.Allocator, capacity: u64) !Self { + assert(math.isPowerOfTwo(capacity)); + + const shift = 63 - math.log2_int(u64, capacity) + 1; + const overflow = capacity / 10 + (63 - @as(u64, shift) + 1) << 1; + + const entries = try gpa.alloc(Entry, @as(usize, @intCast(capacity + overflow))); + @memset(entries, Entry{}); + + return Self{ + .entries = entries.ptr, + .shift = shift, + }; + } + + pub fn deinit(self: *Self, gpa: mem.Allocator) void { + gpa.free(self.slice()); + } + + /// The following routine has its branches optimized against inputs that are cryptographic hashes by + /// assuming that if the first 64 bits of 'a' and 'b' are equivalent, then 'a' and 'b' are most likely + /// equivalent. + fn cmp(a: [32]u8, b: [32]u8) math.Order { + const msa = @as(u64, @bitCast(a[0..8].*)); + const msb = @as(u64, @bitCast(b[0..8].*)); + if (msa != msb) { + return if (mem.bigToNative(u64, msa) < mem.bigToNative(u64, msb)) .lt else .gt; + } else if (@reduce(.And, @as(@Vector(32, u8), a) == @as(@Vector(32, u8), b))) { + return .eq; + } else { + switch (math.order(mem.readIntBig(u64, a[8..16]), mem.readIntBig(u64, b[8..16]))) { + .eq => {}, + .lt => return .lt, + .gt => return .gt, + } + switch (math.order(mem.readIntBig(u64, a[16..24]), mem.readIntBig(u64, b[16..24]))) { + .eq => {}, + .lt => return .lt, + .gt => return .gt, + } + return math.order(mem.readIntBig(u64, a[24..32]), mem.readIntBig(u64, b[24..32])); + } + } + + /// In release-fast mode, LLVM will optimize this routine to utilize 109 cycles. This routine scatters + /// hash values across a table into buckets which are lexicographically ordered from one another in + /// ascending order. 
+ fn idx(a: [32]u8, shift: u6) usize { + return @as(usize, @intCast(mem.readIntBig(u64, a[0..8]) >> shift)); + } + + pub fn clearRetainingCapacity(self: *Self) void { + @memset(self.slice(), Entry{}); + self.len = 0; + } + + pub fn slice(self: *Self) []Entry { + const capacity = @as(u64, 1) << (63 - self.shift + 1); + const overflow = capacity / 10 + (63 - @as(usize, self.shift) + 1) << 1; + return self.entries[0..@as(usize, @intCast(capacity + overflow))]; + } + + pub fn ensureUnusedCapacity(self: *Self, gpa: mem.Allocator, count: usize) !void { + try self.ensureTotalCapacity(gpa, self.len + count); + } + + pub fn ensureTotalCapacity(self: *Self, gpa: mem.Allocator, count: usize) !void { + while (true) { + const capacity = @as(u64, 1) << (63 - self.shift + 1); + if (count <= capacity * max_load_percentage / 100) { + break; + } + try self.grow(gpa); + } + } + + fn grow(self: *Self, gpa: mem.Allocator) !void { + const capacity = @as(u64, 1) << (63 - self.shift + 1); + const overflow = capacity / 10 + (63 - @as(usize, self.shift) + 1) << 1; + const end = self.entries + @as(usize, @intCast(capacity + overflow)); + + var map = try Self.initCapacity(gpa, @as(usize, @intCast(capacity * 2))); + var src = self.entries; + var dst = map.entries; + + while (src != end) { + const entry = src[0]; + + const i = if (!entry.isEmpty()) idx(entry.hash, map.shift) else 0; + const p = map.entries + i; + + dst = if (@intFromPtr(p) >= @intFromPtr(dst)) p else dst; + dst[0] = entry; + + src += 1; + dst += 1; + } + + self.deinit(gpa); + self.entries = map.entries; + self.shift = map.shift; + } + + pub fn put(self: *Self, gpa: mem.Allocator, key: [32]u8, value: V) !void { + try self.ensureUnusedCapacity(gpa, 1); + self.putAssumeCapacity(key, value); + } + + pub fn putAssumeCapacity(self: *Self, key: [32]u8, value: V) void { + const result = self.getOrPutAssumeCapacity(key); + if (!result.found_existing) result.value_ptr.* = value; + } + + pub const GetOrPutResult = struct { + value_ptr: 
*V, + found_existing: bool, + }; + + pub fn getOrPut(self: *Self, gpa: mem.Allocator, key: [32]u8) !GetOrPutResult { + try self.ensureUnusedCapacity(gpa, 1); + return self.getOrPutAssumeCapacity(key); + } + + pub fn getOrPutAssumeCapacity(self: *Self, key: [32]u8) GetOrPutResult { + assert(self.len < (@as(u64, 1) << (63 - self.shift + 1))); + assert(cmp(key, empty_hash) != .eq); + + var it: Entry = .{ .hash = key, .value = undefined }; + var i = idx(key, self.shift); + + var inserted_at: ?usize = null; + while (true) : (i += 1) { + const entry = self.entries[i]; + if (cmp(entry.hash, it.hash).compare(.gte)) { + if (cmp(entry.hash, key) == .eq) { + return .{ .found_existing = true, .value_ptr = &self.entries[i].value }; + } + self.entries[i] = it; + if (entry.isEmpty()) { + self.len += 1; + return .{ .found_existing = false, .value_ptr = &self.entries[inserted_at orelse i].value }; + } + if (inserted_at == null) { + inserted_at = i; + } + it = entry; + } + self.put_probe_count += 1; + } + } + + pub fn get(self: *Self, key: [32]u8) ?V { + assert(cmp(key, empty_hash) != .eq); + + var i = idx(key, self.shift); + while (true) : (i += 1) { + const entry = self.entries[i]; + if (cmp(entry.hash, key).compare(.gte)) { + if (cmp(entry.hash, key) != .eq) { + return null; + } + return entry.value; + } + // self.get_probe_count += 1; + } + } + + pub fn delete(self: *Self, key: [32]u8) ?V { + assert(cmp(key, empty_hash) != .eq); + + var i = idx(key, self.shift); + while (true) : (i += 1) { + const entry = self.entries[i]; + if (cmp(entry.hash, key).compare(.gte)) { + if (cmp(entry.hash, key) != .eq) { + return null; + } + break; + } + self.del_probe_count += 1; + } + + const value = self.entries[i].value; + + while (true) : (i += 1) { + const j = idx(self.entries[i + 1].hash, self.shift); + if (i < j or self.entries[i + 1].isEmpty()) { + break; + } + self.entries[i] = self.entries[i + 1]; + self.del_probe_count += 1; + } + self.entries[i] = .{}; + self.len -= 1; + + return 
value; + } + }; +} + +test "StaticHashMap: put, get, delete, grow" { + var map: AutoStaticHashMap(usize, usize, 512) = .{}; + + var seed: usize = 0; + while (seed < 128) : (seed += 1) { + var rng = std.rand.DefaultPrng.init(seed); + + const keys = try testing.allocator.alloc(usize, 512); + defer testing.allocator.free(keys); + + for (keys) |*key| key.* = @as(usize, rng.next()); + + try testing.expectEqual(@as(u6, 55), map.shift); + + for (keys, 0..) |key, i| map.putAssumeCapacity(key, i); + try testing.expectEqual(keys.len, map.len); + + var it: usize = 0; + for (map.slice()) |entry| { + if (!entry.isEmpty()) { + if (it > entry.hash) { + return error.Unsorted; + } + it = entry.hash; + } + } + + for (keys, 0..) |key, i| try testing.expectEqual(i, map.get(key).?); + for (keys, 0..) |key, i| try testing.expectEqual(i, map.delete(key).?); + } +} + +test "HashMap: put, get, delete, grow" { + var seed: usize = 0; + while (seed < 128) : (seed += 1) { + var rng = std.rand.DefaultPrng.init(seed); + + const keys = try testing.allocator.alloc(usize, 512); + defer testing.allocator.free(keys); + + for (keys) |*key| key.* = rng.next(); + + var map = try AutoHashMap(usize, usize, 50).initCapacity(testing.allocator, 16); + defer map.deinit(testing.allocator); + + try testing.expectEqual(@as(u6, 60), map.shift); + + for (keys, 0..) |key, i| try map.put(testing.allocator, key, i); + + try testing.expectEqual(@as(u6, 54), map.shift); + try testing.expectEqual(keys.len, map.len); + + var it: usize = 0; + for (map.slice()) |entry| { + if (!entry.isEmpty()) { + if (it > entry.hash) { + return error.Unsorted; + } + it = entry.hash; + } + } + + for (keys, 0..) |key, i| try testing.expectEqual(i, map.get(key).?); + for (keys, 0..) 
|key, i| try testing.expectEqual(i, map.delete(key).?); + } +} + +test "SortedHashMap: cmp" { + const prefix = [_]u8{'0'} ** 8 ++ [_]u8{'1'} ** 23; + const a = prefix ++ [_]u8{0}; + const b = prefix ++ [_]u8{1}; + + try testing.expect(SortedHashMap(void, 100).cmp(a, b) == .lt); + try testing.expect(SortedHashMap(void, 100).cmp(b, a) == .gt); + try testing.expect(SortedHashMap(void, 100).cmp(a, a) == .eq); + try testing.expect(SortedHashMap(void, 100).cmp(b, b) == .eq); + try testing.expect(SortedHashMap(void, 100).cmp([_]u8{'i'} ++ [_]u8{'0'} ** 31, [_]u8{'o'} ++ [_]u8{'0'} ** 31) == .lt); + try testing.expect(SortedHashMap(void, 100).cmp([_]u8{ 'h', 'i' } ++ [_]u8{'0'} ** 30, [_]u8{ 'h', 'o' } ++ [_]u8{'0'} ** 30) == .lt); +} + +test "SortedHashMap: put, get, delete, grow" { + var seed: usize = 0; + while (seed < 128) : (seed += 1) { + var rng = std.rand.DefaultPrng.init(seed); + + const keys = try testing.allocator.alloc([32]u8, 512); + defer testing.allocator.free(keys); + + for (keys) |*key| rng.fill(key); + + var map = try SortedHashMap(usize, 50).initCapacity(testing.allocator, 16); + defer map.deinit(testing.allocator); + + try testing.expectEqual(@as(u6, 60), map.shift); + + for (keys, 0..) |key, i| try map.put(testing.allocator, key, i); + + try testing.expectEqual(@as(u6, 54), map.shift); + try testing.expectEqual(keys.len, map.len); + + var it = [_]u8{0} ** 32; + for (map.slice()) |entry| { + if (!entry.isEmpty()) { + if (!mem.order(u8, &it, &entry.hash).compare(.lte)) { + return error.Unsorted; + } + it = entry.hash; + } + } + + for (keys, 0..) |key, i| try testing.expectEqual(i, map.get(key).?); + for (keys, 0..) 
|key, i| try testing.expectEqual(i, map.delete(key).?); + } +} + +test "SortedHashMap: collision test" { + const prefix = [_]u8{22} ** 8 ++ [_]u8{1} ** 23; + + var map = try SortedHashMap(usize, 100).initCapacity(testing.allocator, 4); + defer map.deinit(testing.allocator); + + try map.put(testing.allocator, prefix ++ [_]u8{0}, 0); + try map.put(testing.allocator, prefix ++ [_]u8{1}, 1); + try map.put(testing.allocator, prefix ++ [_]u8{2}, 2); + try map.put(testing.allocator, prefix ++ [_]u8{3}, 3); + + var it = [_]u8{0} ** 32; + for (map.slice()) |entry| { + if (!entry.isEmpty()) { + if (!mem.order(u8, &it, &entry.hash).compare(.lte)) { + return error.Unsorted; + } + it = entry.hash; + } + } + + try testing.expectEqual(@as(usize, 0), map.get(prefix ++ [_]u8{0}).?); + try testing.expectEqual(@as(usize, 1), map.get(prefix ++ [_]u8{1}).?); + try testing.expectEqual(@as(usize, 2), map.get(prefix ++ [_]u8{2}).?); + try testing.expectEqual(@as(usize, 3), map.get(prefix ++ [_]u8{3}).?); + + try testing.expectEqual(@as(usize, 2), map.delete(prefix ++ [_]u8{2}).?); + try testing.expectEqual(@as(usize, 0), map.delete(prefix ++ [_]u8{0}).?); + try testing.expectEqual(@as(usize, 1), map.delete(prefix ++ [_]u8{1}).?); + try testing.expectEqual(@as(usize, 3), map.delete(prefix ++ [_]u8{3}).?); + + try map.put(testing.allocator, prefix ++ [_]u8{0}, 0); + try map.put(testing.allocator, prefix ++ [_]u8{2}, 2); + try map.put(testing.allocator, prefix ++ [_]u8{3}, 3); + try map.put(testing.allocator, prefix ++ [_]u8{1}, 1); + + it = [_]u8{0} ** 32; + for (map.slice()) |entry| { + if (!entry.isEmpty()) { + if (!mem.order(u8, &it, &entry.hash).compare(.lte)) { + return error.Unsorted; + } + it = entry.hash; + } + } + + try testing.expectEqual(@as(usize, 0), map.delete(prefix ++ [_]u8{0}).?); + try testing.expectEqual(@as(usize, 1), map.delete(prefix ++ [_]u8{1}).?); + try testing.expectEqual(@as(usize, 2), map.delete(prefix ++ [_]u8{2}).?); + try testing.expectEqual(@as(usize, 3), 
map.delete(prefix ++ [_]u8{3}).?); + + try map.put(testing.allocator, prefix ++ [_]u8{0}, 0); + try map.put(testing.allocator, prefix ++ [_]u8{2}, 2); + try map.put(testing.allocator, prefix ++ [_]u8{1}, 1); + try map.put(testing.allocator, prefix ++ [_]u8{3}, 3); + + it = [_]u8{0} ** 32; + for (map.slice()) |entry| { + if (!entry.isEmpty()) { + if (!mem.order(u8, &it, &entry.hash).compare(.lte)) { + return error.Unsorted; + } + it = entry.hash; + } + } + + try testing.expectEqual(@as(usize, 3), map.delete(prefix ++ [_]u8{3}).?); + try testing.expectEqual(@as(usize, 2), map.delete(prefix ++ [_]u8{2}).?); + try testing.expectEqual(@as(usize, 1), map.delete(prefix ++ [_]u8{1}).?); + try testing.expectEqual(@as(usize, 0), map.delete(prefix ++ [_]u8{0}).?); + + try map.put(testing.allocator, prefix ++ [_]u8{3}, 3); + try map.put(testing.allocator, prefix ++ [_]u8{0}, 0); + try map.put(testing.allocator, prefix ++ [_]u8{1}, 1); + try map.put(testing.allocator, prefix ++ [_]u8{2}, 2); + + it = [_]u8{0} ** 32; + for (map.slice()) |entry| { + if (!entry.isEmpty()) { + if (!mem.order(u8, &it, &entry.hash).compare(.lte)) { + return error.Unsorted; + } + it = entry.hash; + } + } + + try testing.expectEqual(@as(usize, 3), map.delete(prefix ++ [_]u8{3}).?); + try testing.expectEqual(@as(usize, 0), map.delete(prefix ++ [_]u8{0}).?); + try testing.expectEqual(@as(usize, 1), map.delete(prefix ++ [_]u8{1}).?); + try testing.expectEqual(@as(usize, 2), map.delete(prefix ++ [_]u8{2}).?); +} diff --git a/src/api/schema.d.ts b/src/api/schema.d.ts index 053ab9bb49..c97dd3423a 100644 --- a/src/api/schema.d.ts +++ b/src/api/schema.d.ts @@ -723,6 +723,7 @@ export interface BunInstall { global_bin_dir?: string; frozen_lockfile?: boolean; exact?: boolean; + concurrent_scripts?: uint32; } export interface ClientServerModule { diff --git a/src/api/schema.js b/src/api/schema.js index 40c02ad93c..44d38c36fc 100644 --- a/src/api/schema.js +++ b/src/api/schema.js @@ -3060,6 +3060,10 @@ function 
decodeBunInstall(bb) { result["exact"] = !!bb.readByte(); break; + case 21: + result["concurrent_scripts"] = bb.readUint32(); + break; + default: throw new Error("Attempted to parse invalid message"); } @@ -3192,6 +3196,12 @@ function encodeBunInstall(message, bb) { bb.writeByte(20); bb.writeByte(value); } + + var value = message["concurrent_scripts"]; + if (value != null) { + bb.writeByte(21); + bb.writeUint32(value); + } bb.writeByte(0); } diff --git a/src/api/schema.peechy b/src/api/schema.peechy index 798369a79f..32f1503a14 100644 --- a/src/api/schema.peechy +++ b/src/api/schema.peechy @@ -590,6 +590,7 @@ message BunInstall { string global_bin_dir = 18; bool frozen_lockfile = 19; bool exact = 20; + uint32 concurrent_scripts = 21; } struct ClientServerModule { diff --git a/src/api/schema.zig b/src/api/schema.zig index bf4aea1e75..f1739d9913 100644 --- a/src/api/schema.zig +++ b/src/api/schema.zig @@ -2895,6 +2895,9 @@ pub const Api = struct { /// exact exact: ?bool = null, + /// concurrent_scripts + concurrent_scripts: ?u32 = null, + pub fn decode(reader: anytype) anyerror!BunInstall { var this = std.mem.zeroes(BunInstall); @@ -2964,6 +2967,9 @@ pub const Api = struct { 20 => { this.exact = try reader.readValue(bool); }, + 21 => { + this.concurrent_scripts = try reader.readValue(u32); + }, else => { return error.InvalidMessage; }, @@ -3053,6 +3059,10 @@ pub const Api = struct { try writer.writeFieldID(20); try writer.writeInt(@as(u8, @intFromBool(exact))); } + if (this.concurrent_scripts) |concurrent_scripts| { + try writer.writeFieldID(21); + try writer.writeInt(concurrent_scripts); + } try writer.endMessage(); } }; diff --git a/src/async/posix_event_loop.zig b/src/async/posix_event_loop.zig index 036a95d485..067c1dec47 100644 --- a/src/async/posix_event_loop.zig +++ b/src/async/posix_event_loop.zig @@ -133,6 +133,10 @@ pub const FilePoll = struct { pub var owner: Owner = Owner.init(@as(*Deactivated, @ptrFromInt(@as(usize, 0xDEADBEEF)))); }; + const RunCommand 
= bun.RunCommand; + const LifecycleScriptSubprocess = RunCommand.LifecycleScriptSubprocess; + const LifecycleScriptSubprocessPid = RunCommand.LifecycleScriptSubprocess.PidPollData; + pub const Owner = bun.TaggedPointerUnion(.{ FileReader, FileSink, @@ -142,6 +146,8 @@ pub const FilePoll = struct { Deactivated, DNSResolver, GetAddrInfoRequest, + LifecycleScriptSubprocess, + LifecycleScriptSubprocessPid, }); fn updateFlags(poll: *FilePoll, updated: Flags.Set) void { @@ -270,6 +276,18 @@ pub const FilePoll = struct { loader.onMachportChange(); }, + @field(Owner.Tag, "LifecycleScriptSubprocess") => { + log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {d}) LifecycleScriptSubprocess", .{poll.fd}); + var loader: *LifecycleScriptSubprocess = ptr.as(LifecycleScriptSubprocess); + loader.onOutputUpdate(size_or_offset, poll.fileDescriptor()); + }, + + @field(Owner.Tag, "PidPollData") => { + log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {d}) LifecycleScriptSubprocess Pid", .{poll.fd}); + var loader: *LifecycleScriptSubprocess = ptr.as(LifecycleScriptSubprocess); + loader.onProcessUpdate(size_or_offset); + }, + else => { log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {d}) disconnected?", .{poll.fd}); }, @@ -509,6 +527,36 @@ pub const FilePoll = struct { return poll; } + pub fn initWithPackageManager(m: *bun.PackageManager, fd: bun.FileDescriptor, flags: Flags.Struct, owner: anytype) *FilePoll { + return initWithPackageManagerWithOwner(m, fd, flags, Owner.init(owner)); + } + + pub fn initWithPackageManagerWithOwner(manager: *bun.PackageManager, fd: bun.FileDescriptor, flags: Flags.Struct, owner: Owner) *FilePoll { + var poll = manager.file_poll_store.get(); + poll.fd = @intCast(fd); + poll.flags = Flags.Set.init(flags); + poll.owner = owner; + poll.next_to_free = null; + + if (KQueueGenerationNumber != u0) { + max_generation_number +%= 1; + poll.generation_number = max_generation_number; + } + + return poll; + } + + pub inline fn canRef(this: *const FilePoll) bool { + if 
(this.flags.contains(.disable)) + return false; + + return !this.flags.contains(.has_incremented_poll_count); + } + + pub inline fn canUnref(this: *const FilePoll) bool { + return this.flags.contains(.has_incremented_poll_count); + } + /// Prevent a poll from keeping the process alive. pub fn unref(this: *FilePoll, vm: *JSC.VirtualMachine) void { log("unref", .{}); @@ -710,6 +758,10 @@ pub const FilePoll = struct { const invalid_fd = bun.invalid_fd; + pub inline fn fileDescriptor(this: *FilePoll) bun.FileDescriptor { + return @intCast(this.fd); + } + pub fn unregister(this: *FilePoll, loop: *Loop, force_unregister: bool) JSC.Maybe(void) { return this.unregisterWithFd(loop, this.fd, force_unregister); } diff --git a/src/async/windows_event_loop.zig b/src/async/windows_event_loop.zig index 654b4d87cd..5fb6e7652b 100644 --- a/src/async/windows_event_loop.zig +++ b/src/async/windows_event_loop.zig @@ -178,11 +178,29 @@ pub const FilePoll = struct { return poll; } + pub fn initWithPackageManager(m: *bun.PackageManager, fd: bun.FileDescriptor, flags: Flags.Struct, owner: anytype) *FilePoll { + return initWithPackageManagerWithOwner(m, fd, flags, Owner.init(owner)); + } + + pub fn initWithPackageManagerWithOwner(manager: *bun.PackageManager, fd: bun.FileDescriptor, flags: Flags.Struct, owner: Owner) *FilePoll { + var poll = manager.file_poll_store.get(); + poll.fd = fd; + poll.flags = Flags.Set.init(flags); + poll.owner = owner; + poll.next_to_free = null; + + return poll; + } + pub fn deinit(this: *FilePoll) void { var vm = JSC.VirtualMachine.get(); this.deinitWithVM(vm); } + pub inline fn fileDescriptor(this: *FilePoll) bun.FileDescriptor { + return this.fd; + } + pub const deinitForceUnregister = deinit; pub fn unregister(this: *FilePoll, loop: *Loop) bool { diff --git a/src/bit_set.zig b/src/bit_set.zig index 36f683ea40..58e59233a8 100644 --- a/src/bit_set.zig +++ b/src/bit_set.zig @@ -687,6 +687,63 @@ pub const DynamicBitSetUnmanaged = struct { var empty_masks_data = 
[_]MaskInt{ 0, undefined }; const empty_masks_ptr = empty_masks_data[1..2]; + /// Do not resize the bitsets! + /// + /// Single buffer for multiple bitsets of equal length. Does not + /// implement all methods of DynamicBitSetUnmanaged and should + /// be used carefully. + pub const List = struct { + buf: []MaskInt, + n: usize, + bit_length: usize, + + pub fn initEmpty(allocator: Allocator, n: usize, bit_length: usize) !List { + const masks = numMasks(bit_length); + const single_bitset_buf_size = masks + 1; + + const buf = try allocator.alloc(MaskInt, single_bitset_buf_size * n); + + const fill_value = std.math.boolMask(MaskInt, false); + @memset(buf, fill_value); + + for (0..n) |i| { + buf[i * single_bitset_buf_size] = single_bitset_buf_size; + } + + return List{ + .buf = buf, + .n = n, + .bit_length = bit_length, + }; + } + + pub fn deinit(this: List, allocator: Allocator) void { + allocator.free(this.buf); + } + + pub fn at(this: List, i: usize) Self { + const num_masks = numMasks(this.bit_length); + const single_bitset_buf_size = num_masks + 1; + + const offset = (single_bitset_buf_size * i); + + return Self{ + .bit_length = this.bit_length, + .masks = this.buf[offset..].ptr + 1, + }; + } + + pub fn set(this: List, i: usize, j: usize) void { + var bitset = this.at(i); + bitset.set(j); + } + + pub fn setUnion(this: List, i: usize, other: Self) void { + var bitset = this.at(i); + bitset.setUnion(other); + } + }; + /// Creates a bit set with no elements present. /// If bit_length is not zero, deinit must eventually be called. 
pub fn initEmpty(allocator: Allocator, bit_length: usize) !Self { diff --git a/src/bun.js/api/bun/subprocess.zig b/src/bun.js/api/bun/subprocess.zig index 5e6053b281..162fec2fda 100644 --- a/src/bun.js/api/bun/subprocess.zig +++ b/src/bun.js/api/bun/subprocess.zig @@ -16,6 +16,7 @@ const Which = @import("../../../which.zig"); const Async = bun.Async; const IPC = @import("../../ipc.zig"); const uws = bun.uws; +const LifecycleScriptSubprocess = @import("../../../cli/run_command.zig").RunCommand.LifecycleScriptSubprocess; const PosixSpawn = @import("./spawn.zig").PosixSpawn; @@ -1530,13 +1531,7 @@ pub const Subprocess = struct { break :brk pid; } - const kernel = @import("../../../analytics.zig").GenerateHeader.GeneratePlatform.kernelVersion(); - - // pidfd_nonblock only supported in 5.10+ - var pidfd_flags: u32 = if (!is_sync and kernel.orderWithoutTag(.{ .major = 5, .minor = 10, .patch = 0 }).compare(.gte)) - std.os.O.NONBLOCK - else - 0; + var pidfd_flags = pidfdFlagsForLinux(); var rc = std.os.linux.pidfd_open( @intCast(pid), @@ -2199,6 +2194,16 @@ pub const Subprocess = struct { this.updateHasPendingActivity(); } + pub fn pidfdFlagsForLinux() u32 { + const kernel = @import("../../../analytics.zig").GenerateHeader.GeneratePlatform.kernelVersion(); + + // pidfd_nonblock only supported in 5.10+ + return if (kernel.orderWithoutTag(.{ .major = 5, .minor = 10, .patch = 0 }).compare(.gte)) + std.os.O.NONBLOCK + else + 0; + } + pub const IPCHandler = IPC.NewIPCHandler(Subprocess); // Machines which do not support pidfd_open (GVisor, Linux Kernel < 5.6) @@ -2206,7 +2211,9 @@ pub const Subprocess = struct { // We use a single thread to call waitpid() in a loop. 
pub const WaiterThread = struct { concurrent_queue: Queue = .{}, + lifecycle_script_concurrent_queue: LifecycleScriptTaskQueue = .{}, queue: std.ArrayList(*Subprocess) = std.ArrayList(*Subprocess).init(bun.default_allocator), + lifecycle_script_queue: std.ArrayList(*LifecycleScriptSubprocess) = std.ArrayList(*LifecycleScriptSubprocess).init(bun.default_allocator), started: std.atomic.Atomic(u32) = std.atomic.Atomic(u32).init(0), signalfd: if (Environment.isLinux) bun.FileDescriptor else u0 = undefined, eventfd: if (Environment.isLinux) bun.FileDescriptor else u0 = undefined, @@ -2224,10 +2231,16 @@ pub const Subprocess = struct { next: ?*WaitTask = null, }; + pub const LifecycleScriptWaitTask = struct { + lifecycle_script_subprocess: *LifecycleScriptSubprocess, + next: ?*LifecycleScriptWaitTask = null, + }; + var should_use_waiter_thread = false; const stack_size = 512 * 1024; pub const Queue = bun.UnboundedQueue(WaitTask, .next); + pub const LifecycleScriptTaskQueue = bun.UnboundedQueue(LifecycleScriptWaitTask, .next); pub var instance: WaiterThread = .{}; pub fn init() !void { std.debug.assert(should_use_waiter_thread); @@ -2290,55 +2303,127 @@ pub const Subprocess = struct { } } + pub fn appendLifecycleScriptSubprocess(lifecycle_script: *LifecycleScriptSubprocess) void { + var task = bun.default_allocator.create(LifecycleScriptWaitTask) catch unreachable; + task.* = LifecycleScriptWaitTask{ + .lifecycle_script_subprocess = lifecycle_script, + }; + instance.lifecycle_script_concurrent_queue.push(task); + + init() catch @panic("Failed to start WaiterThread"); + + if (comptime Environment.isLinux) { + const one = @as([8]u8, @bitCast(@as(usize, 1))); + _ = std.os.write(instance.eventfd, &one) catch @panic("Failed to write to eventfd"); + } + } + + fn loopSubprocess(this: *WaiterThread) void { + { + var batch = this.concurrent_queue.popBatch(); + var iter = batch.iterator(); + this.queue.ensureUnusedCapacity(batch.count) catch unreachable; + while (iter.next()) 
|task| { + this.queue.appendAssumeCapacity(task.subprocess); + bun.default_allocator.destroy(task); + } + } + + var queue: []*Subprocess = this.queue.items; + var i: usize = 0; + while (queue.len > 0 and i < queue.len) { + var process = queue[i]; + + // this case shouldn't really happen + if (process.pid == bun.invalid_fd) { + _ = this.queue.orderedRemove(i); + _ = process.poll.wait_thread.ref_count.fetchSub(1, .Monotonic); + queue = this.queue.items; + continue; + } + + const result = PosixSpawn.waitpid(process.pid, std.os.W.NOHANG); + if (result == .err or (result == .result and result.result.pid == process.pid)) { + _ = this.queue.orderedRemove(i); + queue = this.queue.items; + + var task = bun.default_allocator.create(WaitPidResultTask) catch unreachable; + task.* = WaitPidResultTask{ + .result = result, + .subprocess = process, + }; + + process.globalThis.bunVMConcurrently().enqueueTaskConcurrent( + JSC.ConcurrentTask.create( + JSC.Task.init(task), + ), + ); + } + + i += 1; + } + } + + fn loopLifecycleScriptsSubprocess(this: *WaiterThread) void { + { + var batch = this.lifecycle_script_concurrent_queue.popBatch(); + var iter = batch.iterator(); + this.lifecycle_script_queue.ensureUnusedCapacity(batch.count) catch unreachable; + while (iter.next()) |task| { + this.lifecycle_script_queue.appendAssumeCapacity(task.lifecycle_script_subprocess); + bun.default_allocator.destroy(task); + } + } + + var queue: []*LifecycleScriptSubprocess = this.lifecycle_script_queue.items; + var i: usize = 0; + while (queue.len > 0 and i < queue.len) { + var lifecycle_script_subprocess = queue[i]; + + if (lifecycle_script_subprocess.pid == bun.invalid_fd) { + _ = this.lifecycle_script_queue.orderedRemove(i); + queue = this.lifecycle_script_queue.items; + } + + // const result = PosixSpawn.waitpid(lifecycle_script_subprocess.pid, std.os.W.NOHANG); + switch (PosixSpawn.waitpid(lifecycle_script_subprocess.pid, std.os.W.NOHANG)) { + .err => |err| { + std.debug.print("waitpid error: 
{s}\n", .{@tagName(err.getErrno())}); + Output.prettyErrorln("error: Failed to run {s} script from \"{s}\" due to error {d} {s}", .{ + lifecycle_script_subprocess.script_name, + lifecycle_script_subprocess.package_name, + err.errno, + @tagName(err.getErrno()), + }); + Output.flush(); + _ = lifecycle_script_subprocess.manager.pending_lifecycle_script_tasks.fetchSub(1, .Monotonic); + _ = LifecycleScriptSubprocess.alive_count.fetchSub(1, .Monotonic); + }, + .result => |result| { + if (result.pid == lifecycle_script_subprocess.pid) { + _ = this.lifecycle_script_queue.orderedRemove(i); + queue = this.lifecycle_script_queue.items; + + lifecycle_script_subprocess.onResult(.{ + .pid = result.pid, + .status = result.status, + }); + } + }, + } + + i += 1; + } + } + pub fn loop() void { Output.Source.configureNamedThread("Waitpid"); var this = &instance; while (true) { - { - var batch = this.concurrent_queue.popBatch(); - var iter = batch.iterator(); - this.queue.ensureUnusedCapacity(batch.count) catch unreachable; - while (iter.next()) |task| { - this.queue.appendAssumeCapacity(task.subprocess); - bun.default_allocator.destroy(task); - } - } - - var queue: []*Subprocess = this.queue.items; - var i: usize = 0; - while (queue.len > 0 and i < queue.len) { - var process = queue[i]; - - // this case shouldn't really happen - if (process.pid == bun.invalid_fd) { - _ = this.queue.orderedRemove(i); - _ = process.poll.wait_thread.ref_count.fetchSub(1, .Monotonic); - queue = this.queue.items; - continue; - } - - const result = PosixSpawn.waitpid(process.pid, std.os.W.NOHANG); - if (result == .err or (result == .result and result.result.pid == process.pid)) { - _ = this.queue.orderedRemove(i); - queue = this.queue.items; - - var task = bun.default_allocator.create(WaitPidResultTask) catch unreachable; - task.* = WaitPidResultTask{ - .result = result, - .subprocess = process, - }; - - process.globalThis.bunVMConcurrently().enqueueTaskConcurrent( - JSC.ConcurrentTask.create( - 
JSC.Task.init(task), - ), - ); - } - - i += 1; - } + this.loopSubprocess(); + this.loopLifecycleScriptsSubprocess(); if (comptime Environment.isLinux) { var polls = [_]std.os.pollfd{ diff --git a/src/bun.js/bindings/bindings.zig b/src/bun.js/bindings/bindings.zig index b682846deb..d7761e90a1 100644 --- a/src/bun.js/bindings/bindings.zig +++ b/src/bun.js/bindings/bindings.zig @@ -4725,7 +4725,7 @@ pub const JSValue = enum(JSValueReprInt) { } pub fn toBoolean(this: JSValue) bool { - if (isUndefinedOrNull(this)) { + if (isEmptyOrUndefinedOrNull(this)) { return false; } diff --git a/src/bun.js/node/node_fs.zig b/src/bun.js/node/node_fs.zig index c8dce34f78..d0ccee54af 100644 --- a/src/bun.js/node/node_fs.zig +++ b/src/bun.js/node/node_fs.zig @@ -3398,17 +3398,17 @@ pub const Arguments = struct { if (arguments.next()) |arg| { arguments.eat(); - recursive = arg.asBoolean(); + recursive = arg.toBoolean(); } if (arguments.next()) |arg| { arguments.eat(); - errorOnExist = arg.asBoolean(); + errorOnExist = arg.toBoolean(); } if (arguments.next()) |arg| { arguments.eat(); - force = arg.asBoolean(); + force = arg.toBoolean(); } if (arguments.next()) |arg| { diff --git a/src/bun.zig b/src/bun.zig index e98c57d6ea..5db681175f 100644 --- a/src/bun.zig +++ b/src/bun.zig @@ -1580,6 +1580,7 @@ pub fn isMissingIOUring() bool { pub const CLI = @import("./cli.zig"); pub const PackageManager = @import("./install/install.zig").PackageManager; +pub const RunCommand = @import("./cli/run_command.zig").RunCommand; pub const fs = @import("./fs.zig"); pub const Bundler = bundler.Bundler; diff --git a/src/bunfig.zig b/src/bunfig.zig index a777475da5..a01735adb4 100644 --- a/src/bunfig.zig +++ b/src/bunfig.zig @@ -18,6 +18,7 @@ const Defines = @import("./defines.zig"); const ConditionsMap = @import("./resolver/package_json.zig").ESModule.ConditionsMap; const Api = @import("./api/schema.zig").Api; const Npm = @import("./install/npm.zig"); +const PackageManager = 
@import("./install/install.zig").PackageManager; const PackageJSON = @import("./resolver/package_json.zig").PackageJSON; const resolver = @import("./resolver/resolver.zig"); pub const MacroImportReplacementMap = bun.StringArrayHashMap(string); @@ -395,6 +396,13 @@ pub const Bunfig = struct { } } + if (_bun.get("concurrentScripts")) |jobs| { + if (jobs.data == .e_number) { + install.concurrent_scripts = jobs.data.e_number.toU32(); + if (install.concurrent_scripts.? == 0) install.concurrent_scripts = null; + } + } + if (_bun.get("lockfile")) |lockfile_expr| { if (lockfile_expr.get("print")) |lockfile| { try this.expect(lockfile, .e_string); diff --git a/src/cli/bunx_command.zig b/src/cli/bunx_command.zig index 83832bf513..827c33e37c 100644 --- a/src/cli/bunx_command.zig +++ b/src/cli/bunx_command.zig @@ -255,8 +255,15 @@ pub const BunxCommand = struct { ctx, &this_bundler, null, - &ORIGINAL_PATH, true, + ); + + try Run.configurePathForRun( + ctx, + root_dir_info, + &this_bundler, + &ORIGINAL_PATH, + root_dir_info.abs_path, force_using_bun, ); diff --git a/src/cli/package_manager_command.zig b/src/cli/package_manager_command.zig index 907289907d..a75b8e6c2a 100644 --- a/src/cli/package_manager_command.zig +++ b/src/cli/package_manager_command.zig @@ -202,7 +202,7 @@ pub const PackageManagerCommand = struct { var directories = std.ArrayList(NodeModulesFolder).init(ctx.allocator); defer directories.deinit(); - while (iterator.nextNodeModulesFolder()) |node_modules| { + while (iterator.nextNodeModulesFolder(null)) |node_modules| { const path_len = node_modules.relative_path.len; const path = try ctx.allocator.alloc(u8, path_len + 1); bun.copy(u8, path, node_modules.relative_path); @@ -214,6 +214,7 @@ pub const PackageManagerCommand = struct { try directories.append(.{ .relative_path = path[0..path_len :0], .dependencies = dependencies, + .tree_id = node_modules.tree_id, }); } diff --git a/src/cli/run_command.zig b/src/cli/run_command.zig index fbd17bbf50..d07c47b6f9 
100644 --- a/src/cli/run_command.zig +++ b/src/cli/run_command.zig @@ -1,4 +1,5 @@ const bun = @import("root").bun; +const Async = bun.Async; const string = bun.string; const Output = bun.Output; const Global = bun.Global; @@ -9,6 +10,9 @@ const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; const C = bun.C; const std = @import("std"); +const uws = bun.uws; +const JSC = bun.JSC; +const WaiterThread = JSC.Subprocess.WaiterThread; const lex = bun.js_lexer; const logger = @import("root").bun.logger; @@ -43,6 +47,10 @@ const PackageJSON = @import("../resolver/package_json.zig").PackageJSON; const yarn_commands: []u64 = @import("./list-of-yarn-commands.zig").all_yarn_commands; const ShellCompletions = @import("./shell_completions.zig"); +const PosixSpawn = @import("../bun.js/api/bun/spawn.zig").PosixSpawn; + +const PackageManager = @import("../install/install.zig").PackageManager; +const Lockfile = @import("../install/lockfile.zig"); pub const RunCommand = struct { const shells_to_search = &[_]string{ @@ -51,7 +59,7 @@ pub const RunCommand = struct { "zsh", }; - pub fn findShell(PATH: string, cwd: string) ?string { + pub fn findShell(PATH: string, cwd: string) ?stringZ { if (comptime Environment.isWindows) { return "C:\\Windows\\System32\\cmd.exe"; } @@ -227,7 +235,509 @@ pub const RunCommand = struct { const log = Output.scoped(.RUN, false); - pub fn runPackageScript( + pub const LifecycleScriptSubprocess = struct { + script_name: []const u8, + package_name: []const u8, + + scripts: [6]?Lockfile.Scripts.Entry, + current_script_index: usize = 0, + + finished_fds: u8 = 0, + + pid: std.os.pid_t = bun.invalid_fd, + + output_buffer: bun.ByteList, + pid_poll: *Async.FilePoll, + waitpid_result: ?PosixSpawn.WaitPidResult, + stdout_poll: *Async.FilePoll, + stderr_poll: *Async.FilePoll, + manager: *PackageManager, + envp: [:null]?[*:0]u8, + + pub var max_alive = 32; + pub var alive_count: std.atomic.Atomic(usize) = std.atomic.Atomic(usize).init(0); + + 
/// A "nothing" struct that lets us reuse the same pointer + /// but with a different tag for the file poll + pub const PidPollData = struct { process: LifecycleScriptSubprocess }; + + pub fn spawnNextScript(this: *LifecycleScriptSubprocess, next_script_index: usize) !void { + _ = alive_count.fetchAdd(1, .Monotonic); + errdefer _ = alive_count.fetchSub(1, .Monotonic); + + const manager = this.manager; + const original_script = this.scripts[next_script_index].?; + const cwd = original_script.cwd; + const env = manager.env; + const name = Lockfile.Scripts.names[next_script_index]; + + if (manager.scripts_node) |scripts_node| { + if (manager.finished_installing.load(.Monotonic)) { + manager.setNodeName( + scripts_node, + original_script.package_name, + PackageManager.ProgressStrings.script_emoji, + true, + ); + scripts_node.activate(); + manager.progress.refresh(); + } + } + + this.script_name = name; + this.package_name = original_script.package_name; + this.current_script_index = next_script_index; + this.waitpid_result = null; + this.finished_fds = 0; + this.output_buffer = .{}; + + const shell_bin = findShell(env.map.get("PATH") orelse "", cwd) orelse return error.MissingShell; + + var copy_script = try std.ArrayList(u8).initCapacity(manager.allocator, original_script.script.len + 1); + try replacePackageManagerRun(©_script, original_script.script); + try copy_script.append(0); + + var combined_script: [:0]u8 = copy_script.items[0 .. 
copy_script.items.len - 1 :0]; + + var argv = try manager.allocator.allocSentinel(?[*:0]const u8, 3, null); + defer manager.allocator.free(argv); + argv[0] = shell_bin; + argv[1] = "-c"; + argv[2] = combined_script; + + // var arena = bun.ArenaAllocator.init(manager.allocator); + // defer arena.deinit(); + + // const envp = try env.map.createNullDelimitedEnvMap(arena.allocator()); + // { + // var counter: usize = 0; + // for (this.envp) |e| { + // if (e) |_e| { + // counter += bun.span(_e).len; + // } + // } + + // std.debug.print("argv: \"{s}\" \"{s}\" \"{s}\"\n", .{ argv[0].?, argv[1].?, argv[2].? }); + // std.debug.print("env length: {d}\n", .{counter}); + // } + + var flags: i32 = bun.C.POSIX_SPAWN_SETSIGDEF | bun.C.POSIX_SPAWN_SETSIGMASK; + if (comptime Environment.isMac) { + flags |= bun.C.POSIX_SPAWN_CLOEXEC_DEFAULT; + } + + var attr = try PosixSpawn.Attr.init(); + defer attr.deinit(); + try attr.set(@intCast(flags)); + try attr.resetSignals(); + + var actions = try PosixSpawn.Actions.init(); + defer actions.deinit(); + try actions.openZ(bun.STDIN_FD, "/dev/null", std.os.O.RDONLY, 0o664); + + // Have both stdout and stderr write to the same buffer + const fdsOut = try std.os.pipe2(0); + try actions.dup2(fdsOut[1], bun.STDOUT_FD); + + const fdsErr = try std.os.pipe2(0); + try actions.dup2(fdsErr[1], bun.STDERR_FD); + + try actions.chdir(cwd); + + const pid = brk: { + defer { + _ = bun.sys.close(fdsOut[1]); + _ = bun.sys.close(fdsErr[1]); + } + switch (PosixSpawn.spawnZ( + argv[0].?, + actions, + attr, + argv, + this.envp, + )) { + .err => |err| { + Output.prettyErrorln("error: Failed to spawn script {s} due to error {d} {s}", .{ + name, + err.errno, + @tagName(err.getErrno()), + }); + Output.flush(); + return; + }, + .result => |pid| break :brk pid, + } + }; + this.pid = pid; + + const pid_fd: std.os.fd_t = brk: { + if (!Environment.isLinux or WaiterThread.shouldUseWaiterThread()) { + break :brk pid; + } + + var pidfd_flags = 
JSC.Subprocess.pidfdFlagsForLinux(); + + var fd = std.os.linux.pidfd_open( + @intCast(pid), + pidfd_flags, + ); + + while (true) { + switch (std.os.linux.getErrno(fd)) { + .SUCCESS => break :brk @intCast(fd), + .INTR => { + fd = std.os.linux.pidfd_open( + @intCast(pid), + pidfd_flags, + ); + continue; + }, + else => |err| { + if (err == .INVAL) { + if (pidfd_flags != 0) { + fd = std.os.linux.pidfd_open( + @intCast(pid), + 0, + ); + pidfd_flags = 0; + continue; + } + } + + if (err == .NOSYS) { + WaiterThread.setShouldUseWaiterThread(); + break :brk pid; + } + + var status: u32 = 0; + // ensure we don't leak the child process on error + _ = std.os.linux.waitpid(pid, &status, 0); + + Output.prettyErrorln("error: Failed to spawn script {s} due to error {d} {s}", .{ + name, + err, + @tagName(err), + }); + Output.flush(); + return; + }, + } + } + }; + + this.stdout_poll = Async.FilePoll.initWithPackageManager(manager, fdsOut[0], .{}, this); + this.stderr_poll = Async.FilePoll.initWithPackageManager(manager, fdsErr[0], .{}, this); + + _ = try this.stdout_poll.register(this.manager.uws_event_loop, .readable, false).unwrap(); + _ = try this.stderr_poll.register(this.manager.uws_event_loop, .readable, false).unwrap(); + + if (WaiterThread.shouldUseWaiterThread()) { + WaiterThread.appendLifecycleScriptSubprocess(this); + } else { + this.pid_poll = Async.FilePoll.initWithPackageManager( + manager, + pid_fd, + .{}, + @as(*PidPollData, @ptrCast(this)), + ); + switch (this.pid_poll.register( + this.manager.uws_event_loop, + .process, + true, + )) { + .result => {}, + .err => |err| { + // Sometimes the pid poll can fail to register if the process exits + // between posix_spawn() and pid_poll.register(), but it is unlikely. + // Any other error is unexpected here. + if (err.getErrno() != .SRCH) { + @panic("This shouldn't happen. 
Could not register pid poll"); + } + + this.onProcessUpdate(0); + }, + } + } + } + + pub fn onOutputUpdate(this: *LifecycleScriptSubprocess, size: i64, fd: bun.FileDescriptor) void { + if (comptime Environment.isMac) { + if (size == 0) { + std.debug.assert(this.finished_fds < 2); + this.finished_fds += 1; + + // close poll as soon as possible to prevent + // another size=0 message. + const poll = if (this.stdout_poll.fileDescriptor() == fd) + this.stdout_poll + else + this.stderr_poll; + _ = poll.unregister(this.manager.uws_event_loop, false); + // FD is already closed + + if (this.waitpid_result) |result| { + if (this.finished_fds == 2) { + // potential free() + this.onResult(result); + } + } + return; + } + this.output_buffer.ensureUnusedCapacity(this.manager.allocator, @intCast(size)) catch bun.outOfMemory(); + var remaining = size; + while (remaining > 0) { + switch (bun.sys.read(fd, this.output_buffer.ptr[this.output_buffer.len..this.output_buffer.cap])) { + .result => |bytes_read| { + this.output_buffer.len += @truncate(bytes_read); + remaining -|= @intCast(bytes_read); + }, + .err => |err| { + Output.prettyErrorln("error: Failed to read {s} script output from \"{s}\" due to error {d} {s}", .{ + this.script_name, + this.package_name, + err.errno, + @tagName(err.getErrno()), + }); + return; + }, + } + } + } else { + this.output_buffer.ensureUnusedCapacity(this.manager.allocator, 32) catch bun.outOfMemory(); + switch (bun.sys.read(fd, this.output_buffer.ptr[this.output_buffer.len..this.output_buffer.cap])) { + .result => |bytes_read| { + this.output_buffer.len += @truncate(bytes_read); + if (bytes_read == 0) { + std.debug.assert(this.finished_fds < 2); + this.finished_fds += 1; + // close poll as soon as possible to prevent + // another size=0 message. 
+ const poll = if (this.stdout_poll.fileDescriptor() == fd) + this.stdout_poll + else + this.stderr_poll; + _ = poll.unregister(this.manager.uws_event_loop, false); + // FD is already closed + + if (this.waitpid_result) |result| { + if (this.finished_fds == 2) { + // potential free() + this.onResult(result); + } + } + return; + } + if (bytes_read < 32) { + return; + } + }, + .err => |err| { + Output.prettyErrorln("error: Failed to read {s} script output from \"{s}\" due to error {d} {s}", .{ + this.script_name, + this.package_name, + err.errno, + @tagName(err.getErrno()), + }); + return; + }, + } + + while (true) { + this.output_buffer.ensureUnusedCapacity(this.manager.allocator, 32) catch bun.outOfMemory(); + switch (bun.sys.read(fd, this.output_buffer.ptr[this.output_buffer.len..this.output_buffer.cap])) { + .result => |bytes_read| { + this.output_buffer.len += @truncate(bytes_read); + if (bytes_read < 32) { + return; + } + }, + .err => |err| { + Output.prettyErrorln("error: Failed to read {s} script output from \"{s}\" due to error {d} {s}", .{ + this.script_name, + this.package_name, + err.errno, + @tagName(err.getErrno()), + }); + return; + }, + } + } + } + } + + pub fn printOutput(this: *LifecycleScriptSubprocess) void { + Output.disableBuffering(); + Output.flush(); + Output.errorWriter().print("{s}\n", .{this.output_buffer.slice()}) catch {}; + Output.enableBuffering(); + } + + pub fn onProcessUpdate(this: *LifecycleScriptSubprocess, _: i64) void { + while (true) { + switch (PosixSpawn.waitpid(this.pid, std.os.W.NOHANG)) { + .err => |err| { + Output.prettyErrorln("error: Failed to run {s} script from \"{s}\" due to error {d} {s}", .{ + this.script_name, + this.package_name, + err.errno, + @tagName(err.getErrno()), + }); + Output.flush(); + _ = this.manager.pending_lifecycle_script_tasks.fetchSub(1, .Monotonic); + _ = alive_count.fetchSub(1, .Monotonic); + return; + }, + .result => |result| { + if (result.pid != this.pid) { + continue; + } + 
this.onResult(result); + return; + }, + } + } + } + + /// This function may free the *LifecycleScriptSubprocess + pub fn onResult(this: *LifecycleScriptSubprocess, result: PosixSpawn.WaitPidResult) void { + _ = alive_count.fetchSub(1, .Monotonic); + if (result.pid == 0) { + Output.prettyErrorln("error: Failed to run {s} script from \"{s}\" due to error {d} {s}", .{ + this.script_name, + this.package_name, + 0, + "Unknown", + }); + this.deinit(); + Output.flush(); + Global.exit(1); + return; + } + if (std.os.W.IFEXITED(result.status)) { + std.debug.assert(this.finished_fds <= 2); + if (this.finished_fds < 2) { + this.waitpid_result = result; + return; + } + + const code = std.os.W.EXITSTATUS(result.status); + if (code > 0) { + this.printOutput(); + Output.prettyErrorln("error: {s} script from \"{s}\" exited with {any}", .{ + this.script_name, + this.package_name, + bun.SignalCode.from(code), + }); + this.deinit(); + Output.flush(); + Global.exit(code); + } + + if (this.manager.scripts_node) |scripts_node| { + if (this.manager.finished_installing.load(.Monotonic)) { + scripts_node.completeOne(); + } else { + _ = @atomicRmw(usize, &scripts_node.unprotected_completed_items, .Add, 1, .Monotonic); + } + } + + for (this.current_script_index + 1..Lockfile.Scripts.names.len) |new_script_index| { + if (this.scripts[new_script_index] != null) { + this.resetPolls(); + this.spawnNextScript(new_script_index) catch |err| { + Output.prettyErrorln("error: Failed to run script {s} due to error {s}", .{ + Lockfile.Scripts.names[new_script_index], + @errorName(err), + }); + Global.exit(1); + }; + return; + } + } + + // the last script finished + _ = this.manager.pending_lifecycle_script_tasks.fetchSub(1, .Monotonic); + + if (this.finished_fds == 2) { + this.deinit(); + } + return; + } + if (std.os.W.IFSIGNALED(result.status)) { + const signal = std.os.W.TERMSIG(result.status); + + if (this.finished_fds < 2) { + this.waitpid_result = result; + return; + } + this.printOutput(); + 
Output.prettyErrorln("error: {s} script from \"{s}\" exited with {any}", .{ + this.script_name, + this.package_name, + bun.SignalCode.from(signal), + }); + Output.flush(); + Global.exit(1); + } + if (std.os.W.IFSTOPPED(result.status)) { + const signal = std.os.W.STOPSIG(result.status); + + if (this.finished_fds < 2) { + this.waitpid_result = result; + return; + } + this.printOutput(); + Output.prettyErrorln("error: {s} script from \"{s}\" was stopped by signal {any}", .{ + this.script_name, + this.package_name, + bun.SignalCode.from(signal), + }); + Output.flush(); + Global.exit(1); + } + + std.debug.panic("{s} script from \"{s}\" hit unexpected state {{ .pid = {d}, .status = {d} }}", .{ this.script_name, this.package_name, result.pid, result.status }); + } + + pub fn resetPolls(this: *LifecycleScriptSubprocess) void { + std.debug.assert(this.finished_fds == 2); + + const loop = this.manager.uws_event_loop; + + if (!WaiterThread.shouldUseWaiterThread()) { + _ = this.pid_poll.unregister(loop, false); + // FD is already closed + } + } + + pub fn deinit(this: *LifecycleScriptSubprocess) void { + this.resetPolls(); + this.output_buffer.deinitWithAllocator(this.manager.allocator); + this.manager.allocator.destroy(this); + } + }; + + pub fn spawnPackageScripts( + manager: *PackageManager, + list: Lockfile.Package.Scripts.List, + envp: [:null]?[*:0]u8, + ) !void { + var lifecycle_subprocess = try manager.allocator.create(LifecycleScriptSubprocess); + lifecycle_subprocess.scripts = list.items; + lifecycle_subprocess.manager = manager; + lifecycle_subprocess.envp = envp; + + lifecycle_subprocess.spawnNextScript(list.first_index) catch |err| { + Output.prettyErrorln("error: Failed to run script {s} due to error {s}", .{ + Lockfile.Scripts.names[list.first_index], + @errorName(err), + }); + }; + + _ = manager.pending_lifecycle_script_tasks.fetchAdd(1, .Monotonic); + } + + pub fn runPackageScriptForeground( allocator: std.mem.Allocator, original_script: string, name: string, 
@@ -329,6 +839,7 @@ pub const RunCommand = struct { return true; } + pub fn runBinary( ctx: Command.Context, executable: []const u8, @@ -486,9 +997,7 @@ pub const RunCommand = struct { ctx: Command.Context, this_bundler: *bundler.Bundler, env: ?*DotEnv.Loader, - ORIGINAL_PATH: *string, log_errors: bool, - force_using_bun: bool, ) !*DirInfo { var args = ctx.args; this_bundler.* = try bundler.Bundler.init(ctx.allocator, ctx.log, args, env); @@ -525,8 +1034,6 @@ pub const RunCommand = struct { return error.CouldntReadCurrentDirectory; }; - var package_json_dir: string = ""; - if (env == null) { this_bundler.env.loadProcess(); @@ -552,88 +1059,6 @@ pub const RunCommand = struct { } } - var bin_dirs = this_bundler.resolver.binDirs(); - - if (root_dir_info.enclosing_package_json) |package_json| { - if (root_dir_info.package_json == null) { - // no trailing slash - package_json_dir = std.mem.trimRight(u8, package_json.source.path.name.dir, "/"); - } - } - - var PATH = this_bundler.env.map.get("PATH") orelse ""; - ORIGINAL_PATH.* = PATH; - - const found_node = this_bundler.env.loadNodeJSConfig( - this_bundler.fs, - if (force_using_bun) bun_node_dir ++ "/node" else "", - ) catch false; - - var needs_to_force_bun = force_using_bun or !found_node; - var optional_bun_self_path: string = ""; - - var new_path_len: usize = PATH.len + 2; - for (bin_dirs) |bin| { - new_path_len += bin.len + 1; - } - - if (package_json_dir.len > 0) { - new_path_len += package_json_dir.len + 1; - } - - new_path_len += root_dir_info.abs_path.len + "node_modules/.bin".len + 1; - - if (needs_to_force_bun) { - new_path_len += bun_node_dir.len + 1; - } - - var new_path = try std.ArrayList(u8).initCapacity(ctx.allocator, new_path_len); - - if (needs_to_force_bun) { - createFakeTemporaryNodeExecutable(&new_path, &optional_bun_self_path) catch unreachable; - if (!force_using_bun) { - this_bundler.env.map.put("NODE", bun_node_dir ++ "/node") catch unreachable; - this_bundler.env.map.put("npm_node_execpath", 
bun_node_dir ++ "/node") catch unreachable; - this_bundler.env.map.put("npm_execpath", optional_bun_self_path) catch unreachable; - } - - needs_to_force_bun = false; - } - - { - var needs_delim = false; - if (package_json_dir.len > 0) { - defer needs_delim = true; - if (needs_delim) { - try new_path.append(std.fs.path.delimiter); - } - try new_path.appendSlice(package_json_dir); - } - - var bin_dir_i: i32 = @as(i32, @intCast(bin_dirs.len)) - 1; - // Iterate in reverse order - // Directories are added to bin_dirs in top-down order - // That means the parent-most node_modules/.bin will be first - while (bin_dir_i >= 0) : (bin_dir_i -= 1) { - defer needs_delim = true; - if (needs_delim) { - try new_path.append(std.fs.path.delimiter); - } - try new_path.appendSlice(bin_dirs[@as(usize, @intCast(bin_dir_i))]); - } - - if (needs_delim) { - try new_path.append(std.fs.path.delimiter); - } - try new_path.appendSlice(root_dir_info.abs_path); - try new_path.appendSlice(bun.pathLiteral("node_modules/.bin")); - try new_path.append(std.fs.path.delimiter); - try new_path.appendSlice(PATH); - } - - this_bundler.env.map.put("PATH", new_path.items) catch unreachable; - PATH = new_path.items; - this_bundler.env.map.putDefault("npm_config_local_prefix", this_bundler.fs.top_level_dir) catch unreachable; // we have no way of knowing what version they're expecting without running the node executable @@ -673,6 +1098,93 @@ pub const RunCommand = struct { return root_dir_info; } + pub fn configurePathForRun( + ctx: Command.Context, + root_dir_info: *DirInfo, + this_bundler: *bundler.Bundler, + ORIGINAL_PATH: ?*string, + cwd: string, + force_using_bun: bool, + ) !void { + var package_json_dir: string = ""; + + if (root_dir_info.enclosing_package_json) |package_json| { + if (root_dir_info.package_json == null) { + // no trailing slash + package_json_dir = std.mem.trimRight(u8, package_json.source.path.name.dir, "/"); + } + } + + var PATH = this_bundler.env.map.get("PATH") orelse ""; + if 
(ORIGINAL_PATH) |original_path| { + original_path.* = PATH; + } + + const found_node = this_bundler.env.loadNodeJSConfig( + this_bundler.fs, + if (force_using_bun) bun_node_dir ++ "/node" else "", + ) catch false; + + var needs_to_force_bun = force_using_bun or !found_node; + var optional_bun_self_path: string = ""; + + var new_path_len: usize = PATH.len + 2; + + if (package_json_dir.len > 0) { + new_path_len += package_json_dir.len + 1; + } + + { + var remain = cwd; + while (strings.lastIndexOfChar(remain, std.fs.path.sep)) |i| { + new_path_len += strings.withoutTrailingSlash(remain).len + "node_modules.bin".len + 1 + 2; // +2 for path separators, +1 for path delimiter + remain = remain[0..i]; + } else { + new_path_len += strings.withoutTrailingSlash(remain).len + "node_modules.bin".len + 1 + 2; // +2 for path separators, +1 for path delimiter + } + } + + if (needs_to_force_bun) { + new_path_len += bun_node_dir.len + 1; + } + + var new_path = try std.ArrayList(u8).initCapacity(ctx.allocator, new_path_len); + + if (needs_to_force_bun) { + createFakeTemporaryNodeExecutable(&new_path, &optional_bun_self_path) catch unreachable; + if (!force_using_bun) { + this_bundler.env.map.put("NODE", bun_node_dir ++ "/node") catch unreachable; + this_bundler.env.map.put("npm_node_execpath", bun_node_dir ++ "/node") catch unreachable; + this_bundler.env.map.put("npm_execpath", optional_bun_self_path) catch unreachable; + } + + needs_to_force_bun = false; + } + + { + if (package_json_dir.len > 0) { + try new_path.appendSlice(package_json_dir); + try new_path.append(std.fs.path.delimiter); + } + + var remain = cwd; + while (strings.lastIndexOfChar(remain, std.fs.path.sep)) |i| { + try new_path.appendSlice(strings.withoutTrailingSlash(remain)); + try new_path.appendSlice(bun.pathLiteral("/node_modules/.bin")); + try new_path.append(std.fs.path.delimiter); + remain = remain[0..i]; + } else { + try new_path.appendSlice(strings.withoutTrailingSlash(remain)); + try 
new_path.appendSlice(bun.pathLiteral("/node_modules/.bin")); + try new_path.append(std.fs.path.delimiter); + } + + try new_path.appendSlice(PATH); + } + + this_bundler.env.map.put("PATH", new_path.items) catch unreachable; + } + pub fn completions(ctx: Command.Context, default_completions: ?[]const string, reject_list: []const string, comptime filter: Filter) !ShellCompletions { var shell_out = ShellCompletions{}; if (filter != .script_exclude) { @@ -1080,7 +1592,8 @@ pub const RunCommand = struct { var ORIGINAL_PATH: string = ""; var this_bundler: bundler.Bundler = undefined; - var root_dir_info = try configureEnvForRun(ctx, &this_bundler, null, &ORIGINAL_PATH, log_errors, force_using_bun); + var root_dir_info = try configureEnvForRun(ctx, &this_bundler, null, log_errors); + try configurePathForRun(ctx, root_dir_info, &this_bundler, &ORIGINAL_PATH, root_dir_info.abs_path, force_using_bun); this_bundler.env.map.put("npm_lifecycle_event", script_name_to_search) catch unreachable; if (root_dir_info.enclosing_package_json) |package_json| { if (package_json.scripts) |scripts| { @@ -1093,11 +1606,11 @@ pub const RunCommand = struct { else => { if (scripts.get(script_name_to_search)) |script_content| { // allocate enough to hold "post${scriptname}" - var temp_script_buffer = try std.fmt.allocPrint(ctx.allocator, "ppre{s}", .{script_name_to_search}); + defer ctx.allocator.free(temp_script_buffer); if (scripts.get(temp_script_buffer[1..])) |prescript| { - if (!try runPackageScript( + if (!try runPackageScriptForeground( ctx.allocator, prescript, temp_script_buffer[1..], @@ -1110,7 +1623,7 @@ pub const RunCommand = struct { } } - if (!try runPackageScript( + if (!try runPackageScriptForeground( ctx.allocator, script_content, script_name_to_search, @@ -1123,7 +1636,7 @@ pub const RunCommand = struct { temp_script_buffer[0.."post".len].* = "post".*; if (scripts.get(temp_script_buffer)) |postscript| { - if (!try runPackageScript( + if (!try runPackageScriptForeground( 
ctx.allocator, postscript, temp_script_buffer, diff --git a/src/env_loader.zig b/src/env_loader.zig index 5a2e7e29cd..b134c09f3c 100644 --- a/src/env_loader.zig +++ b/src/env_loader.zig @@ -1015,6 +1015,8 @@ pub const Map = struct { }; const HashTable = bun.StringArrayHashMap(HashTableValue); + const GetOrPutResult = HashTable.GetOrPutResult; + map: HashTable, pub fn createNullDelimitedEnvMap(this: *Map, arena: std.mem.Allocator) ![:null]?[*:0]u8 { @@ -1032,7 +1034,7 @@ pub const Map = struct { bun.copy(u8, env_buf[pair.key_ptr.len + 1 ..], pair.value_ptr.value); envp_buf[i] = env_buf.ptr; } - std.debug.assert(i == envp_count); + if (comptime Environment.allow_assert) std.debug.assert(i == envp_count); } return envp_buf; } @@ -1071,6 +1073,10 @@ pub const Map = struct { }); } + pub inline fn getOrPutWithoutValue(this: *Map, key: string) !GetOrPutResult { + return this.map.getOrPut(key); + } + pub fn jsonStringify(self: *const @This(), writer: anytype) !void { var iterator = self.map.iterator(); diff --git a/src/install/default-trusted-dependencies.txt b/src/install/default-trusted-dependencies.txt new file mode 100644 index 0000000000..efdb12bdcb --- /dev/null +++ b/src/install/default-trusted-dependencies.txt @@ -0,0 +1,500 @@ +@airbnb/node-memwatch +@alaskaairux/icons +@antv/l7-react +@apollo/protobufjs +@apollo/rover +@applitools/eyes-storybook +@appsignal/nodejs +@arkweid/lefthook +@aws-amplify/cli +@azure/msal-node-extensions +@bahmutov/add-typescript-to-cypress +@bazel/concatjs +@bazel/cypress +@bazel/esbuild +@bazel/hide-bazel-files +@bazel/jasmine +@bazel/protractor +@bazel/rollup +@bazel/terser +@bazel/typescript +@bufbuild/buf +@carbon/charts +@carbon/charts-angular +@carbon/charts-react +@carbon/ibm-products +@carbon/icons-react +@carbon/pictograms-react +@carbon/react +@cdktf/node-pty-prebuilt-multiarch +@ckeditor/ckeditor5-react +@ckeditor/ckeditor5-vue +@cloudflare/wrangler +@compodoc/compodoc +@contrast/fn-inspect +@cubejs-backend/cubestore 
+@cubejs-backend/native +@cypress/snapshot +@danmarshall/deckgl-typings +@databricks/sql +@datadog/mobile-react-native +@datadog/native-appsec +@datadog/native-metrics +@datadog/pprof +@discordjs/opus +@eversdk/lib-node +@evilmartians/lefthook +@ffmpeg-installer/darwin-arm64 +@ffmpeg-installer/darwin-x64 +@ffmpeg-installer/linux-arm +@ffmpeg-installer/linux-arm64 +@ffmpeg-installer/linux-ia32 +@ffmpeg-installer/linux-x64 +@ffprobe-installer/darwin-arm64 +@ffprobe-installer/darwin-x64 +@ffprobe-installer/linux-arm +@ffprobe-installer/linux-arm64 +@ffprobe-installer/linux-ia32 +@ffprobe-installer/linux-x64 +@fingerprintjs/fingerprintjs-pro-react +@fortawesome/fontawesome-common-types +@fortawesome/fontawesome-free +@fortawesome/fontawesome-svg-core +@fortawesome/free-brands-svg-icons +@fortawesome/free-regular-svg-icons +@fortawesome/free-solid-svg-icons +@ghaiklor/x509 +@go-task/cli +@gql2ts/language-typescript +@injectivelabs/sdk-ts +@instana/autoprofile +@intlify/vue-i18n-bridge +@intlify/vue-router-bridge +@lightdash/cli +@matteodisabatino/gc_info +@memlab/cli +@microsoft.azure/autorest-core +@microsoft/teamsfx-cli +@microsoft/ts-command-line +@napi-rs/canvas-linux-x64-gnu +@napi-rs/canvas-linux-x64-musl +@napi-rs/pinyin +@napi-rs/simple-git-linux-arm64-gnu +@napi-rs/simple-git-linux-arm64-musl +@napi-rs/simple-git-linux-x64-gnu +@napi-rs/simple-git-linux-x64-musl +@nativescript/core +@nestjs/core +@netlify/esbuild +@newrelic/native-metrics +@notarize/qlc-cli +@nx-dotnet/core +@openapitools/openapi-generator-cli +@opensea/seaport-js +@opensearch-project/oui +@opentelemetry/instrumentation-grpc +@pact-foundation/pact-core +@pact-foundation/pact-node +@paloaltonetworks/postman-code-generators +@parcel/watcher +@pdftron/pdfnet-node +@percy/core +@pnpm/exe +@prisma/client +@prisma/engines +@progress/kendo-licensing +@pulumi/aws +@pulumi/aws-native +@pulumi/awsx +@pulumi/azure +@pulumi/azure-native +@pulumi/cloudflare +@pulumi/command +@pulumi/datadog +@pulumi/docker 
+@pulumi/gcp +@pulumi/github +@pulumi/kubernetes +@pulumi/postgresql +@pulumi/random +@pulumi/tls +@replayio/cypress +@replayio/playwright +@root/acme +@roots/bud-framework +@sanity/eslint-config-studio +@sap/hana-client +@sap/hana-performance-tools +@sap/hana-theme-vscode +@scarf/scarf +@sematext/gc-stats +@sentry/capacitor +@sentry/cli +@sentry/profiling-node +@serialport/bindings +@serialport/bindings-cpp +@shopify/ngrok +@shopify/plugin-cloudflare +@shopify/react-native-skia +@sitespeed.io/chromedriver +@sitespeed.io/edgedriver +@softvisio/core +@splunk/otel +@strapi/strapi +@substrate/connect +@sveltejs/kit +@swc/core +@syncfusion/ej2-angular-base +@taquito/taquito +@tds/core-colours +@temporalio/core-bridge +@tensorflow/tfjs-node +@trufflesuite/bigint-buffer +@trumbitta/nx-plugin-unused-deps +@typescript-tools/rust-implementation +@vaadin/vaadin-usage-statistics +@vscode/ripgrep +@vscode/sqlite3 +abstract-socket +admin-lte +appdynamics +appium-chromedriver +appium-windows-driver +applicationinsights-native-metrics +argon2 +autorest +avo +aws-crt +azure-arm-cdn +azure-arm-compute +azure-arm-network +azure-arm-storage +azure-functions-core-tools +azure-streamanalytics-cicd +babylonjs +backport +baseui +bcrypt +better-sqlite3 +bigint-buffer +bigscreen-player +blake-hash +bootstrap-fileinput +bootstrap-vue +browser-tabs-lock +bs-platform +bufferutil +bun +canvacord +canvas +carbon-addons-iot-react +carbon-components +carbon-components-angular +carbon-components-react +cbor-extract +ccxt +chromedriver +chromium +classic-level +cld +cldr-data +clevertap-react-native +clientjs +cmark-gfm +command-join +commitlint-config-jira +compresion +contentlayer +contextify +cordova.plugins.diagnostic +core-js-bundle +couchbase +cpu-features +cwebp-bin +cy2 +cypress +data-forge +dd-trace +deasync +detox +detox-recorder +discord-economy-super +diskusage +docsify +dooboolab-welcome +dotnet-2.0.0 +dprint +drivelist +dtrace-provider +duckdb +dugite +eccrypto +egg-bin +egg-ci 
+electron +electron-chromedriver +electron-prebuilt +electron-winstaller +elm +elm-format +es5-ext +esbuild +esoftplay +event-loop-stats +exifreader +external-svg-loader +farmhash +fast-folder-size +faunadb +ffi +ffi-napi +ffmpeg-static +fibers +fmerge +free-email-domains +fs-xattr +full-icu +gatsby +gatsby-cli +gatsby-telemetry +gc-stats +gcstats.js +geckodriver +gentype +ghooks +gif2webp-bin +gifsicle +git-commit-msg-linter +git-validate +git-win +gl +gmsmith +go-ios +grpc +grpc-tools +handbrake-js +hasura-cli +heapdump +highcharts-export-server +hiredis +hnswlib-node +hugo-bin +hummus +ibm_db +iconv +iedriver +iltorb +incremental-json-parser +inferno +install-peers +interruptor +iobroker.js-controller +iso-constants +isolated-vm +java +javascript-obfuscator +jest-preview +jpeg-recompress-bin +jpegtran-bin +keccak +kerberos +keytar +lefthook +leveldown +libpg-query +libpq +libxmljs +libxmljs2 +lint +lmdb +lmdb-store +local-cypress +lz4 +lzma-native +lzo +macos-alias +mbt +medusa-telemetry +memlab +metalsmith +microtime +minidump +mmmagic +modern-syslog +monaco-languageclient +mongodb-client-encryption +mongodb-crypt-library-dummy +mongodb-crypt-library-version +mongodb-memory-server +mozjpeg +ms-chromium-edge-driver +msgpackr-extract +msnodesqlv8 +msw +muhammara +neo4j-bloom +nestjs-pino +netlify-cli +next-plugin-preact +next-translate-plugin +ngrok +ngx-popperjs +nice-napi +node +node-expat +node-hid +node-jq +node-libcurl +node-pty +node-rdkafka +node-sass +node-webcrypto-ossl +node-zopfli +node-zopfli-es +nodegit +nodejieba +nodent-runtime +nuxt-edge +nx +odiff-bin +oniguruma +optipng-bin +oracledb +os-dns-native +parcel-bundler +parse-server +phantomjs +phantomjs-prebuilt +pkcs11js +playwright +playwright-chromium +playwright-firefox +playwright-webkit +pngout-bin +pngquant-bin +poolifier +posix +postinstall-postinstall +postinstall-prepare +pprof +pre-commit +pre-push +prisma +protobufjs +protoc +protoc-gen-grpc-web +puppeteer +quick-mongo-super +re2 
+react-intl-universal +react-jsx-parser +react-native-calendar-picker +react-native-elements +react-native-inappbrowser-reborn +react-native-storage +react-native-stylex +react-native-unimodules +react-particles +react-ranger +react-tsparticles +react-vertical-timeline-component +realm +redis-memory-server +ref +ref-napi +registry-js +restana +rete +robotjs +rome +rovel.js +rxdb +sauce-connect-launcher +saucectl +scrollreveal +secp256k1 +segfault-handler +serverless +sfdx-cli +shared-git-hooks +sharp +simple-git-hooks +sleep +slice2js +snyk +sockopt +sodium-native +sonar-scanner +spectaql +spectron +spellchecker +sq-native +sqlite3 +sse4_crc32 +ssh2 +storage-engine +strapi +subrequests +subrequests-express +subrequests-json-merger +supabase +svelte-preprocess +svf-lib +swagger-ui +swiftlint +taco-cli +taiko +tesseract.js +tldjs +tree-sitter +tree-sitter-cli +tree-sitter-json +tree-sitter-kotlin +tree-sitter-typescript +tree-sitter-yaml +truffle +tsparticles-engine +ttag-cli +ttf2woff2 +turbo +typeit +typemoq +typeorm-fixtures-cli +typesense-instantsearch-adapter +unix-dgram +ursa-optional +usb +utf-8-validate +v8-profiler-next +vercel +vis-data +vis-network +vis-timeline +vue-demi +vue-echarts +vue-inbrowser-compiler-demi +vue-ls +vue-material +vue-popperjs +vue-test-utils +vuepress +vuex-module-decorators +wd +wdeasync +weak-napi +web3-bzz +web3-shh +webdev-toolkit +windows-build-tools +wix-style-react +wordpos +workerd +wrtc +xxhash +yarn +yo +yorkie +zapatos +zeromq +zlib-sync +zopflipng-bin \ No newline at end of file diff --git a/src/install/install-scripts-allowlist.txt b/src/install/install-scripts-allowlist.txt deleted file mode 100644 index fdd5d1c068..0000000000 --- a/src/install/install-scripts-allowlist.txt +++ /dev/null @@ -1,4 +0,0 @@ -static-ffmpeg -canvas -better-sqlite3 -node-sass \ No newline at end of file diff --git a/src/install/install.zig b/src/install/install.zig index 3b6d7896bf..ae344384ea 100644 --- a/src/install/install.zig +++ 
b/src/install/install.zig @@ -9,6 +9,9 @@ const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; const C = bun.C; const std = @import("std"); +const uws = @import("../deps/uws.zig"); +const JSC = bun.JSC; +const DirInfo = @import("../resolver/dir_info.zig"); const JSLexer = bun.js_lexer; const logger = bun.logger; @@ -137,11 +140,17 @@ pub fn ExternalSliceAligned(comptime Type: type, comptime alignment_: ?u29) type } pub inline fn get(this: Slice, in: []const Type) []const Type { + if (comptime Environment.allow_assert) { + std.debug.assert(this.off + this.len <= in.len); + } // it should be impossible to address this out of bounds due to the minimum here return in.ptr[this.off..@min(in.len, this.off + this.len)]; } pub inline fn mut(this: Slice, in: []Type) []Type { + if (comptime Environment.allow_assert) { + std.debug.assert(this.off + this.len <= in.len); + } return in.ptr[this.off..@min(in.len, this.off + this.len)]; } @@ -1518,6 +1527,26 @@ const PackageInstall = struct { this.destination_dir.dir.deleteTree(bun.span(this.destination_dir_subpath)) catch {}; } + pub fn uninstallBeforeInstall(this: *PackageInstall) void { + // TODO(dylan-conway): depth first package installation to allow lifecycle scripts to start earlier + // + // if (this.install_order == .depth_first) { + // var subpath_dir = this.destination_dir.dir.openIterableDir(this.destination_dir_subpath, .{}) catch return; + // defer subpath_dir.close(); + // var iter = subpath_dir.iterateAssumeFirstIteration(); + // while (iter.next() catch null) |entry| { + // // skip node_modules because installation is depth first + // if (entry.kind != .directory or !strings.eqlComptime(entry.name, "node_modules")) { + // this.destination_dir.dir.deleteTree(entry.name) catch {}; + // } + // } + // } else { + // this.destination_dir.dir.deleteTree(bun.span(this.destination_dir_subpath)) catch {}; + // } + + this.destination_dir.dir.deleteTree(bun.span(this.destination_dir_subpath)) catch {}; 
+ } + fn isDanglingSymlink(path: [:0]const u8) bool { if (comptime Environment.isLinux) { const rc = Syscall.system.open(path, @as(u32, std.os.O.PATH | 0), @as(u32, 0)); @@ -1545,7 +1574,7 @@ const PackageInstall = struct { const dest_path = this.destination_dir_subpath; // If this fails, we don't care. // we'll catch it the next error - if (!skip_delete and !strings.eqlComptime(dest_path, ".")) this.uninstall(); + if (!skip_delete and !strings.eqlComptime(dest_path, ".")) this.uninstallBeforeInstall(); const subdir = std.fs.path.dirname(dest_path); var dest_dir = if (subdir) |dir| brk: { @@ -1611,7 +1640,7 @@ const PackageInstall = struct { // If this fails, we don't care. // we'll catch it the next error - if (!skip_delete and !strings.eqlComptime(this.destination_dir_subpath, ".")) this.uninstall(); + if (!skip_delete and !strings.eqlComptime(this.destination_dir_subpath, ".")) this.uninstallBeforeInstall(); var supported_method_to_use = if (strings.eqlComptime(this.cache_dir_subpath, ".") or strings.hasPrefixComptime(this.cache_dir_subpath, "..")) Method.symlink @@ -1719,7 +1748,10 @@ const Progress = std.Progress; const TaggedPointer = @import("../tagged_pointer.zig"); const TaskCallbackContext = union(Tag) { dependency: DependencyID, - node_modules_folder: bun.FileDescriptor, + node_modules_folder: struct { + fd: bun.FileDescriptor, + tree_id: Lockfile.Tree.Id, + }, root_dependency: DependencyID, root_request_id: PackageID, pub const Tag = enum { @@ -1750,6 +1782,51 @@ pub const CacheLevel = struct { const AsyncIO = bun.AsyncIO; const Waker = if (Environment.isPosix) bun.AsyncIO.Waker else *bun.uws.UVLoop; +const Waiter = struct { + onWait: *const fn (this: *anyopaque) AsyncIO.Errno!usize, + onWake: *const fn (this: *anyopaque) void, + ctx: *anyopaque, + + pub fn init( + ctx: anytype, + comptime onWait: *const fn (this: @TypeOf(ctx)) AsyncIO.Errno!usize, + comptime onWake: *const fn (this: @TypeOf(ctx)) void, + ) Waiter { + return Waiter{ + .ctx = 
@ptrCast(ctx), + .onWait = @alignCast(@ptrCast(@as(*const anyopaque, @ptrCast(onWait)))), + .onWake = @alignCast(@ptrCast(@as(*const anyopaque, @ptrCast(onWake)))), + }; + } + + pub fn wait(this: *Waiter) AsyncIO.Errno!usize { + return this.onWait(this.ctx); + } + + pub fn wake(this: *Waiter) void { + this.onWake(this.ctx); + } + + pub fn fromUWSLoop(loop: *uws.Loop) Waiter { + const Handlers = struct { + fn onWait(uws_loop: *uws.Loop) AsyncIO.Errno!usize { + uws_loop.run(); + return 0; + } + + fn onWake(uws_loop: *uws.Loop) void { + uws_loop.wakeup(); + } + }; + + return Waiter.init( + loop, + Handlers.onWait, + Handlers.onWake, + ); + } +}; + // We can't know all the packages we need until we've downloaded all the packages // The easy way would be: // 1. Download all packages, parsing their dependencies and enqueuing all dependencies for resolution @@ -1768,6 +1845,7 @@ pub const PackageManager = struct { env: *DotEnv.Loader, progress: Progress = .{}, downloads_node: ?*Progress.Node = null, + scripts_node: ?*Progress.Node = null, progress_name_buf: [768]u8 = undefined, progress_name_buf_dynamic: []u8 = &[_]u8{}, cpu_count: u32 = 0, @@ -1778,7 +1856,6 @@ pub const PackageManager = struct { // progress bar stuff when not stack allocated root_progress_node: *std.Progress.Node = undefined, - root_download_node: std.Progress.Node = undefined, to_remove: []const UpdateRequest = &[_]UpdateRequest{}, to_update: bool = false, @@ -1802,16 +1879,27 @@ pub const PackageManager = struct { preallocated_network_tasks: PreallocatedNetworkTasks = .{ .buffer = undefined, .len = 0 }, pending_tasks: u32 = 0, total_tasks: u32 = 0, + pending_lifecycle_script_tasks: std.atomic.Atomic(u32) = std.atomic.Atomic(u32).init(0), + finished_installing: std.atomic.Atomic(bool) = std.atomic.Atomic(bool).init(false), + + total_scripts: usize = 0, + + root_lifecycle_scripts: ?Package.Scripts.List = null, + + env_configure: ?struct { + root_dir_info: *DirInfo, + bundler: bundler.Bundler, + } = 
null, lockfile: *Lockfile = undefined, - options: Options = .{}, + options: Options, preinstall_state: std.ArrayListUnmanaged(PreinstallState) = .{}, global_link_dir: ?std.fs.IterableDir = null, global_dir: ?std.fs.IterableDir = null, global_link_dir_path: string = "", - waiter: Waker = undefined, + waiter: Waiter = undefined, wait_count: std.atomic.Atomic(usize) = std.atomic.Atomic(usize).init(0), onWake: WakeHandler = .{}, @@ -1819,6 +1907,9 @@ pub const PackageManager = struct { peer_dependencies: std.fifo.LinearFifo(DependencyID, .Dynamic) = std.fifo.LinearFifo(DependencyID, .Dynamic).init(default_allocator), + uws_event_loop: *uws.Loop, + file_poll_store: bun.Async.FilePoll.Store, + // name hash from alias package name -> aliased package dependency version info known_npm_aliases: NpmAliasMap = .{}, @@ -1833,6 +1924,70 @@ pub const PackageManager = struct { 80, ); + pub fn configureEnvForScripts(this: *PackageManager, ctx: Command.Context, log_level: Options.LogLevel) !struct { *DirInfo, bundler.Bundler } { + if (this.env_configure) |env_configure| { + return .{ env_configure.root_dir_info, env_configure.bundler }; + } + + // We need to figure out the PATH and other environment variables + // to do that, we re-use the code from bun run + // this is expensive, it traverses the entire directory tree going up to the root + // so we really only want to do it when strictly necessary + var this_bundler: bundler.Bundler = undefined; + var ORIGINAL_PATH: string = ""; + + const root_dir_info = try RunCommand.configureEnvForRun( + ctx, + &this_bundler, + this.env, + log_level != .silent, + ); + + var init_cwd_gop = try this.env.map.getOrPutWithoutValue("INIT_CWD"); + if (!init_cwd_gop.found_existing) { + init_cwd_gop.value_ptr.* = .{ + .value = try ctx.allocator.dupe(u8, FileSystem.instance.top_level_dir), + .conditional = false, + }; + } + + { + // if they have ccache installed, put it in env variable `CMAKE_CXX_COMPILER_LAUNCHER` so + // cmake can use it to hopefully 
speed things up + var buf: [bun.MAX_PATH_BYTES]u8 = undefined; + const ccache_path = bun.which( + &buf, + ORIGINAL_PATH, + FileSystem.instance.top_level_dir, + "ccache", + ) orelse ""; + + if (ccache_path.len > 0) { + var cxx_gop = try this.env.map.getOrPutWithoutValue("CMAKE_CXX_COMPILER_LAUNCHER"); + if (!cxx_gop.found_existing) { + cxx_gop.value_ptr.* = .{ + .value = try this.env.allocator.dupe(u8, "ccache"), + .conditional = false, + }; + } + var c_gop = try this.env.map.getOrPutWithoutValue("CMAKE_C_COMPILER_LAUNCHER"); + if (!c_gop.found_existing) { + c_gop.value_ptr.* = .{ + .value = try this.env.allocator.dupe(u8, "ccache"), + .conditional = false, + }; + } + } + } + + this.env_configure = .{ + .root_dir_info = root_dir_info, + .bundler = this_bundler, + }; + + return .{ this.env_configure.?.root_dir_info, this.env_configure.?.bundler }; + } + pub fn httpProxy(this: *PackageManager, url: URL) ?URL { return this.env.getHttpProxy(url); } @@ -1888,7 +2043,7 @@ pub const PackageManager = struct { pub fn sleep(this: *PackageManager) void { if (this.wait_count.swap(0, .Monotonic) > 0) return; Output.flush(); - this.waiter.wait(); + _ = this.waiter.wait() catch 0; } const DependencyToEnqueue = union(enum) { @@ -2134,15 +2289,15 @@ pub const PackageManager = struct { ) void { if (Output.isEmojiEnabled()) { if (is_first) { - bun.copy(u8, &this.progress_name_buf, emoji); - bun.copy(u8, this.progress_name_buf[emoji.len..], name); + @memcpy(this.progress_name_buf[0..emoji.len], emoji); + @memcpy(this.progress_name_buf[emoji.len..][0..name.len], name); node.name = this.progress_name_buf[0 .. emoji.len + name.len]; } else { - bun.copy(u8, this.progress_name_buf[emoji.len..], name); + @memcpy(this.progress_name_buf[emoji.len..][0..name.len], name); node.name = this.progress_name_buf[0 .. 
emoji.len + name.len]; } } else { - bun.copy(u8, &this.progress_name_buf, name); + @memcpy(this.progress_name_buf[0..name.len], name); node.name = this.progress_name_buf[0..name.len]; } } @@ -3253,7 +3408,7 @@ pub const PackageManager = struct { } // allow overriding all dependencies unless the dependency is coming directly from an alias, "npm:" - if (dependency.version.tag != .npm or !dependency.version.value.npm.is_alias) { + if (dependency.version.tag != .npm or !dependency.version.value.npm.is_alias and this.lockfile.hasOverrides()) { if (this.lockfile.overrides.get(name_hash)) |new| { debug("override: {s} -> {s}", .{ this.lockfile.str(&dependency.version.literal), this.lockfile.str(&new.literal) }); name = switch (new.tag) { @@ -4862,6 +5017,8 @@ pub const PackageManager = struct { manager.drainDependencyList(); + manager.uws_event_loop.tickWithoutIdle(); + if (comptime log_level.showProgress()) { if (@hasField(@TypeOf(callbacks), "progress_bar") and callbacks.progress_bar == true) { const completed_items = manager.total_tasks - manager.pending_tasks; @@ -4915,6 +5072,8 @@ pub const PackageManager = struct { max_retry_count: u16 = 5, min_simultaneous_requests: usize = 4, + max_concurrent_lifecycle_scripts: usize, + pub fn shouldPrintCommandName(this: *const Options) bool { return this.log_level != .silent and this.do.summary; } @@ -5127,6 +5286,10 @@ pub const PackageManager = struct { } } + if (bun_install.concurrent_scripts) |jobs| { + this.max_concurrent_lifecycle_scripts = jobs; + } + if (bun_install.save_optional) |save| { this.remote_package_features.optional_dependencies = save; this.local_package_features.optional_dependencies = save; @@ -5399,7 +5562,7 @@ pub const PackageManager = struct { }; }; - const ProgressStrings = struct { + pub const ProgressStrings = struct { pub const download_no_emoji_ = "Resolving"; const download_no_emoji: string = download_no_emoji_ ++ "\n"; const download_with_emoji: string = download_emoji ++ download_no_emoji_; @@ 
-5420,6 +5583,11 @@ pub const PackageManager = struct { const save_with_emoji: string = save_emoji ++ save_no_emoji_; pub const save_emoji: string = " 🔒 "; + pub const script_no_emoji_ = "Running script"; + const script_no_emoji: string = script_no_emoji_ ++ "\n"; + const script_with_emoji: string = script_emoji ++ script_no_emoji_; + pub const script_emoji: string = " ⚙️ "; + pub inline fn download() string { return if (Output.isEmojiEnabled()) download_with_emoji else download_no_emoji; } @@ -5435,6 +5603,10 @@ pub const PackageManager = struct { pub inline fn install() string { return if (Output.isEmojiEnabled()) install_with_emoji else install_no_emoji; } + + pub inline fn script() string { + return if (Output.isEmojiEnabled()) script_with_emoji else script_no_emoji; + } }; const PackageJSONEditor = struct { @@ -5829,9 +6001,6 @@ pub const PackageManager = struct { package_json_cwd = try bun.getFdPath(package_json_file.handle, &package_json_cwd_buf); var entries_option = try fs.fs.readDirectory(fs.top_level_dir, null, 0, true); - var options = Options{ - .global = cli.global, - }; var env: *DotEnv.Loader = brk: { var map = try ctx.allocator.create(DotEnv.Map); @@ -5845,15 +6014,6 @@ pub const PackageManager = struct { env.loadProcess(); try env.load(entries_option.entries, &[_][]u8{}, .production); - if (env.map.get("BUN_INSTALL_VERBOSE") != null) { - PackageManager.verbose_install = true; - } - - if (PackageManager.verbose_install) { - Output.prettyErrorln("Cache Dir: {s}", .{options.cache_directory}); - Output.flush(); - } - var cpu_count = @as(u32, @truncate(((try std.Thread.getCpuCount()) + 1))); if (env.map.get("GOMAXPROCS")) |max_procs| { @@ -5862,6 +6022,24 @@ pub const PackageManager = struct { } else |_| {} } + var options = Options{ + .global = cli.global, + .max_concurrent_lifecycle_scripts = cli.concurrent_scripts orelse cpu_count * 2, + }; + + if (env.map.get("BUN_INSTALL_VERBOSE") != null) { + PackageManager.verbose_install = true; + } + + if 
(env.map.get("BUN_FEATURE_FLAG_FORCE_WAITER_THREAD") != null) { + JSC.Subprocess.WaiterThread.setShouldUseWaiterThread(); + } + + if (PackageManager.verbose_install) { + Output.prettyErrorln("Cache Dir: {s}", .{options.cache_directory}); + Output.flush(); + } + var workspaces = std.StringArrayHashMap(Semver.Version).init(ctx.allocator); for (workspace_names.values()) |entry| { if (entry.version) |version_string| { @@ -5893,9 +6071,11 @@ pub const PackageManager = struct { .resolve_tasks = TaskChannel.init(), .lockfile = undefined, .root_package_json_file = package_json_file, - .waiter = if (Environment.isPosix) try Waker.init(ctx.allocator) else bun.uws.Loop.get(), + .waiter = Waiter.fromUWSLoop(uws.Loop.get()), .workspaces = workspaces, // .progress + .uws_event_loop = uws.Loop.get(), + .file_poll_store = bun.Async.FilePoll.Store.init(ctx.allocator), }; manager.lockfile = try ctx.allocator.create(Lockfile); @@ -5969,7 +6149,9 @@ pub const PackageManager = struct { // var progress = Progress{}; // var node = progress.start(name: []const u8, estimated_total_items: usize) manager.* = PackageManager{ - .options = .{}, + .options = .{ + .max_concurrent_lifecycle_scripts = cli.concurrent_scripts orelse cpu_count * 2, + }, .network_task_fifo = NetworkQueue.init(), .allocator = allocator, .log = log, @@ -5982,7 +6164,9 @@ pub const PackageManager = struct { .resolve_tasks = TaskChannel.init(), .lockfile = undefined, .root_package_json_file = undefined, - .waiter = if (Environment.isPosix) try Waker.init(allocator) else bun.uws.Loop.get(), + .waiter = Waiter.fromUWSLoop(uws.Loop.get()), + .uws_event_loop = uws.Loop.get(), + .file_poll_store = bun.Async.FilePoll.Store.init(allocator), .workspaces = std.StringArrayHashMap(Semver.Version).init(allocator), }; manager.lockfile = try allocator.create(Lockfile); @@ -5991,7 +6175,6 @@ pub const PackageManager = struct { manager.progress = Progress{}; manager.progress.supports_ansi_escape_codes = Output.enable_ansi_colors_stderr; 
manager.root_progress_node = manager.progress.start("", 0); - manager.root_download_node = manager.root_progress_node.start(ProgressStrings.download(), 0); } else { manager.options.log_level = .default_no_progress; } @@ -6378,13 +6561,13 @@ pub const PackageManager = struct { "Possible values: \"hardlink\" (default), \"symlink\", \"copyfile\""; const install_params_ = [_]ParamType{ - clap.parseParam("-c, --config ? Specify path to config file (bunfig.toml)") catch unreachable, + clap.parseParam("-c, --config ? Specify path to config file (bunfig.toml)") catch unreachable, clap.parseParam("-y, --yarn Write a yarn.lock file (yarn v1)") catch unreachable, clap.parseParam("-p, --production Don't install devDependencies") catch unreachable, clap.parseParam("--no-save Don't update package.json or save a lockfile") catch unreachable, clap.parseParam("--save Save to package.json (true by default)") catch unreachable, clap.parseParam("--dry-run Don't install anything") catch unreachable, - clap.parseParam("--frozen-lockfile Disallow changes to lockfile") catch unreachable, + clap.parseParam("--frozen-lockfile Disallow changes to lockfile") catch unreachable, clap.parseParam("-f, --force Always request the latest versions from the registry & reinstall all dependencies") catch unreachable, clap.parseParam("--cache-dir Store & load cached data from a specific directory path") catch unreachable, clap.parseParam("--no-cache Ignore manifest cache entirely") catch unreachable, @@ -6398,6 +6581,7 @@ pub const PackageManager = struct { clap.parseParam("--cwd Set a specific cwd") catch unreachable, clap.parseParam("--backend Platform-specific optimizations for installing dependencies. " ++ platform_specific_backend_label) catch unreachable, clap.parseParam("--link-native-bins ... Link \"bin\" from a matching platform-specific \"optionalDependencies\" instead. 
Default: esbuild, turbo") catch unreachable, + clap.parseParam("--concurrent-scripts Maximum number of concurrent jobs for lifecycle scripts (default 5)") catch unreachable, // clap.parseParam("--omit ... Skip installing dependencies of a certain type. \"dev\", \"optional\", or \"peer\"") catch unreachable, // clap.parseParam("--no-dedupe Disable automatic downgrading of dependencies that would otherwise cause unnecessary duplicate package versions ($BUN_CONFIG_NO_DEDUPLICATE)") catch unreachable, clap.parseParam("-h, --help Print this help menu") catch unreachable, @@ -6476,6 +6660,8 @@ pub const PackageManager = struct { exact: bool = false, + concurrent_scripts: ?usize = null, + const Omit = struct { dev: bool = false, optional: bool = true, @@ -6697,6 +6883,11 @@ pub const PackageManager = struct { cli.exact = args.flag("--exact"); } + if (args.option("--concurrent-scripts")) |concurrency| { + // var buf: [] + cli.concurrent_scripts = std.fmt.parseInt(usize, concurrency, 10) catch null; + } + // for (args.options("--omit")) |omit| { // if (strings.eqlComptime(omit, "dev")) { // cli.omit.dev = true; @@ -7281,19 +7472,137 @@ pub const PackageManager = struct { global_bin_dir: std.fs.IterableDir, destination_dir_subpath_buf: [bun.MAX_PATH_BYTES]u8 = undefined, folder_path_buf: [bun.MAX_PATH_BYTES]u8 = undefined, - install_count: usize = 0, successfully_installed: Bitset, tree_iterator: *Lockfile.Tree.Iterator, + command_ctx: Command.Context, + current_tree_id: Lockfile.Tree.Id = Lockfile.Tree.invalid_id, - // For linking native binaries, we only want to link after we've installed the companion dependencies - // We don't want to introduce dependent callbacks like that for every single package - // Since this will only be a handful, it's fine to just say "run this at the end" - platform_binlinks: std.ArrayListUnmanaged(DeferredBinLink) = std.ArrayListUnmanaged(DeferredBinLink){}, + // fields used for running lifecycle scripts when it's safe + // + /// set of 
completed tree ids + completed_trees: Bitset, + /// tree id to number of successfully installed deps for id. when count == tree.dependencies.len, mark as complete above + tree_install_counts: []usize, + /// the tree ids a tree depends on before it can run the lifecycle scripts of it's immediate dependencies + tree_ids_to_trees_the_id_depends_on: Bitset.List, + pending_lifecycle_scripts: std.ArrayListUnmanaged(struct { + list: Lockfile.Package.Scripts.List, + tree_id: Lockfile.Tree.Id, + }) = .{}, - pub const DeferredBinLink = struct { - dependency_id: DependencyID, - node_modules_folder: std.fs.IterableDir, - }; + /// Increments the number of installed packages for a tree id and runs available scripts + /// if the tree is finished. + pub fn incrementTreeInstallCount(this: *PackageInstaller, tree_id: Lockfile.Tree.Id, comptime log_level: Options.LogLevel) void { + if (comptime Environment.allow_assert) { + std.debug.assert(tree_id != Lockfile.Tree.invalid_id); + } + + const trees = this.lockfile.buffers.trees.items; + this.tree_install_counts[tree_id] += 1; + if (this.tree_install_counts[tree_id] >= trees[tree_id].dependencies.len) { + this.completed_trees.set(tree_id); + this.runAvailableScripts(log_level); + } + } + + pub fn runAvailableScripts(this: *PackageInstaller, comptime log_level: Options.LogLevel) void { + var i: usize = this.pending_lifecycle_scripts.items.len; + while (i > 0) { + i -= 1; + const entry = this.pending_lifecycle_scripts.items[i]; + const name = entry.list.first().package_name; + const tree_id = entry.tree_id; + if (this.canRunScripts(tree_id)) { + _ = this.pending_lifecycle_scripts.swapRemove(i); + this.manager.spawnPackageLifecycleScripts(this.command_ctx, entry.list, log_level) catch |err| { + if (comptime log_level != .silent) { + const fmt = "\nerror: failed to spawn life-cycle scripts for {s}: {s}\n"; + const args = .{ name, @errorName(err) }; + + if (comptime log_level.showProgress()) { + switch (Output.enable_ansi_colors) { + inline 
else => |enable_ansi_colors| { + this.progress.log(comptime Output.prettyFmt(fmt, enable_ansi_colors), args); + }, + } + } else { + Output.prettyErrorln(fmt, args); + } + } + + if (this.manager.options.enable.fail_early) { + Global.exit(1); + } + + Output.flush(); + this.summary.fail += 1; + }; + } + } + } + + pub fn completeRemainingScripts(this: *PackageInstaller, comptime log_level: Options.LogLevel) void { + for (this.pending_lifecycle_scripts.items) |entry| { + const package_name = entry.list.first().package_name; + while (RunCommand.LifecycleScriptSubprocess.alive_count.load(.Monotonic) >= this.manager.options.max_concurrent_lifecycle_scripts) { + this.manager.uws_event_loop.tickWithTimeout(125); + } + + this.manager.spawnPackageLifecycleScripts(this.command_ctx, entry.list, log_level) catch |err| { + if (comptime log_level != .silent) { + const fmt = "\nerror: failed to spawn life-cycle scripts for {s}: {s}\n"; + const args = .{ package_name, @errorName(err) }; + + if (comptime log_level.showProgress()) { + switch (Output.enable_ansi_colors) { + inline else => |enable_ansi_colors| { + this.progress.log(comptime Output.prettyFmt(fmt, enable_ansi_colors), args); + }, + } + } else { + Output.prettyErrorln(fmt, args); + } + } + + if (this.manager.options.enable.fail_early) { + Global.exit(1); + } + + Output.flush(); + this.summary.fail += 1; + }; + } + + while (this.manager.pending_lifecycle_script_tasks.load(.Monotonic) > 0) { + this.manager.uws_event_loop.tickWithTimeout(125); + } + } + + /// Check if a tree is ready to start running lifecycle scripts + pub fn canRunScripts(this: *PackageInstaller, scripts_tree_id: Lockfile.Tree.Id) bool { + const deps = this.tree_ids_to_trees_the_id_depends_on.at(scripts_tree_id); + return (deps.subsetOf(this.completed_trees) or + deps.eql(this.completed_trees)) and + RunCommand.LifecycleScriptSubprocess.alive_count.load(.Monotonic) < this.manager.options.max_concurrent_lifecycle_scripts; + } + + pub fn printTreeDeps(this: 
*PackageInstaller) void { + for (this.tree_ids_to_trees_the_id_depends_on, 0..) |deps, j| { + std.debug.print("tree #{d:3}: ", .{j}); + for (0..this.lockfile.buffers.trees.items.len) |tree_id| { + std.debug.print("{d} ", .{@intFromBool(deps.isSet(tree_id))}); + } + std.debug.print("\n", .{}); + } + } + + pub fn deinit(this: *PackageInstaller) void { + const allocator = this.manager.allocator; + this.pending_lifecycle_scripts.deinit(this.manager.allocator); + this.completed_trees.deinit(allocator); + allocator.free(this.tree_install_counts); + this.tree_ids_to_trees_the_id_depends_on.deinit(allocator); + } /// Call when you mutate the length of `lockfile.packages` pub fn fixCachedLockfilePackageSlices(this: *PackageInstaller) void { @@ -7329,8 +7638,11 @@ pub const PackageManager = struct { const prev_node_modules_folder = this.node_modules_folder; defer this.node_modules_folder = prev_node_modules_folder; + const prev_tree_id = this.current_tree_id; + defer this.current_tree_id = prev_tree_id; for (callbacks.items) |cb| { - this.node_modules_folder = .{ .dir = .{ .fd = bun.fdcast(cb.node_modules_folder) } }; + this.node_modules_folder = .{ .dir = .{ .fd = bun.fdcast(cb.node_modules_folder.fd) } }; + this.current_tree_id = cb.node_modules_folder.tree_id; this.installPackageWithNameAndResolution(dependency_id, package_id, log_level, name, resolution); } } @@ -7366,6 +7678,7 @@ pub const PackageManager = struct { .allocator = this.lockfile.allocator, .package_name = name, .package_version = resolution_label, + // .install_order = this.tree_iterator.order, }; switch (resolution.tag) { @@ -7468,7 +7781,7 @@ pub const PackageManager = struct { } const needs_install = this.force_install or this.skip_verify_installed_version_number or !installer.verify(resolution, buf); - this.summary.skipped += @as(u32, @intFromBool(!needs_install)); + this.summary.skipped += @intFromBool(!needs_install); if (needs_install) { const result: PackageInstall.Result = switch (resolution.tag) { 
@@ -7491,83 +7804,24 @@ pub const PackageManager = struct { const bin_task_id = Task.Id.forBinLink(package_id); var task_queue = this.manager.task_queue.getOrPut(this.manager.allocator, bin_task_id) catch unreachable; if (!task_queue.found_existing) { - run_bin_link: { - if (std.mem.indexOfScalar(PackageNameHash, this.options.native_bin_link_allowlist, String.Builder.stringHash(name)) != null) { - this.platform_binlinks.append(this.lockfile.allocator, .{ - .dependency_id = dependency_id, - .node_modules_folder = this.node_modules_folder, - }) catch unreachable; - break :run_bin_link; - } + var bin_linker = Bin.Linker{ + .bin = bin, + .package_installed_node_modules = bun.toFD(this.node_modules_folder.dir.fd), + .global_bin_path = this.options.bin_path, + .global_bin_dir = this.options.global_bin_dir.dir, - var bin_linker = Bin.Linker{ - .bin = bin, - .package_installed_node_modules = bun.toFD(this.node_modules_folder.dir.fd), - .global_bin_path = this.options.bin_path, - .global_bin_dir = this.options.global_bin_dir.dir, + // .destination_dir_subpath = destination_dir_subpath, + .root_node_modules_folder = bun.toFD(this.root_node_modules_folder.dir.fd), + .package_name = strings.StringOrTinyString.init(alias), + .string_buf = buf, + .extern_string_buf = extern_string_buf, + }; - // .destination_dir_subpath = destination_dir_subpath, - .root_node_modules_folder = bun.toFD(this.root_node_modules_folder.dir.fd), - .package_name = strings.StringOrTinyString.init(alias), - .string_buf = buf, - .extern_string_buf = extern_string_buf, - }; - - bin_linker.link(this.manager.options.global); - if (bin_linker.err) |err| { - if (comptime log_level != .silent) { - const fmt = "\nerror: linking {s}: {s}\n"; - const args = .{ alias, @errorName(err) }; - - if (comptime log_level.showProgress()) { - switch (Output.enable_ansi_colors) { - inline else => |enable_ansi_colors| { - this.progress.log(comptime Output.prettyFmt(fmt, enable_ansi_colors), args); - }, - } - } else { - 
Output.prettyErrorln(fmt, args); - } - } - - if (this.manager.options.enable.fail_early) { - installer.uninstall(); - Global.crash(); - } - } - } - } - } - - if (resolution.tag == .workspace or this.lockfile.trusted_dependencies.contains(@as(u32, @truncate(String.Builder.stringHash(name))))) { - var scripts = this.lockfile.packages.items(.scripts)[package_id]; - if (scripts.hasAny()) { - var path_buf: [bun.MAX_PATH_BYTES]u8 = undefined; - const path_str = Path.joinAbsString( - bun.getFdPath(bun.toFD(this.node_modules_folder.dir.fd), &path_buf) catch unreachable, - &[_]string{destination_dir_subpath}, - .posix, - ); - - scripts.enqueue(this.lockfile, buf, path_str); - } else if (!scripts.filled) { - var path_buf: [bun.MAX_PATH_BYTES]u8 = undefined; - const path_str = Path.joinAbsString( - bun.getFdPath(bun.toFD(this.node_modules_folder.dir.fd), &path_buf) catch unreachable, - &[_]string{destination_dir_subpath}, - .auto, - ); - - scripts.enqueueFromPackageJSON( - this.manager.log, - this.lockfile, - this.node_modules_folder.dir, - destination_dir_subpath, - path_str, - ) catch |err| { + bin_linker.link(this.manager.options.global); + if (bin_linker.err) |err| { if (comptime log_level != .silent) { - const fmt = "\nerror: failed to parse life-cycle scripts for {s}: {s}\n"; - const args = .{ name, @errorName(err) }; + const fmt = "\nerror: linking {s}: {s}\n"; + const args = .{ alias, @errorName(err) }; if (comptime log_level.showProgress()) { switch (Output.enable_ansi_colors) { @@ -7581,25 +7835,40 @@ pub const PackageManager = struct { } if (this.manager.options.enable.fail_early) { - Global.exit(1); + installer.uninstall(); + Global.crash(); } - - Output.flush(); - this.summary.fail += 1; - return; - }; + } } } + + if (resolution.tag == .workspace or this.lockfile.hasTrustedDependency(name)) { + this.enqueuePackageScriptsToLockfile( + name, + log_level, + package_id, + destination_dir_subpath, + resolution, + ); + } + + 
this.incrementTreeInstallCount(this.current_tree_id, log_level); }, .fail => |cause| { if (cause.isPackageMissingFromCache()) { + const context: TaskCallbackContext = .{ + .node_modules_folder = .{ + .fd = bun.toFD(this.node_modules_folder.dir.fd), + .tree_id = this.current_tree_id, + }, + }; switch (resolution.tag) { .git => { this.manager.enqueueGitForCheckout( dependency_id, alias, resolution, - .{ .node_modules_folder = bun.toFD(this.node_modules_folder.dir.fd) }, + context, ); }, .github => { @@ -7609,7 +7878,7 @@ pub const PackageManager = struct { dependency_id, package_id, url, - .{ .node_modules_folder = bun.toFD(this.node_modules_folder.dir.fd) }, + context, ); }, .local_tarball => { @@ -7617,7 +7886,7 @@ pub const PackageManager = struct { dependency_id, alias, resolution, - .{ .node_modules_folder = bun.toFD(this.node_modules_folder.dir.fd) }, + context, ); }, .remote_tarball => { @@ -7625,7 +7894,7 @@ pub const PackageManager = struct { dependency_id, package_id, resolution.value.remote_tarball.slice(buf), - .{ .node_modules_folder = bun.toFD(this.node_modules_folder.dir.fd) }, + context, ); }, .npm => { @@ -7636,7 +7905,7 @@ pub const PackageManager = struct { package_id, resolution.value.npm.version, resolution.value.npm.url.slice(buf), - .{ .node_modules_folder = bun.toFD(this.node_modules_folder.dir.fd) }, + context, ); }, else => { @@ -7663,6 +7932,133 @@ pub const PackageManager = struct { }, else => {}, } + } else { + if (this.manager.summary.new_trusted_dependencies.contains(@truncate(String.Builder.stringHash(name)))) { + // these are packages that are installed but haven't run lifecycle scripts because they weren't + // in `trustedDependencies` + this.enqueuePackageScriptsToLockfile( + name, + log_level, + package_id, + destination_dir_subpath, + resolution, + ); + } + + this.incrementTreeInstallCount(this.current_tree_id, log_level); + } + } + + fn enqueuePackageScriptsToLockfile( + this: *PackageInstaller, + name: string, + comptime 
log_level: Options.LogLevel, + package_id: PackageID, + destination_dir_subpath: [:0]const u8, + resolution: *const Resolution, + ) void { + const buf = this.lockfile.buffers.string_bytes.items; + var scripts: Package.Scripts = this.lockfile.packages.items(.scripts)[package_id]; + + if (scripts.hasAny()) { + var path_buf: [bun.MAX_PATH_BYTES]u8 = undefined; + const node_modules_path = bun.getFdPath(bun.toFD(this.node_modules_folder.dir.fd), &path_buf) catch unreachable; + + const add_node_gyp_rebuild_script = if (this.lockfile.hasTrustedDependency(name) and + scripts.install.isEmpty() and + scripts.postinstall.isEmpty()) + brk: { + const binding_dot_gyp_path = Path.joinAbsStringZ( + node_modules_path, + &[_]string{ destination_dir_subpath, "binding.gyp" }, + .posix, + ); + + break :brk Syscall.exists(binding_dot_gyp_path); + } else false; + + const path_str = Path.joinAbsString( + node_modules_path, + &[_]string{destination_dir_subpath}, + .posix, + ); + + if (scripts.enqueue( + this.lockfile, + buf, + path_str, + name, + resolution.tag, + add_node_gyp_rebuild_script, + )) |scripts_list| { + if (this.manager.options.do.run_scripts) { + this.manager.total_scripts += scripts_list.total; + if (this.manager.scripts_node) |scripts_node| { + this.manager.setNodeName( + scripts_node, + scripts_list.items[scripts_list.first_index].?.package_name, + PackageManager.ProgressStrings.script_emoji, + true, + ); + scripts_node.setEstimatedTotalItems(scripts_node.unprotected_estimated_total_items + scripts_list.total); + } + this.pending_lifecycle_scripts.append(this.manager.allocator, .{ + .list = scripts_list, + .tree_id = this.current_tree_id, + }) catch unreachable; + } + } + } else if (!scripts.filled) { + const scripts_list = scripts.enqueueFromPackageJSON( + this.manager.log, + this.lockfile, + this.node_modules_folder.dir, + destination_dir_subpath, + name, + resolution, + ) catch |err| { + if (comptime log_level != .silent) { + const fmt = "\nerror: failed to parse 
life-cycle scripts for {s}: {s}\n"; + const args = .{ name, @errorName(err) }; + + if (comptime log_level.showProgress()) { + switch (Output.enable_ansi_colors) { + inline else => |enable_ansi_colors| { + this.progress.log(comptime Output.prettyFmt(fmt, enable_ansi_colors), args); + }, + } + } else { + Output.prettyErrorln(fmt, args); + } + } + + if (this.manager.options.enable.fail_early) { + Global.exit(1); + } + + Output.flush(); + this.summary.fail += 1; + return; + }; + + if (this.manager.options.do.run_scripts) { + if (scripts_list) |list| { + this.manager.total_scripts += list.total; + if (this.manager.scripts_node) |scripts_node| { + this.manager.setNodeName( + scripts_node, + list.items[list.first_index].?.package_name, + PackageManager.ProgressStrings.script_emoji, + true, + ); + scripts_node.setEstimatedTotalItems(scripts_node.unprotected_estimated_total_items + list.total); + } + this.pending_lifecycle_scripts.append(this.manager.allocator, .{ + .list = list, + .tree_id = this.current_tree_id, + }) catch unreachable; + } + } } } @@ -7678,6 +8074,7 @@ pub const PackageManager = struct { if (comptime log_level.showProgress()) { this.node.completeOne(); } + this.incrementTreeInstallCount(this.current_tree_id, log_level); return; } @@ -7838,10 +8235,10 @@ pub const PackageManager = struct { pub fn installPackages( this: *PackageManager, - lockfile_: *Lockfile, + ctx: Command.Context, comptime log_level: PackageManager.Options.LogLevel, ) !PackageInstall.Summary { - var lockfile = lockfile_; + var lockfile = this.lockfile; if (!this.options.local_package_features.dev_dependencies) { lockfile = try lockfile.maybeCloneFilteringRootPackages( this.options.local_package_features, @@ -7849,9 +8246,27 @@ pub const PackageManager = struct { ); } + const root_lifecycle_scripts_count = brk: { + if (this.options.do.run_scripts and + this.options.do.install_packages and + this.root_lifecycle_scripts != null) + { + var counter: usize = 0; + + for 
(this.root_lifecycle_scripts.?.items) |item| { + if (item != null) counter += 1; + } + + this.total_scripts += counter; + break :brk counter; + } + break :brk 0; + }; + var root_node: *Progress.Node = undefined; var download_node: Progress.Node = undefined; var install_node: Progress.Node = undefined; + var scripts_node: Progress.Node = undefined; const options = &this.options; var progress = &this.progress; @@ -7861,7 +8276,9 @@ pub const PackageManager = struct { download_node = root_node.start(ProgressStrings.download(), 0); install_node = root_node.start(ProgressStrings.install(), lockfile.packages.len); + scripts_node = root_node.start(ProgressStrings.script(), root_lifecycle_scripts_count); this.downloads_node = &download_node; + this.scripts_node = &scripts_node; } defer { @@ -7894,10 +8311,18 @@ pub const PackageManager = struct { }; var skip_delete = skip_verify_installed_version_number; - const force_install = options.enable.force_install; if (options.enable.force_install) { skip_verify_installed_version_number = true; skip_delete = false; + + // TODO(dylan-conway): depth first installation + // var node_modules_iter = node_modules_folder.iterateAssumeFirstIteration(); + // defer node_modules_iter.reset(); + // while (try node_modules_iter.next()) |entry| { + // if (entry.kind != .directory or !strings.eqlComptime(entry.name, ".cache")) { + // node_modules_folder.dir.deleteTree(entry.name) catch {}; + // } + // } } var summary = PackageInstall.Summary{}; @@ -7912,6 +8337,54 @@ pub const PackageManager = struct { // to make mistakes harder var parts = lockfile.packages.slice(); + const completed_trees, const tree_ids_to_trees_the_id_depends_on, const tree_install_counts = trees: { + const trees = lockfile.buffers.trees.items; + var completed_trees = try Bitset.initEmpty(this.allocator, trees.len); + var tree_ids_to_trees_the_id_depends_on = try Bitset.List.initEmpty(this.allocator, trees.len, trees.len); + + { + // For each tree id, traverse through it's 
parents and mark all visited tree + // ids as dependents for the current tree parent + var deps = try Bitset.initEmpty(this.allocator, trees.len); + defer deps.deinit(this.allocator); + for (trees) |_curr| { + var curr = _curr; + tree_ids_to_trees_the_id_depends_on.set(curr.id, curr.id); + + while (curr.parent != Lockfile.Tree.invalid_id) { + deps.set(curr.id); + tree_ids_to_trees_the_id_depends_on.setUnion(curr.parent, deps); + curr = trees[curr.parent]; + } + + deps.setAll(false); + } + } + + var tree_install_counts = try this.allocator.alloc(usize, trees.len); + @memset(tree_install_counts, 0); + + if (comptime Environment.allow_assert) { + if (trees.len > 0) { + // last tree should not depend on another except for itself + std.debug.assert(tree_ids_to_trees_the_id_depends_on.at(trees.len - 1).count() == 1 and tree_ids_to_trees_the_id_depends_on.at(trees.len - 1).isSet(trees.len - 1)); + // root tree should always depend on all trees + std.debug.assert(tree_ids_to_trees_the_id_depends_on.at(0).count() == trees.len); + } + + // a tree should always depend on itself + for (0..trees.len) |j| { + std.debug.assert(tree_ids_to_trees_the_id_depends_on.at(j).isSet(j)); + } + } + + break :trees .{ + completed_trees, + tree_ids_to_trees_the_id_depends_on, + tree_install_counts, + }; + }; + break :brk PackageInstaller{ .manager = this, .options = &this.options, @@ -7928,17 +8401,24 @@ pub const PackageManager = struct { .skip_delete = skip_delete, .summary = &summary, .global_bin_dir = this.options.global_bin_dir, - .force_install = force_install, - .install_count = lockfile.buffers.hoisted_dependencies.items.len, + .force_install = options.enable.force_install, .successfully_installed = try Bitset.initEmpty( this.allocator, lockfile.packages.len, ), .tree_iterator = &iterator, + .command_ctx = ctx, + .tree_ids_to_trees_the_id_depends_on = tree_ids_to_trees_the_id_depends_on, + .completed_trees = completed_trees, + .tree_install_counts = tree_install_counts, }; }; - while 
(iterator.nextNodeModulesFolder()) |node_modules| { + // installer.printTreeDeps(); + + defer installer.deinit(); + + while (iterator.nextNodeModulesFolder(&installer.completed_trees)) |node_modules| { // We deliberately do not close this folder. // If the package hasn't been downloaded, we will need to install it later // We use this file descriptor to know where to put it. @@ -7953,6 +8433,12 @@ pub const PackageManager = struct { var remaining = node_modules.dependencies; + installer.current_tree_id = node_modules.tree_id; + + if (comptime Environment.allow_assert) { + std.debug.assert(node_modules.dependencies.len == this.lockfile.buffers.trees.items[installer.current_tree_id].dependencies.len); + } + // cache line is 64 bytes on ARM64 and x64 // PackageIDs are 4 bytes // Hence, we can fit up to 64 / 4 = 16 package IDs in a cache line @@ -8025,87 +8511,35 @@ pub const PackageManager = struct { this.sleep(); } + this.finished_installing.store(true, .Monotonic); + if (comptime log_level.showProgress()) { + scripts_node.activate(); + } + + if (comptime Environment.allow_assert) { + for (lockfile.buffers.trees.items) |tree| { + std.debug.assert(installer.tree_install_counts[tree.id] == tree.dependencies.len); + } + } + if (!installer.options.do.install_packages) return error.InstallFailed; summary.successfully_installed = installer.successfully_installed; - { - var parts = lockfile.packages.slice(); - var metas = parts.items(.meta); - var names = parts.items(.name); - var dependencies = lockfile.buffers.dependencies.items; - const resolutions_buffer: []const PackageID = lockfile.buffers.resolutions.items; - const resolution_lists: []const Lockfile.PackageIDSlice = parts.items(.resolutions); - outer: for (installer.platform_binlinks.items) |deferred| { - const dependency_id = deferred.dependency_id; - const package_id = resolutions_buffer[dependency_id]; - const folder = deferred.node_modules_folder; - const package_resolutions: []const PackageID = 
resolution_lists[package_id].get(resolutions_buffer); - const original_bin: Bin = installer.bins[package_id]; + installer.completeRemainingScripts(log_level); - for (package_resolutions) |resolved_id| { - if (resolved_id >= names.len) continue; - const meta: Lockfile.Package.Meta = metas[resolved_id]; + if (root_lifecycle_scripts_count > 0) { + // root lifecycle scripts can run now that all dependencies are installed + // and their lifecycle script have finished + try this.spawnPackageLifecycleScripts(ctx, this.root_lifecycle_scripts.?, log_level); + } - // This is specifically for platform-specific binaries - if (meta.os == .all and meta.arch == .all) continue; + while (this.pending_lifecycle_script_tasks.load(.Monotonic) > 0) { + this.uws_event_loop.tickWithTimeout(125); + } - // Don't attempt to link incompatible binaries - if (meta.isDisabled()) continue; - - const name = lockfile.str(&dependencies[dependency_id].name); - - var bin_linker = Bin.Linker{ - .bin = original_bin, - .package_installed_node_modules = bun.toFD(folder.dir.fd), - .root_node_modules_folder = bun.toFD(node_modules_folder.dir.fd), - .global_bin_path = this.options.bin_path, - .global_bin_dir = this.options.global_bin_dir.dir, - - .package_name = strings.StringOrTinyString.init(name), - .string_buf = lockfile.buffers.string_bytes.items, - .extern_string_buf = lockfile.buffers.extern_strings.items, - }; - - bin_linker.link(this.options.global); - - if (bin_linker.err) |err| { - if (comptime log_level != .silent) { - const fmt = "\nerror: linking {s}: {s}\n"; - const args = .{ name, @errorName(err) }; - - if (comptime log_level.showProgress()) { - switch (Output.enable_ansi_colors) { - inline else => |enable_ansi_colors| { - this.progress.log(comptime Output.prettyFmt(fmt, enable_ansi_colors), args); - }, - } - } else { - Output.prettyErrorln(fmt, args); - } - } - - if (this.options.enable.fail_early) Global.crash(); - } - - continue :outer; - } - - if (comptime log_level != .silent) { - const 
fmt = "\nwarn: no compatible binaries found for {s}\n"; - const args = .{lockfile.str(&names[package_id])}; - - if (comptime log_level.showProgress()) { - switch (Output.enable_ansi_colors) { - inline else => |enable_ansi_colors| { - this.progress.log(comptime Output.prettyFmt(fmt, enable_ansi_colors), args); - }, - } - } else { - Output.prettyErrorln(fmt, args); - } - } - } + if (comptime log_level.showProgress()) { + scripts_node.end(); } } @@ -8260,6 +8694,10 @@ pub const PackageManager = struct { Global.crash(); } + if (manager.summary.new_trusted_dependencies.count() > 0) { + needs_new_lockfile = true; + } + if (had_any_diffs) { var builder_ = manager.lockfile.stringBuilder(); // ensure we use one pointer to reference it instead of creating new ones and potentially aliasing @@ -8528,12 +8966,7 @@ pub const PackageManager = struct { try manager.setupGlobalDir(&ctx); } - // We don't always save the lockfile. - // This is for two reasons. - // 1. It's unnecessary work if there are no changes - // 2. 
There is a determinism issue in the file where alignment bytes might be garbage data - // This is a bug that needs to be fixed, however we can work around it for now - // by avoiding saving the lockfile + // It's unnecessary work to re-save the lockfile if there are no changes if (manager.options.do.save_lockfile and (did_meta_hash_change or manager.lockfile.isEmpty() or manager.options.enable.force_save_lockfile)) save: { @@ -8557,19 +8990,19 @@ pub const PackageManager = struct { break :save; } - var node: *Progress.Node = undefined; + var save_node: *Progress.Node = undefined; if (comptime log_level.showProgress()) { - node = manager.progress.start(ProgressStrings.save(), 0); + save_node = manager.progress.start(ProgressStrings.save(), 0); manager.progress.supports_ansi_escape_codes = Output.enable_ansi_colors_stderr; - node.activate(); + save_node.activate(); manager.progress.refresh(); } manager.lockfile.saveToDisk(manager.options.lockfile_path); if (comptime log_level.showProgress()) { - node.end(); + save_node.end(); manager.progress.refresh(); manager.progress.root.end(); manager.progress = .{}; @@ -8579,50 +9012,44 @@ pub const PackageManager = struct { } } + const binding_dot_gyp_path = Path.joinAbsStringZ( + Fs.FileSystem.instance.top_level_dir, + &[_]string{"binding.gyp"}, + .posix, + ); if (root.scripts.hasAny()) { - root.scripts.enqueue( + const add_node_gyp_rebuild_script = root.scripts.install.isEmpty() and root.scripts.postinstall.isEmpty() and Syscall.exists(binding_dot_gyp_path); + + manager.root_lifecycle_scripts = root.scripts.enqueue( manager.lockfile, manager.lockfile.buffers.string_bytes.items, strings.withoutTrailingSlash(Fs.FileSystem.instance.top_level_dir), + root.name.slice(manager.lockfile.buffers.string_bytes.items), + .root, + add_node_gyp_rebuild_script, ); + } else { + if (Syscall.exists(binding_dot_gyp_path)) { + // no scripts exist but auto node gyp script needs to be added + manager.root_lifecycle_scripts = root.scripts.enqueue( 
+ manager.lockfile, + manager.lockfile.buffers.string_bytes.items, + strings.withoutTrailingSlash(Fs.FileSystem.instance.top_level_dir), + root.name.slice(manager.lockfile.buffers.string_bytes.items), + .root, + true, + ); + } } var install_summary = PackageInstall.Summary{}; if (manager.options.do.install_packages) { install_summary = try manager.installPackages( - manager.lockfile, + ctx, log_level, ); } - // Install script order for npm 8.3.0: - // 1. preinstall - // 2. install - // 3. postinstall - // 4. preprepare - // 5. prepare - // 6. postprepare - const run_lifecycle_scripts = manager.options.do.run_scripts and manager.lockfile.scripts.hasAny() and manager.options.do.install_packages; - if (run_lifecycle_scripts) { - // We need to figure out the PATH and other environment variables - // to do that, we re-use the code from bun run - // this is expensive, it traverses the entire directory tree going up to the root - // so we really only want to do it when strictly necessary - var this_bundler: bundler.Bundler = undefined; - var ORIGINAL_PATH: string = ""; - _ = try RunCommand.configureEnvForRun( - ctx, - &this_bundler, - manager.env, - &ORIGINAL_PATH, - log_level != .silent, - false, - ); - - // 1. preinstall - try manager.lockfile.scripts.run(manager.allocator, manager.env, log_level != .silent, "preinstall"); - } - if (needs_new_lockfile) { manager.summary.add = @as(u32, @truncate(manager.lockfile.packages.len)); } @@ -8729,20 +9156,6 @@ pub const PackageManager = struct { } } - if (run_lifecycle_scripts and install_summary.fail == 0) { - // 2. install - // 3. postinstall - try manager.lockfile.scripts.run(manager.allocator, manager.env, log_level != .silent, "install"); - try manager.lockfile.scripts.run(manager.allocator, manager.env, log_level != .silent, "postinstall"); - - // 4. preprepare - // 5. prepare - // 6. 
postprepare - try manager.lockfile.scripts.run(manager.allocator, manager.env, log_level != .silent, "preprepare"); - try manager.lockfile.scripts.run(manager.allocator, manager.env, log_level != .silent, "prepare"); - try manager.lockfile.scripts.run(manager.allocator, manager.env, log_level != .silent, "postprepare"); - } - if (comptime log_level != .silent) { if (manager.options.do.summary) { if (!printed_timestamp) { @@ -8755,6 +9168,21 @@ pub const PackageManager = struct { Output.flush(); } + + pub fn spawnPackageLifecycleScripts( + this: *PackageManager, + ctx: Command.Context, + list: Lockfile.Package.Scripts.List, + comptime log_level: PackageManager.Options.LogLevel, + ) !void { + const root_dir_info, const this_bundler = try this.configureEnvForScripts(ctx, log_level); + var original_path: string = undefined; + try RunCommand.configurePathForRun(ctx, root_dir_info, &this_bundler, &original_path, list.first().cwd, false); + const envp = try this_bundler.env.map.createNullDelimitedEnvMap(this.allocator); + try this_bundler.env.map.put("PATH", original_path); + + try RunCommand.spawnPackageScripts(this, list, envp); + } }; const Package = Lockfile.Package; diff --git a/src/install/lockfile.zig b/src/install/lockfile.zig index b07c2ede87..e4589f9582 100644 --- a/src/install/lockfile.zig +++ b/src/install/lockfile.zig @@ -81,6 +81,7 @@ const PackageNameHash = Install.PackageNameHash; const Resolution = @import("./resolution.zig").Resolution; const Crypto = @import("../sha.zig").Hashers; const PackageJSON = @import("../resolver/package_json.zig").PackageJSON; +const StaticHashMap = @import("../StaticHashMap.zig").StaticHashMap; const MetaHash = [std.crypto.hash.sha2.Sha512256.digest_length]u8; const zero_hash = std.mem.zeroes(MetaHash); @@ -107,21 +108,37 @@ allocator: Allocator, scratch: Scratch = .{}, scripts: Scripts = .{}, -trusted_dependencies: NameHashSet = .{}, workspace_paths: NameHashMap = .{}, workspace_versions: VersionHashMap = .{}, 
+trusted_dependencies: NameHashSet = .{}, overrides: OverrideMap = .{}, const Stream = std.io.FixedBufferStream([]u8); pub const default_filename = "bun.lockb"; +pub fn hasTrustedDependencies(this: *const Lockfile) bool { + return this.trusted_dependencies.count() > 0; +} + pub const Scripts = struct { - const Entry = struct { + const MAX_PARALLEL_PROCESSES = 10; + pub const Entry = struct { cwd: string, script: string, + package_name: string, }; - const Entries = std.ArrayListUnmanaged(Entry); + pub const Entries = std.ArrayListUnmanaged(Entry); + + pub const names = [_]string{ + "preinstall", + "install", + "postinstall", + "preprepare", + "prepare", + "postprepare", + }; + const RunCommand = @import("../cli/run_command.zig").RunCommand; preinstall: Entries = .{}, @@ -132,21 +149,22 @@ pub const Scripts = struct { postprepare: Entries = .{}, pub fn hasAny(this: *Scripts) bool { - inline for (Package.Scripts.Hooks) |hook| { + inline for (Scripts.names) |hook| { if (@field(this, hook).items.len > 0) return true; } return false; } - pub fn run(this: *Scripts, allocator: Allocator, env: *DotEnv.Loader, silent: bool, comptime hook: []const u8) !void { - for (@field(this, hook).items) |entry| { - if (comptime Environment.allow_assert) std.debug.assert(Fs.FileSystem.instance_loaded); - _ = try RunCommand.runPackageScript(allocator, entry.script, hook, entry.cwd, env, &.{}, silent); + pub fn count(this: *Scripts) usize { + var res: usize = 0; + inline for (Scripts.names) |hook| { + res += @field(this, hook).items.len; } + return res; } pub fn deinit(this: *Scripts, allocator: Allocator) void { - inline for (Package.Scripts.Hooks) |hook| { + inline for (Scripts.names) |hook| { const list = &@field(this, hook); for (list.items) |entry| { allocator.free(entry.cwd); @@ -267,7 +285,7 @@ pub const Tree = struct { } pub const root_dep_id: DependencyID = invalid_package_id - 1; - const invalid_id: Id = std.math.maxInt(Id); + pub const invalid_id: Id = std.math.maxInt(Id); const 
dependency_loop = invalid_id - 1; const hoisted = invalid_id - 2; const error_id = hoisted; @@ -277,6 +295,7 @@ pub const Tree = struct { pub const NodeModulesFolder = struct { relative_path: stringZ, dependencies: []const DependencyID, + tree_id: Tree.Id, }; pub const Iterator = struct { @@ -284,7 +303,7 @@ pub const Tree = struct { dependency_ids: []const DependencyID, dependencies: []const Dependency, resolutions: []const PackageID, - tree_id: Id = 0, + tree_id: Id, path_buf: [bun.MAX_PATH_BYTES]u8 = undefined, path_buf_len: usize = 0, last_parent: Id = invalid_id, @@ -296,6 +315,7 @@ pub const Tree = struct { pub fn init(lockfile: *const Lockfile) Iterator { return .{ .trees = lockfile.buffers.trees.items, + .tree_id = 0, .dependency_ids = lockfile.buffers.hoisted_dependencies.items, .dependencies = lockfile.buffers.dependencies.items, .resolutions = lockfile.buffers.resolutions.items, @@ -311,10 +331,13 @@ pub const Tree = struct { this.string_buf = lockfile.buffers.string_bytes.items; } - pub fn nextNodeModulesFolder(this: *Iterator) ?NodeModulesFolder { + pub fn nextNodeModulesFolder(this: *Iterator, completed_trees: ?*Bitset) ?NodeModulesFolder { if (this.tree_id >= this.trees.len) return null; while (this.trees[this.tree_id].dependencies.len == 0) { + if (completed_trees) |_completed_trees| { + _completed_trees.set(this.tree_id); + } this.tree_id += 1; if (this.tree_id >= this.trees.len) return null; } @@ -361,6 +384,7 @@ pub const Tree = struct { return .{ .relative_path = relative_path, .dependencies = tree.dependencies.get(this.dependency_ids), + .tree_id = tree.id, }; } }; @@ -1064,7 +1088,9 @@ pub const Printer = struct { writer: Writer, ) !void { var fs = &FileSystem.instance; - var options = PackageManager.Options{}; + var options = PackageManager.Options{ + .max_concurrent_lifecycle_scripts = 1, + }; var entries_option = try fs.fs.readDirectory(fs.top_level_dir, null, 0, true); @@ -1946,6 +1972,10 @@ pub const PackageIndex = struct { }; }; +pub 
inline fn hasOverrides(this: *Lockfile) bool { + return this.overrides.map.count() > 0; +} + pub const OverrideMap = struct { const debug = Output.scoped(.OverrideMap, false); @@ -2302,13 +2332,17 @@ pub const Package = extern struct { postprepare: String = .{}, filled: bool = false, - pub const Hooks = .{ - "preinstall", - "install", - "postinstall", - "preprepare", - "prepare", - "postprepare", + pub const List = struct { + items: [Lockfile.Scripts.names.len]?Lockfile.Scripts.Entry, + first_index: u8, + total: u8, + + pub fn first(this: Package.Scripts.List) Lockfile.Scripts.Entry { + if (comptime Environment.allow_assert) { + std.debug.assert(this.items[this.first_index] != null); + } + return this.items[this.first_index].?; + } }; pub fn clone(this: *const Package.Scripts, buf: []const u8, comptime Builder: type, builder: Builder) Package.Scripts { @@ -2316,41 +2350,161 @@ pub const Package = extern struct { var scripts = Package.Scripts{ .filled = true, }; - inline for (Package.Scripts.Hooks) |hook| { + inline for (Lockfile.Scripts.names) |hook| { @field(scripts, hook) = builder.append(String, @field(this, hook).slice(buf)); } return scripts; } pub fn count(this: *const Package.Scripts, buf: []const u8, comptime Builder: type, builder: Builder) void { - inline for (Package.Scripts.Hooks) |hook| { + inline for (Lockfile.Scripts.names) |hook| { builder.count(@field(this, hook).slice(buf)); } } pub fn hasAny(this: *const Package.Scripts) bool { - inline for (Package.Scripts.Hooks) |hook| { + inline for (Lockfile.Scripts.names) |hook| { if (!@field(this, hook).isEmpty()) return true; } return false; } - pub fn enqueue(this: *const Package.Scripts, lockfile: *Lockfile, buf: []const u8, cwd: string) void { - inline for (Package.Scripts.Hooks) |hook| { - const script = @field(this, hook); - if (!script.isEmpty()) { - @field(lockfile.scripts, hook).append(lockfile.allocator, .{ - .cwd = lockfile.allocator.dupe(u8, cwd) catch unreachable, - .script = 
lockfile.allocator.dupe(u8, script.slice(buf)) catch unreachable, - }) catch unreachable; + pub fn enqueue( + this: *const Package.Scripts, + lockfile: *Lockfile, + lockfile_buf: []const u8, + _cwd: string, + package_name: string, + resolution_tag: Resolution.Tag, + add_node_gyp_rebuild_script: bool, + ) ?Package.Scripts.List { + var cwd: ?string = null; + var script_index: u8 = 0; + var first_script_index: i8 = -1; + var scripts: [6]?Lockfile.Scripts.Entry = .{null} ** 6; + var counter: u8 = 0; + + if (add_node_gyp_rebuild_script) { + // missing install and postinstall, only need to check preinstall + if (!this.preinstall.isEmpty()) { + const entry: Lockfile.Scripts.Entry = .{ + .cwd = cwd orelse brk: { + cwd = lockfile.allocator.dupe(u8, _cwd) catch unreachable; + break :brk cwd.?; + }, + .script = lockfile.allocator.dupe(u8, this.preinstall.slice(lockfile_buf)) catch unreachable, + .package_name = package_name, + }; + if (first_script_index == -1) first_script_index = @intCast(script_index); + scripts[script_index] = entry; + lockfile.scripts.preinstall.append(lockfile.allocator, entry) catch unreachable; + counter += 1; + } + script_index += 1; + + const entry: Lockfile.Scripts.Entry = .{ + .cwd = cwd orelse brk: { + cwd = lockfile.allocator.dupe(u8, _cwd) catch unreachable; + break :brk cwd.?; + }, + .script = lockfile.allocator.dupe(u8, "node-gyp rebuild") catch unreachable, + .package_name = package_name, + }; + if (first_script_index == -1) first_script_index = @intCast(script_index); + scripts[script_index] = entry; + script_index += 2; + lockfile.scripts.install.append(lockfile.allocator, entry) catch unreachable; + counter += 1; + } else { + const install_scripts = .{ + "preinstall", + "install", + "postinstall", + }; + + inline for (install_scripts) |hook| { + const script = @field(this, hook); + if (!script.isEmpty()) { + const entry: Lockfile.Scripts.Entry = .{ + .cwd = cwd orelse brk: { + cwd = lockfile.allocator.dupe(u8, _cwd) catch unreachable; + 
break :brk cwd.?; + }, + .script = lockfile.allocator.dupe(u8, script.slice(lockfile_buf)) catch unreachable, + .package_name = package_name, + }; + if (first_script_index == -1) first_script_index = @intCast(script_index); + scripts[script_index] = entry; + @field(lockfile.scripts, hook).append(lockfile.allocator, entry) catch unreachable; + counter += 1; + } + script_index += 1; } } + + switch (resolution_tag) { + .git, .github, .gitlab, .root => { + const prepare_scripts = .{ + "preprepare", + "prepare", + "postprepare", + }; + + inline for (prepare_scripts) |hook| { + const script = @field(this, hook); + if (!script.isEmpty()) { + const entry: Lockfile.Scripts.Entry = .{ + .cwd = cwd orelse brk: { + cwd = lockfile.allocator.dupe(u8, _cwd) catch unreachable; + break :brk cwd.?; + }, + .script = lockfile.allocator.dupe(u8, script.slice(lockfile_buf)) catch unreachable, + .package_name = package_name, + }; + if (first_script_index == -1) first_script_index = @intCast(script_index); + scripts[script_index] = entry; + @field(lockfile.scripts, hook).append(lockfile.allocator, entry) catch unreachable; + counter += 1; + } + script_index += 1; + } + }, + .workspace => { + script_index += 1; + if (!this.prepare.isEmpty()) { + const entry: Lockfile.Scripts.Entry = .{ + .cwd = cwd orelse brk: { + cwd = lockfile.allocator.dupe(u8, _cwd) catch unreachable; + break :brk cwd.?; + }, + .script = lockfile.allocator.dupe(u8, this.prepare.slice(lockfile_buf)) catch unreachable, + .package_name = package_name, + }; + if (first_script_index == -1) first_script_index = @intCast(script_index); + scripts[script_index] = entry; + lockfile.scripts.prepare.append(lockfile.allocator, entry) catch unreachable; + counter += 1; + } + script_index += 2; + }, + else => {}, + } + + if (first_script_index != -1) { + return .{ + .items = scripts, + .first_index = @intCast(first_script_index), + .total = counter, + }; + } + + return null; } pub fn parseCount(allocator: Allocator, builder: 
*Lockfile.StringBuilder, json: Expr) void { if (json.asProperty("scripts")) |scripts_prop| { if (scripts_prop.expr.data == .e_object) { - inline for (Package.Scripts.Hooks) |script_name| { + inline for (Lockfile.Scripts.names) |script_name| { if (scripts_prop.expr.get(script_name)) |script| { if (script.asString(allocator)) |input| { builder.count(input); @@ -2364,7 +2518,7 @@ pub const Package = extern struct { pub fn parseAlloc(this: *Package.Scripts, allocator: Allocator, builder: *Lockfile.StringBuilder, json: Expr) void { if (json.asProperty("scripts")) |scripts_prop| { if (scripts_prop.expr.data == .e_object) { - inline for (Package.Scripts.Hooks) |script_name| { + inline for (Lockfile.Scripts.names) |script_name| { if (scripts_prop.expr.get(script_name)) |script| { if (script.asString(allocator)) |input| { @field(this, script_name) = builder.append(String, input); @@ -2381,8 +2535,17 @@ pub const Package = extern struct { lockfile: *Lockfile, node_modules: std.fs.Dir, subpath: [:0]const u8, - cwd: string, - ) !void { + name: string, + resolution: *const Resolution, + ) !?Package.Scripts.List { + var path_buf: [bun.MAX_PATH_BYTES]u8 = undefined; + + const cwd = Path.joinAbsString( + bun.getFdPath(bun.toFD(node_modules.fd), &path_buf) catch unreachable, + &[_]string{subpath}, + .auto, + ); + var json_file_fd = try bun.sys.openat( bun.toFD(node_modules.fd), bun.path.joinZ([_]string{ subpath, "package.json" }, .auto), @@ -2410,7 +2573,29 @@ pub const Package = extern struct { try builder.allocate(); this.parseAlloc(lockfile.allocator, &builder, json); - this.enqueue(lockfile, tmp.buffers.string_bytes.items, cwd); + const node_modules_path = bun.getFdPath(bun.toFD(node_modules.fd), &path_buf) catch unreachable; + + const add_node_gyp_rebuild_script = if (lockfile.hasTrustedDependency(name) and + this.install.isEmpty() and + this.postinstall.isEmpty()) + brk: { + const binding_dot_gyp_path = Path.joinAbsStringZ( + node_modules_path, + &[_]string{ subpath, 
"binding.gyp" }, + .posix, + ); + + break :brk bun.sys.exists(binding_dot_gyp_path); + } else false; + + return this.enqueue( + lockfile, + tmp.buffers.string_bytes.items, + cwd, + name, + resolution.tag, + add_node_gyp_rebuild_script, + ); } }; @@ -2870,6 +3055,8 @@ pub const Package = extern struct { update: u32 = 0, overrides_changed: bool = false, + new_trusted_dependencies: NameHashSet = .{}, + pub inline fn sum(this: *Summary, that: Summary) void { this.add += that.add; this.remove += that.remove; @@ -2914,6 +3101,15 @@ pub const Package = extern struct { } } + { + var to_lockfile_itr = to_lockfile.trusted_dependencies.iterator(); + while (to_lockfile_itr.next()) |entry| { + if (!from_lockfile.trusted_dependencies.contains(entry.key_ptr.*)) { + try summary.new_trusted_dependencies.put(allocator, entry.key_ptr.*, {}); + } + } + } + for (from_deps, 0..) |*from_dep, i| { found: { const prev_i = to_i; @@ -3009,7 +3205,7 @@ pub const Package = extern struct { summary.add = @truncate((to_deps.len + skipped_workspaces) - (from_deps.len - summary.remove)); - inline for (Package.Scripts.Hooks) |hook| { + inline for (Lockfile.Scripts.names) |hook| { if (!@field(to.scripts, hook).eql( @field(from.scripts, hook), to_lockfile.buffers.string_bytes.items, @@ -5253,6 +5449,37 @@ pub fn resolve(this: *Lockfile, package_name: []const u8, version: Dependency.Ve return null; } +/// The default list of trusted dependencies is a static hashmap +const default_trusted_dependencies = brk: { + const max_values = 512; + + var map: StaticHashMap([]const u8, u0, std.hash_map.StringContext, max_values) = .{}; + + // This file contains a list of dependencies that Bun runs `postinstall` on by default. 
+ const data = @embedFile("./default-trusted-dependencies.txt"); + @setEvalBranchQuota(99999); + + var iter = std.mem.tokenizeAny(u8, data, " \n\t"); + while (iter.next()) |dep| { + if (map.len == max_values) { + @compileError("default-trusted-dependencies.txt is too large, please increase 'max_values' in lockfile.zig"); + } + map.putAssumeCapacity(dep, 0); + } + + break :brk &map; +}; + +pub fn hasTrustedDependency(this: *Lockfile, name: []const u8) bool { + if (this.hasTrustedDependencies()) { + const hash = @as(u32, @truncate(String.Builder.stringHash(name))); + return this.trusted_dependencies.contains(hash) or default_trusted_dependencies.has(name); + } + + // always search through default trusted dependencies + return default_trusted_dependencies.has(name); +} + pub fn jsonStringifyDependency(this: *const Lockfile, w: anytype, dep: Dependency, res: ?PackageID) !void { const sb = this.buffers.string_bytes.items; var buf: [2048]u8 = undefined; diff --git a/src/install/resolution.zig b/src/install/resolution.zig index 84d43ff1a3..cbdb88c397 100644 --- a/src/install/resolution.zig +++ b/src/install/resolution.zig @@ -23,6 +23,10 @@ pub const Resolution = extern struct { }; } + pub fn isGit(this: *const Resolution) bool { + return this.tag.isGit(); + } + pub fn order( lhs: *const Resolution, rhs: *const Resolution, @@ -332,5 +336,9 @@ pub const Resolution = extern struct { single_file_module = 100, _, + + pub fn isGit(this: Tag) bool { + return this == .git or this == .github or this == .gitlab; + } }; }; diff --git a/src/sys.zig b/src/sys.zig index 31e4676611..180486fb19 100644 --- a/src/sys.zig +++ b/src/sys.zig @@ -619,15 +619,39 @@ pub fn closeAllowingStdoutAndStderr(fd: bun.FileDescriptor) ?Syscall.Error { if (comptime Environment.isMac) { // This avoids the EINTR problem. 
return switch (system.getErrno(system.@"close$NOCANCEL"(fd))) { - .BADF => Syscall.Error{ .errno = @intFromEnum(os.E.BADF), .syscall = .close }, - else => null, + // "fd isn't a valid open file descriptor." + .BADF => { + if (Environment.isDebug) { + bun.Output.prettyErrorln("close({d}) = EBADF", .{fd}); + std.debug.dumpCurrentStackTrace(null); + } + return Syscall.Error{ .errno = @intFromEnum(os.E.BADF), .syscall = .close }; + }, + else => |err| { + if (Environment.isDebug and err != .SUCCESS) { + bun.Output.prettyErrorln("close({d}) = {s}", .{ fd, @tagName(err) }); + } + return null; + }, }; } if (comptime Environment.isLinux) { return switch (linux.getErrno(linux.close(fd))) { - .BADF => Syscall.Error{ .errno = @intFromEnum(os.E.BADF), .syscall = .close }, - else => null, + // "fd isn't a valid open file descriptor." + .BADF => { + if (Environment.isDebug) { + bun.Output.prettyErrorln("close({d}) = EBADF", .{fd}); + std.debug.dumpCurrentStackTrace(null); + } + return Syscall.Error{ .errno = @intFromEnum(os.E.BADF), .syscall = .close }; + }, + else => |err| { + if (Environment.isDebug and err != .SUCCESS) { + bun.Output.prettyErrorln("close({d}) = {s}", .{ fd, @tagName(err) }); + } + return null; + }, }; } @@ -635,6 +659,7 @@ pub fn closeAllowingStdoutAndStderr(fd: bun.FileDescriptor) ?Syscall.Error { std.debug.assert(fd != 0); if (kernel32.CloseHandle(bun.fdcast(fd)) == 0) { + log("close({}) = FAILED", .{fd}); return Syscall.Error{ .errno = @intFromEnum(os.E.BADF), .syscall = .close }; } diff --git a/test/cli/install/bun-add.test.ts b/test/cli/install/bun-add.test.ts index 9f56f5eee6..400bc59e32 100644 --- a/test/cli/install/bun-add.test.ts +++ b/test/cli/install/bun-add.test.ts @@ -61,7 +61,9 @@ it("should add existing package", async () => { }); expect(stderr).toBeDefined(); const err = await new Response(stderr).text(); - expect(err.replace(/^(.*?) 
v[^\n]+/, "$1").split(/\r?\n/)).toEqual(["bun add", " Saved lockfile", ""]); + expect(err).not.toContain("error:"); + expect(err).toContain("bun add"); + expect(err).toContain("Saved lockfile"); expect(stdout).toBeDefined(); const out = await new Response(stdout).text(); expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ @@ -270,6 +272,7 @@ it("should add dependency with capital letters", async () => { }); expect(stderr).toBeDefined(); const err = await new Response(stderr).text(); + expect(err).not.toContain("error:"); expect(err).toContain("Saved lockfile"); expect(stdout).toBeDefined(); const out = await new Response(stdout).text(); @@ -325,6 +328,7 @@ it("should add exact version", async () => { }); expect(stderr).toBeDefined(); const err = await new Response(stderr).text(); + expect(err).not.toContain("error:"); expect(err).toContain("Saved lockfile"); expect(stdout).toBeDefined(); const out = await new Response(stdout).text(); @@ -443,6 +447,7 @@ it("should add dependency with specified semver", async () => { }); expect(stderr).toBeDefined(); const err = await new Response(stderr).text(); + expect(err).not.toContain("error:"); expect(err).toContain("Saved lockfile"); expect(stdout).toBeDefined(); const out = await new Response(stdout).text(); @@ -504,6 +509,7 @@ it("should add dependency (GitHub)", async () => { }); expect(stderr).toBeDefined(); const err = await new Response(stderr).text(); + expect(err).not.toContain("error:"); expect(err).toContain("Saved lockfile"); expect(stdout).toBeDefined(); const out = await new Response(stdout).text(); @@ -591,6 +597,7 @@ it("should add dependency alongside workspaces", async () => { }); expect(stderr).toBeDefined(); const err = await new Response(stderr).text(); + expect(err).not.toContain("error:"); expect(err).toContain("Saved lockfile"); expect(stdout).toBeDefined(); const out = await new Response(stdout).text(); @@ -664,6 +671,7 @@ it("should add aliased dependency (npm)", async () => { 
}); expect(stderr).toBeDefined(); const err = await new Response(stderr).text(); + expect(err).not.toContain("error:"); expect(err).toContain("Saved lockfile"); expect(stdout).toBeDefined(); const out = await new Response(stdout).text(); @@ -725,6 +733,7 @@ it("should add aliased dependency (GitHub)", async () => { }); expect(stderr).toBeDefined(); const err = await new Response(stderr).text(); + expect(err).not.toContain("error:"); expect(err).toContain("Saved lockfile"); expect(stdout).toBeDefined(); const out = await new Response(stdout).text(); @@ -810,6 +819,7 @@ it("should let you add the same package twice", async () => { }); expect(stderr1).toBeDefined(); const err1 = await new Response(stderr1).text(); + expect(err1).not.toContain("error:"); expect(err1).toContain("Saved lockfile"); expect(stdout1).toBeDefined(); const out1 = await new Response(stdout1).text(); @@ -862,6 +872,7 @@ it("should let you add the same package twice", async () => { }); expect(stderr2).toBeDefined(); const err2 = await new Response(stderr2).text(); + expect(err2).not.toContain("error:"); expect(err2).toContain("Saved lockfile"); expect(stdout2).toBeDefined(); const out2 = await new Response(stdout2).text(); @@ -925,6 +936,7 @@ it("should install version tagged with `latest` by default", async () => { }); expect(stderr1).toBeDefined(); const err1 = await new Response(stderr1).text(); + expect(err1).not.toContain("error:"); expect(err1).toContain("Saved lockfile"); expect(stdout1).toBeDefined(); const out1 = await new Response(stdout1).text(); @@ -977,6 +989,7 @@ it("should install version tagged with `latest` by default", async () => { }); expect(stderr2).toBeDefined(); const err2 = await new Response(stderr2).text(); + expect(err2).not.toContain("error:"); expect(err2).toContain("Saved lockfile"); expect(stdout2).toBeDefined(); const out2 = await new Response(stdout2).text(); @@ -1036,6 +1049,7 @@ it("should handle Git URL in dependencies (SCP-style)", async () => { }); 
expect(stderr1).toBeDefined(); const err1 = await new Response(stderr1).text(); + expect(err1).not.toContain("error:"); expect(err1).toContain("Saved lockfile"); expect(stdout1).toBeDefined(); let out1 = await new Response(stdout1).text(); @@ -1102,6 +1116,7 @@ it("should handle Git URL in dependencies (SCP-style)", async () => { }); expect(stderr2).toBeDefined(); const err2 = await new Response(stderr2).text(); + expect(err2).not.toContain("error:"); expect(err2).not.toContain("Saved lockfile"); expect(stdout2).toBeDefined(); const out2 = await new Response(stdout2).text(); @@ -1183,6 +1198,7 @@ it("should prefer optionalDependencies over dependencies of the same name", asyn }); expect(stderr).toBeDefined(); const err = await new Response(stderr).text(); + expect(err).not.toContain("error:"); expect(err).toContain("Saved lockfile"); expect(stdout).toBeDefined(); const out = await new Response(stdout).text(); @@ -1243,6 +1259,7 @@ it("should prefer dependencies over peerDependencies of the same name", async () }); expect(stderr).toBeDefined(); const err = await new Response(stderr).text(); + expect(err).not.toContain("error:"); expect(err).toContain("Saved lockfile"); expect(stdout).toBeDefined(); const out = await new Response(stdout).text(); @@ -1301,6 +1318,7 @@ it("should add dependency without duplication", async () => { }); expect(stderr1).toBeDefined(); const err1 = await new Response(stderr1).text(); + expect(err1).not.toContain("error:"); expect(err1).toContain("Saved lockfile"); expect(stdout1).toBeDefined(); const out1 = await new Response(stdout1).text(); @@ -1350,6 +1368,7 @@ it("should add dependency without duplication", async () => { }); expect(stderr2).toBeDefined(); const err2 = await new Response(stderr2).text(); + expect(err2).not.toContain("error:"); expect(err2).toContain("Saved lockfile"); expect(stdout2).toBeDefined(); const out2 = await new Response(stdout2).text(); @@ -1401,6 +1420,7 @@ it("should add dependency without duplication 
(GitHub)", async () => { }); expect(stderr1).toBeDefined(); const err1 = await new Response(stderr1).text(); + expect(err1).not.toContain("error:"); expect(err1).toContain("Saved lockfile"); expect(stdout1).toBeDefined(); const out1 = await new Response(stdout1).text(); @@ -1462,6 +1482,7 @@ it("should add dependency without duplication (GitHub)", async () => { }); expect(stderr2).toBeDefined(); const err2 = await new Response(stderr2).text(); + expect(err2).not.toContain("error:"); expect(err2).toContain("Saved lockfile"); expect(stdout2).toBeDefined(); const out2 = await new Response(stdout2).text(); @@ -1540,6 +1561,7 @@ it("should add dependencies to workspaces directly", async () => { }); expect(stderr).toBeDefined(); const err = await new Response(stderr).text(); + expect(err).not.toContain("error:"); expect(err).toContain("Saved lockfile"); expect(stdout).toBeDefined(); const out = await new Response(stdout).text(); @@ -1603,7 +1625,9 @@ async function installRedirectsToAdd(saveFlagFirst: boolean) { }); expect(stderr).toBeDefined(); const err = await new Response(stderr).text(); - expect(err.replace(/^(.*?) 
v[^\n]+/, "$1").split(/\r?\n/)).toEqual(["bun add", " Saved lockfile", ""]); + expect(err).not.toContain("error:"); + expect(err).toContain("bun add"); + expect(err).toContain("Saved lockfile"); expect(stdout).toBeDefined(); const out = await new Response(stdout).text(); expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ diff --git a/test/cli/install/bun-install.test.ts b/test/cli/install/bun-install.test.ts index b9d2f65c78..ee319f160b 100644 --- a/test/cli/install/bun-install.test.ts +++ b/test/cli/install/bun-install.test.ts @@ -1,6 +1,6 @@ import { file, listen, Socket, spawn } from "bun"; import { afterAll, afterEach, beforeAll, beforeEach, expect, it, describe, test } from "bun:test"; -import { bunExe, bunEnv as env, ignoreMimallocWarning, withoutMimalloc } from "harness"; +import { bunExe, bunEnv as env } from "harness"; import { access, mkdir, readlink, realpath, rm, writeFile } from "fs/promises"; import { join } from "path"; import { @@ -21,8 +21,6 @@ afterAll(dummyAfterAll); beforeEach(dummyBeforeEach); afterEach(dummyAfterEach); -ignoreMimallocWarning({ beforeAll, afterAll }); - describe("chooses", () => { async function runTest(latest: string, range: string, chosen = "0.0.5") { const exeName: string = { @@ -865,12 +863,12 @@ it("should handle life-cycle scripts within workspaces", async () => { name: "Foo", version: "0.0.1", scripts: { - install: [bunExe(), "index.js"].join(" "), + install: [bunExe(), "install.js"].join(" "), }, workspaces: ["bar"], }), ); - await writeFile(join(package_dir, "index.js"), 'console.log("[scripts:run] Foo");'); + await writeFile(join(package_dir, "install.js"), 'await require("fs/promises").writeFile("foo.txt", "foo!");'); await mkdir(join(package_dir, "bar")); await writeFile( join(package_dir, "bar", "package.json"), @@ -878,11 +876,14 @@ it("should handle life-cycle scripts within workspaces", async () => { name: "Bar", version: "0.0.2", scripts: { - preinstall: [bunExe(), "index.js"].join(" "), 
+ preinstall: [bunExe(), "preinstall.js"].join(" "), }, }), ); - await writeFile(join(package_dir, "bar", "index.js"), 'console.log("[scripts:run] Bar");'); + await writeFile( + join(package_dir, "bar", "preinstall.js"), + 'await require("fs/promises").writeFile("bar.txt", "bar!");', + ); const { stdout, stderr, exited } = spawn({ cmd: [bunExe(), "install"], cwd: package_dir, @@ -893,13 +894,10 @@ it("should handle life-cycle scripts within workspaces", async () => { }); expect(stderr).toBeDefined(); const err = await new Response(stderr).text(); - expect(err).toContain("Saved lockfile"); expect(stdout).toBeDefined(); const out = await new Response(stdout).text(); expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - "[scripts:run] Bar", " + Bar@workspace:bar", - "[scripts:run] Foo", "", " 1 package installed", ]); @@ -907,6 +905,8 @@ it("should handle life-cycle scripts within workspaces", async () => { expect(requested).toBe(0); expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([".cache", "Bar"]); expect(await readlink(join(package_dir, "node_modules", "Bar"))).toBe(join("..", "bar")); + expect(await file(join(package_dir, "foo.txt")).text()).toBe("foo!"); + expect(await file(join(package_dir, "bar", "bar.txt")).text()).toBe("bar!"); await access(join(package_dir, "bun.lockb")); }); @@ -919,7 +919,7 @@ it("should handle life-cycle scripts during re-installation", async () => { name: "Foo", version: "0.0.1", scripts: { - install: [bunExe(), "index.js"].join(" "), + install: [bunExe(), "foo-install.js"].join(" "), }, dependencies: { qux: "^0.0", @@ -928,7 +928,7 @@ it("should handle life-cycle scripts during re-installation", async () => { workspaces: ["bar"], }), ); - await writeFile(join(package_dir, "index.js"), 'console.log("[scripts:run] Foo");'); + await writeFile(join(package_dir, "foo-install.js"), 'await require("fs/promises").writeFile("foo.txt", "foo!");'); await mkdir(join(package_dir, "bar")); await 
writeFile( join(package_dir, "bar", "package.json"), @@ -936,11 +936,14 @@ it("should handle life-cycle scripts during re-installation", async () => { name: "Bar", version: "0.0.2", scripts: { - preinstall: [bunExe(), "index.js"].join(" "), + preinstall: [bunExe(), "bar-preinstall.js"].join(" "), }, }), ); - await writeFile(join(package_dir, "bar", "index.js"), 'console.log("[scripts:run] Bar");'); + await writeFile( + join(package_dir, "bar", "bar-preinstall.js"), + 'await require("fs/promises").writeFile("bar.txt", "bar!");', + ); const { stdout: stdout1, stderr: stderr1, @@ -955,16 +958,12 @@ it("should handle life-cycle scripts during re-installation", async () => { }); expect(stderr1).toBeDefined(); const err1 = await new Response(stderr1).text(); - expect(err1).not.toContain("error:"); expect(err1).toContain("Saved lockfile"); expect(stdout1).toBeDefined(); const out1 = await new Response(stdout1).text(); expect(out1.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - "[scripts:run] Bar", " + Bar@workspace:bar", " + qux@0.0.2", - "[scripts:run] Foo", - "[scripts:run] Qux", "", " 2 packages installed", ]); @@ -972,6 +971,8 @@ it("should handle life-cycle scripts during re-installation", async () => { expect(requested).toBe(2); expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([".cache", "Bar", "qux"]); expect(await readlink(join(package_dir, "node_modules", "Bar"))).toBe(join("..", "bar")); + expect(await file(join(package_dir, "foo.txt")).text()).toBe("foo!"); + expect(await file(join(package_dir, "bar", "bar.txt")).text()).toBe("bar!"); await access(join(package_dir, "bun.lockb")); // Perform `bun install` again but with lockfile from before await rm(join(package_dir, "node_modules"), { force: true, recursive: true }); @@ -994,11 +995,8 @@ it("should handle life-cycle scripts during re-installation", async () => { expect(stdout2).toBeDefined(); const out2 = await new Response(stdout2).text(); 
expect(out2.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - "[scripts:run] Bar", " + Bar@workspace:bar", " + qux@0.0.2", - "[scripts:run] Foo", - "[scripts:run] Qux", "", " 2 packages installed", ]); @@ -1006,6 +1004,8 @@ it("should handle life-cycle scripts during re-installation", async () => { expect(requested).toBe(3); expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([".cache", "Bar", "qux"]); expect(await readlink(join(package_dir, "node_modules", "Bar"))).toBe(join("..", "bar")); + expect(await file(join(package_dir, "foo.txt")).text()).toBe("foo!"); + expect(await file(join(package_dir, "bar", "bar.txt")).text()).toBe("bar!"); await access(join(package_dir, "bun.lockb")); // Perform `bun install --production` with lockfile from before await rm(join(package_dir, "node_modules"), { force: true, recursive: true }); @@ -1028,11 +1028,8 @@ it("should handle life-cycle scripts during re-installation", async () => { expect(stdout3).toBeDefined(); const out3 = await new Response(stdout3).text(); expect(out3.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - "[scripts:run] Bar", " + Bar@workspace:bar", " + qux@0.0.2", - "[scripts:run] Foo", - "[scripts:run] Qux", "", " 2 packages installed", ]); @@ -1040,6 +1037,8 @@ it("should handle life-cycle scripts during re-installation", async () => { expect(requested).toBe(4); expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([".cache", "Bar", "qux"]); expect(await readlink(join(package_dir, "node_modules", "Bar"))).toBe(join("..", "bar")); + expect(await file(join(package_dir, "foo.txt")).text()).toBe("foo!"); + expect(await file(join(package_dir, "bar", "bar.txt")).text()).toBe("bar!"); await access(join(package_dir, "bun.lockb")); }); @@ -1049,23 +1048,26 @@ it("should use updated life-cycle scripts in root during re-installation", async JSON.stringify({ name: "Foo", scripts: { - install: [bunExe(), "foo.js"].join(" "), + install: [bunExe(), 
"foo-install.js"].join(" "), }, workspaces: ["bar"], }), ); - await writeFile(join(package_dir, "foo.js"), 'console.log("[scripts:run] Foo");'); + await writeFile(join(package_dir, "foo-install.js"), 'await require("fs/promises").writeFile("foo.txt", "foo!");'); await mkdir(join(package_dir, "bar")); await writeFile( join(package_dir, "bar", "package.json"), JSON.stringify({ name: "Bar", scripts: { - preinstall: [bunExe(), "bar.js"].join(" "), + preinstall: [bunExe(), "bar-preinstall.js"].join(" "), }, }), ); - await writeFile(join(package_dir, "bar", "bar.js"), 'console.log("[scripts:run] Bar");'); + await writeFile( + join(package_dir, "bar", "bar-preinstall.js"), + 'await require("fs/promises").writeFile("bar.txt", "bar!");', + ); const { stdout: stdout1, stderr: stderr1, @@ -1085,9 +1087,7 @@ it("should use updated life-cycle scripts in root during re-installation", async expect(stdout1).toBeDefined(); const out1 = await new Response(stdout1).text(); expect(out1.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - "[scripts:run] Bar", " + Bar@workspace:bar", - "[scripts:run] Foo", "", " 1 package installed", ]); @@ -1095,7 +1095,10 @@ it("should use updated life-cycle scripts in root during re-installation", async expect(requested).toBe(0); expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([".cache", "Bar"]); expect(await readlink(join(package_dir, "node_modules", "Bar"))).toBe(join("..", "bar")); + expect(await file(join(package_dir, "foo.txt")).text()).toBe("foo!"); + expect(await file(join(package_dir, "bar", "bar.txt")).text()).toBe("bar!"); await access(join(package_dir, "bun.lockb")); + // Perform `bun install` with outdated lockfile await rm(join(package_dir, "node_modules"), { force: true, recursive: true }); await writeFile( @@ -1103,13 +1106,17 @@ it("should use updated life-cycle scripts in root during re-installation", async JSON.stringify({ name: "Foo", scripts: { - install: [bunExe(), "moo.js"].join(" "), - 
postinstall: [bunExe(), "foo.js"].join(" "), + install: [bunExe(), "foo-install2.js"].join(" "), + postinstall: [bunExe(), "foo-postinstall.js"].join(" "), }, workspaces: ["bar"], }), ); - await writeFile(join(package_dir, "moo.js"), 'console.log("[scripts:run] Moo");'); + await writeFile(join(package_dir, "foo-install2.js"), 'await require("fs/promises").writeFile("foo2.txt", "foo2!");'); + await writeFile( + join(package_dir, "foo-postinstall.js"), + 'await require("fs/promises").writeFile("foo-postinstall.txt", "foo!");', + ); const { stdout: stdout2, stderr: stderr2, @@ -1129,10 +1136,7 @@ it("should use updated life-cycle scripts in root during re-installation", async expect(stdout2).toBeDefined(); const out2 = await new Response(stdout2).text(); expect(out2.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - "[scripts:run] Bar", " + Bar@workspace:bar", - "[scripts:run] Moo", - "[scripts:run] Foo", "", " 1 package installed", ]); @@ -1140,6 +1144,10 @@ it("should use updated life-cycle scripts in root during re-installation", async expect(requested).toBe(0); expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([".cache", "Bar"]); expect(await readlink(join(package_dir, "node_modules", "Bar"))).toBe(join("..", "bar")); + expect(await file(join(package_dir, "foo2.txt")).text()).toBe("foo2!"); + expect(await file(join(package_dir, "bar", "bar.txt")).text()).toBe("bar!"); + expect(await file(join(package_dir, "foo-postinstall.txt")).text()).toBe("foo!"); + await access(join(package_dir, "bun.lockb")); // Perform `bun install --production` with lockfile from before const bun_lockb = await file(join(package_dir, "bun.lockb")).arrayBuffer(); @@ -1160,13 +1168,11 @@ it("should use updated life-cycle scripts in root during re-installation", async const err3 = await new Response(stderr3).text(); expect(err3).not.toContain("error:"); expect(err3).not.toContain("Saved lockfile"); + expect(stdout3).toBeDefined(); const out3 = await new 
Response(stdout3).text(); expect(out3.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - "[scripts:run] Bar", " + Bar@workspace:bar", - "[scripts:run] Moo", - "[scripts:run] Foo", "", " 1 package installed", ]); @@ -1175,6 +1181,9 @@ it("should use updated life-cycle scripts in root during re-installation", async expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual(["Bar"]); expect(await readlink(join(package_dir, "node_modules", "Bar"))).toBe(join("..", "bar")); expect(await file(join(package_dir, "bun.lockb")).arrayBuffer()).toEqual(bun_lockb); + expect(await file(join(package_dir, "foo2.txt")).text()).toBe("foo2!"); + expect(await file(join(package_dir, "bar", "bar.txt")).text()).toBe("bar!"); + expect(await file(join(package_dir, "foo-postinstall.txt")).text()).toBe("foo!"); }); it("should use updated life-cycle scripts in dependency during re-installation", async () => { @@ -1183,23 +1192,26 @@ it("should use updated life-cycle scripts in dependency during re-installation", JSON.stringify({ name: "Foo", scripts: { - install: [bunExe(), "foo.js"].join(" "), + install: [bunExe(), "foo-install.js"].join(" "), }, workspaces: ["bar"], }), ); - await writeFile(join(package_dir, "foo.js"), 'console.log("[scripts:run] Foo");'); + await writeFile(join(package_dir, "foo-install.js"), "await require('fs/promises').writeFile('foo.txt', 'foo!');"); await mkdir(join(package_dir, "bar")); await writeFile( join(package_dir, "bar", "package.json"), JSON.stringify({ name: "Bar", scripts: { - preinstall: [bunExe(), "bar.js"].join(" "), + preinstall: [bunExe(), "bar-preinstall.js"].join(" "), }, }), ); - await writeFile(join(package_dir, "bar", "bar.js"), 'console.log("[scripts:run] Bar");'); + await writeFile( + join(package_dir, "bar", "bar-preinstall.js"), + 'await require("fs/promises").writeFile("bar.txt", "bar!");', + ); const { stdout: stdout1, stderr: stderr1, @@ -1219,9 +1231,7 @@ it("should use updated life-cycle scripts in dependency 
during re-installation", expect(stdout1).toBeDefined(); const out1 = await new Response(stdout1).text(); expect(out1.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - "[scripts:run] Bar", " + Bar@workspace:bar", - "[scripts:run] Foo", "", " 1 package installed", ]); @@ -1229,20 +1239,31 @@ it("should use updated life-cycle scripts in dependency during re-installation", expect(requested).toBe(0); expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([".cache", "Bar"]); expect(await readlink(join(package_dir, "node_modules", "Bar"))).toBe(join("..", "bar")); + expect(await file(join(package_dir, "foo.txt")).text()).toBe("foo!"); + expect(await file(join(package_dir, "bar", "bar.txt")).text()).toBe("bar!"); await access(join(package_dir, "bun.lockb")); // Perform `bun install` with outdated lockfile await rm(join(package_dir, "node_modules"), { force: true, recursive: true }); + await rm(join(package_dir, "foo.txt")); + await rm(join(package_dir, "bar", "bar.txt")); await writeFile( join(package_dir, "bar", "package.json"), JSON.stringify({ name: "Bar", scripts: { - preinstall: [bunExe(), "baz.js"].join(" "), - postinstall: [bunExe(), "bar.js"].join(" "), + preinstall: [bunExe(), "bar-preinstall.js"].join(" "), + postinstall: [bunExe(), "bar-postinstall.js"].join(" "), }, }), ); - await writeFile(join(package_dir, "bar", "baz.js"), 'console.log("[scripts:run] Baz");'); + await writeFile( + join(package_dir, "bar", "bar-preinstall.js"), + 'await require("fs/promises").writeFile("bar-preinstall.txt", "bar preinstall!");', + ); + await writeFile( + join(package_dir, "bar", "bar-postinstall.js"), + 'await require("fs/promises").writeFile("bar-postinstall.txt", "bar postinstall!");', + ); const { stdout: stdout2, stderr: stderr2, @@ -1262,10 +1283,7 @@ it("should use updated life-cycle scripts in dependency during re-installation", expect(stdout2).toBeDefined(); const out2 = await new Response(stdout2).text(); 
expect(out2.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - "[scripts:run] Baz", " + Bar@workspace:bar", - "[scripts:run] Foo", - "[scripts:run] Bar", "", " 1 package installed", ]); @@ -1273,10 +1291,17 @@ it("should use updated life-cycle scripts in dependency during re-installation", expect(requested).toBe(0); expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([".cache", "Bar"]); expect(await readlink(join(package_dir, "node_modules", "Bar"))).toBe(join("..", "bar")); + expect(await file(join(package_dir, "foo.txt")).text()).toBe("foo!"); + expect(await file(join(package_dir, "bar", "bar-preinstall.txt")).text()).toBe("bar preinstall!"); + expect(await file(join(package_dir, "bar", "bar-postinstall.txt")).text()).toBe("bar postinstall!"); await access(join(package_dir, "bun.lockb")); + // Perform `bun install --production` with lockfile from before const bun_lockb = await file(join(package_dir, "bun.lockb")).arrayBuffer(); await rm(join(package_dir, "node_modules"), { force: true, recursive: true }); + await rm(join(package_dir, "foo.txt")); + await rm(join(package_dir, "bar", "bar-preinstall.txt")); + await rm(join(package_dir, "bar", "bar-postinstall.txt")); const { stdout: stdout3, stderr: stderr3, @@ -1296,10 +1321,7 @@ it("should use updated life-cycle scripts in dependency during re-installation", expect(stdout3).toBeDefined(); const out3 = await new Response(stdout3).text(); expect(out3.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - "[scripts:run] Baz", " + Bar@workspace:bar", - "[scripts:run] Foo", - "[scripts:run] Bar", "", " 1 package installed", ]); @@ -1308,6 +1330,9 @@ it("should use updated life-cycle scripts in dependency during re-installation", expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual(["Bar"]); expect(await readlink(join(package_dir, "node_modules", "Bar"))).toBe(join("..", "bar")); expect(await file(join(package_dir, 
"bun.lockb")).arrayBuffer()).toEqual(bun_lockb); + expect(await file(join(package_dir, "foo.txt")).text()).toBe("foo!"); + expect(await file(join(package_dir, "bar", "bar-preinstall.txt")).text()).toBe("bar preinstall!"); + expect(await file(join(package_dir, "bar", "bar-postinstall.txt")).text()).toBe("bar postinstall!"); }); it("should ignore workspaces within workspaces", async () => { @@ -3922,7 +3947,7 @@ cache = false join("..", "uglify-js", "bin", "uglifyjs"), ); await access(join(package_dir, "bun.lockb")); -}); +}, 20000); it("should consider peerDependencies during hoisting", async () => { const urls: string[] = []; @@ -5925,8 +5950,8 @@ cache = false }); expect(stderr1).toBeDefined(); const err1 = await new Response(stderr1).text(); - expect(err1).toContain("Saved lockfile"); expect(err1).not.toContain("error:"); + expect(err1).toContain("Saved lockfile"); expect(stdout1).toBeDefined(); const out1 = await new Response(stdout1).text(); expect(out1.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ @@ -6078,14 +6103,16 @@ cache = false }, 20000); it("should handle trustedDependencies", async () => { - const scripts = { - preinstall: `${bunExe()} echo.js preinstall`, - install: `${bunExe()} echo.js install`, - postinstall: `${bunExe()} echo.js postinstall`, - preprepare: `${bunExe()} echo.js preprepare`, - prepare: `${bunExe()} echo.js prepare`, - postprepare: `${bunExe()} echo.js postprepare`, - }; + function getScripts(name: string) { + return { + preinstall: `echo preinstall ${name}`, + install: `echo install ${name}`, + postinstall: `echo postinstall ${name}`, + preprepare: `echo preprepare ${name}`, + prepare: `echo prepare ${name}`, + postprepare: `echo postprepare ${name}`, + }; + } await writeFile( join(package_dir, "package.json"), JSON.stringify({ @@ -6102,18 +6129,16 @@ it("should handle trustedDependencies", async () => { const bar_package = JSON.stringify({ name: "bar", version: "0.2.0", - scripts, + scripts: getScripts("bar"), }); 
await writeFile(join(package_dir, "bar", "package.json"), bar_package); - await writeFile(join(package_dir, "bar", "echo.js"), "console.log(`bar|${process.argv[2]}|${import.meta.dir}`);"); await mkdir(join(package_dir, "moo")); const moo_package = JSON.stringify({ name: "moo", version: "0.3.0", - scripts, + scripts: getScripts("moo"), }); await writeFile(join(package_dir, "moo", "package.json"), moo_package); - await writeFile(join(package_dir, "moo", "echo.js"), "console.log(`moo|${process.argv[2]}|${import.meta.dir}`);"); const { stdout, stderr, exited } = spawn({ cmd: [bunExe(), "install"], cwd: package_dir, @@ -6124,27 +6149,22 @@ it("should handle trustedDependencies", async () => { }); expect(stderr).toBeDefined(); const err = await new Response(stderr).text(); + expect(err).not.toContain("error:"); expect(err).toContain("Saved lockfile"); expect(stdout).toBeDefined(); const out = await new Response(stdout).text(); const moo_dir = await realpath(join(package_dir, "node_modules", "moo")); expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - `moo|preinstall|${moo_dir}`, " + bar@bar", " + moo@moo", - `moo|install|${moo_dir}`, - `moo|postinstall|${moo_dir}`, - `moo|preprepare|${moo_dir}`, - `moo|prepare|${moo_dir}`, - `moo|postprepare|${moo_dir}`, "", " 2 packages installed", ]); expect(await exited).toBe(0); expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([".cache", "bar", "moo"]); - expect(await readdirSorted(join(package_dir, "node_modules", "bar"))).toEqual(["echo.js", "package.json"]); + expect(await readdirSorted(join(package_dir, "node_modules", "bar"))).toEqual(["package.json"]); expect(await file(join(package_dir, "node_modules", "bar", "package.json")).text()).toEqual(bar_package); - expect(await readdirSorted(join(package_dir, "node_modules", "moo"))).toEqual(["echo.js", "package.json"]); + expect(await readdirSorted(join(package_dir, "node_modules", "moo"))).toEqual(["package.json"]); expect(await 
file(join(package_dir, "node_modules", "moo", "package.json")).text()).toEqual(moo_package); await access(join(package_dir, "bun.lockb")); }); diff --git a/test/cli/install/migration/complex-workspace.test.ts b/test/cli/install/migration/complex-workspace.test.ts index ec4669a3b6..a860d142e5 100644 --- a/test/cli/install/migration/complex-workspace.test.ts +++ b/test/cli/install/migration/complex-workspace.test.ts @@ -44,7 +44,7 @@ test("the install succeeds", async () => { var subprocess = Bun.spawn([bunExe(), "reset.ts"], { env: bunEnv, cwd, - stdio: ["inherit", "inherit", "inherit"], + stdio: ["ignore", "ignore", "ignore"], }); await subprocess.exited; if (subprocess.exitCode != 0) { @@ -55,7 +55,7 @@ test("the install succeeds", async () => { subprocess = Bun.spawn([bunExe(), "install"], { env: bunEnv, cwd, - stdio: ["inherit", "inherit", "inherit"], + stdio: ["ignore", "ignore", "ignore"], }); await subprocess.exited; @@ -63,9 +63,7 @@ test("the install succeeds", async () => { cwd = false as any; throw new Error("Failed to install"); } - - console.log(cwd); -}); +}, 10000); // bun-types validate("node_modules/bun-types", "1.0.0"); diff --git a/test/cli/install/overrides.test.ts b/test/cli/install/overrides.test.ts index bd4ee5cf72..2fb6f71cc5 100644 --- a/test/cli/install/overrides.test.ts +++ b/test/cli/install/overrides.test.ts @@ -8,9 +8,9 @@ function install(cwd: string, args: string[]) { const exec = Bun.spawnSync({ cmd: [bunExe(), ...args], cwd, - stdout: "pipe", + stdout: "ignore", stdin: "ignore", - stderr: "inherit", + stderr: "ignore", env: bunEnv, }); if (exec.exitCode !== 0) { @@ -23,9 +23,9 @@ function installExpectFail(cwd: string, args: string[]) { const exec = Bun.spawnSync({ cmd: [bunExe(), ...args], cwd, - stdout: "pipe", + stdout: "ignore", stdin: "ignore", - stderr: "inherit", + stderr: "ignore", env: bunEnv, }); if (exec.exitCode === 0) { diff --git a/test/cli/install/qux-0.0.2.tgz b/test/cli/install/qux-0.0.2.tgz index 
0a2e6be7c99e954ec9e2f3cd0bdccf7305182024..c8f73df78b6b8a4c57266ab06a272fade6d8ba8e 100644 GIT binary patch literal 286 zcmV+(0pb21iwFRKQ#WM*1MQXFO2jY_g?rtn7;>ecHBBcCtWV*!coDkUrEW}Wvy~Ou zcQ>(FMUV=z^#}7o7&x7fNr#i^WV4ZLb&<^orBrYZFR>mYWjMwNl*mh7hed&wOjK0{ ze9oc{3~noXU&``POEZz@(^jjA@9`e5@sH5O{@)L~^VaxFYF*sA0qsV|Bw~UYZ=lyT2aaOBB+w}>#c1_=QTes*3a|2hyj+|cRKV4yBP7eN$(R<%c zx!_BD=Km6_=zkOy^5B2Qc=G>u@RS1(BkvSh0P#bBoCV=Q^{%xhXtbaO8`bzacs1B4Z2$lO literal 283 zcmV+$0p$K4iwFSNU?XJ!1MQX1O2jY_#(VBlgq#$lJ4uri)~DFBcnEEFsn(<>TUn5O zcc(U0@K8ahe=xtx2a_qC=Hr|8y1v&3v&mKj04gcTtEx$Xi%BDd3CTc7E;u8=cm*;e za?PP;hH%s#N7249cA*d3_WOnN_#JQhN9d#ft!>PyxC?8%1Mfo#vEaYp(SHRcljy%F zMU@d)W0;TpbN_2+gX>Juxx)p$3AJzgW7zt^-jdyLqLT4%=^6j1P~8n**h$EUE+TD+C6 h8{4dF44Ho-6pzpOIjxRFB9ZvLyZ{$9d3pd2004Q(j0gY# diff --git a/test/cli/install/registry/bun-install-registry.test.ts b/test/cli/install/registry/bun-install-registry.test.ts index 50d191a781..ba8b962f75 100644 --- a/test/cli/install/registry/bun-install-registry.test.ts +++ b/test/cli/install/registry/bun-install-registry.test.ts @@ -1,8 +1,8 @@ import { file, spawn } from "bun"; -import { bunExe, bunEnv as env, ignoreMimallocWarning } from "harness"; +import { bunExe, bunEnv as env } from "harness"; import { join } from "path"; import { mkdtempSync, realpathSync } from "fs"; -import { rm, writeFile, mkdir, exists, cp } from "fs/promises"; +import { rm, writeFile, mkdir, exists, cp, readdir } from "fs/promises"; import { readdirSorted } from "../dummy.registry"; import { tmpdir } from "os"; import { fork, ChildProcess } from "child_process"; @@ -10,22 +10,22 @@ import { beforeAll, afterAll, beforeEach, afterEach, test, expect, describe } fr var verdaccioServer: ChildProcess; var testCounter: number = 0; -var port: number = 4784; +var port: number = 4873; var packageDir: string; -ignoreMimallocWarning({ beforeAll, afterAll }); - -beforeAll(async done => { +beforeAll(async () => { verdaccioServer = fork( await import.meta.resolve("verdaccio/bin/verdaccio"), 
["-c", join(import.meta.dir, "verdaccio.yaml"), "-l", `${port}`], { silent: true, execPath: "bun" }, ); - verdaccioServer.on("message", (msg: { verdaccio_started: boolean }) => { - if (msg.verdaccio_started) { - done(); - } + await new Promise(done => { + verdaccioServer.on("message", (msg: { verdaccio_started: boolean }) => { + if (msg.verdaccio_started) { + done(); + } + }); }); }); @@ -467,9 +467,7 @@ test("it should install with missing bun.lockb, node_modules, and/or cache", asy expect(err).toContain("Saved lockfile"); expect(err).not.toContain("not found"); - if (!err.includes("mimalloc: warning")) { - expect(err).not.toContain("error:"); - } + expect(err).not.toContain("error:"); expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ "", expect.stringContaining("Checked 19 installs across 23 packages (no changes)"), @@ -630,9 +628,7 @@ describe("hoisting", async () => { var out = await new Response(stdout).text(); expect(err).toContain("Saved lockfile"); expect(err).not.toContain("not found"); - if (!err.includes("mimalloc: warning")) { - expect(err).not.toContain("error:"); - } + expect(err).not.toContain("error:"); for (const dep of Object.keys(dependencies)) { expect(out).toContain(` + ${dep}@${dependencies[dep]}`); } @@ -654,9 +650,7 @@ describe("hoisting", async () => { out = await new Response(stdout).text(); expect(err).toContain("Saved lockfile"); expect(err).not.toContain("not found"); - if (!err.includes("mimalloc: warning")) { - expect(err).not.toContain("error:"); - } + expect(err).not.toContain("error:"); expect(out).not.toContain("package installed"); expect(out).toContain(`Checked ${Object.keys(dependencies).length * 2} installs across`); expect(await exited).toBe(0); @@ -792,9 +786,7 @@ describe("hoisting", async () => { var out = await new Response(stdout).text(); expect(err).toContain("Saved lockfile"); expect(err).not.toContain("not found"); - if (!err.includes("mimalloc: warning")) { - 
expect(err).not.toContain("error:"); - } + expect(err).not.toContain("error:"); for (const dep of Object.keys(dependencies)) { expect(out).toContain(` + ${dep}@${dependencies[dep]}`); } @@ -816,9 +808,7 @@ describe("hoisting", async () => { out = await new Response(stdout).text(); expect(err).toContain("Saved lockfile"); expect(err).not.toContain("not found"); - if (!err.includes("mimalloc: warning")) { - expect(err).not.toContain("error:"); - } + expect(err).not.toContain("error:"); if (out.includes("installed")) { console.log("stdout:", out); } @@ -841,9 +831,7 @@ describe("hoisting", async () => { out = await new Response(stdout).text(); expect(err).not.toContain("Saved lockfile"); expect(err).not.toContain("not found"); - if (!err.includes("mimalloc: warning")) { - expect(err).not.toContain("error:"); - } + expect(err).not.toContain("error:"); expect(out).not.toContain("package installed"); expect(await exited).toBe(0); expect(await file(join(packageDir, "node_modules", "a-dep", "package.json")).text()).toContain(expected); @@ -1326,107 +1314,1270 @@ test("missing package on reinstall, some with binaries", async () => { ).toBe(join(packageDir, "node_modules", "uses-what-bin", "node_modules", ".bin", "what-bin")); }); -test("it should install and use correct binary version", async () => { - // this should install `what-bin` in two places: - // - // - node_modules/.bin/what-bin@1.5.0 - // - node_modules/uses-what-bin/node_modules/.bin/what-bin@1.0.0 +for (const forceWaiterThread of [false, true]) { + const testEnv = forceWaiterThread ? { ...env, BUN_FEATURE_FLAG_FORCE_WAITER_THREAD: "1" } : env; + describe("lifecycle scripts" + (forceWaiterThread ? 
" (waiter thread)" : ""), async () => { + test("root package with all lifecycle scripts", async () => { + const writeScript = async (name: string) => { + const contents = ` + import { writeFileSync, existsSync, rmSync } from "fs"; + import { join } from "path"; + + const file = join(import.meta.dir, "${name}.txt"); + + if (existsSync(file)) { + rmSync(file); + writeFileSync(file, "${name} exists!"); + } else { + writeFileSync(file, "${name}!"); + } + `; + await writeFile(join(packageDir, `${name}.js`), contents); + }; + await writeFile( + join(packageDir, "package.json"), + JSON.stringify({ + name: "foo", + version: "1.0.0", + scripts: { + preinstall: `${bunExe()} preinstall.js`, + install: `${bunExe()} install.js`, + postinstall: `${bunExe()} postinstall.js`, + preprepare: `${bunExe()} preprepare.js`, + prepare: `${bunExe()} prepare.js`, + postprepare: `${bunExe()} postprepare.js`, + }, + }), + ); + + await writeScript("preinstall"); + await writeScript("install"); + await writeScript("postinstall"); + await writeScript("preprepare"); + await writeScript("prepare"); + await writeScript("postprepare"); + + var { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: null, + stdin: "pipe", + stderr: "pipe", + env: testEnv, + }); + expect(await exited).toBe(0); + expect(stderr).toBeDefined(); + var err = await new Response(stderr).text(); + expect(stdout).toBeDefined(); + var out = await new Response(stdout).text(); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(await exists(join(packageDir, "preinstall.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "install.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "postinstall.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "preprepare.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "prepare.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "postprepare.txt"))).toBeTrue(); + expect(await 
file(join(packageDir, "preinstall.txt")).text()).toBe("preinstall!"); + expect(await file(join(packageDir, "install.txt")).text()).toBe("install!"); + expect(await file(join(packageDir, "postinstall.txt")).text()).toBe("postinstall!"); + expect(await file(join(packageDir, "preprepare.txt")).text()).toBe("preprepare!"); + expect(await file(join(packageDir, "prepare.txt")).text()).toBe("prepare!"); + expect(await file(join(packageDir, "postprepare.txt")).text()).toBe("postprepare!"); + + // add a dependency with all lifecycle scripts + await writeFile( + join(packageDir, "package.json"), + JSON.stringify({ + name: "foo", + version: "1.0.0", + scripts: { + preinstall: `${bunExe()} preinstall.js`, + install: `${bunExe()} install.js`, + postinstall: `${bunExe()} postinstall.js`, + preprepare: `${bunExe()} preprepare.js`, + prepare: `${bunExe()} prepare.js`, + postprepare: `${bunExe()} postprepare.js`, + }, + dependencies: { + "all-lifecycle-scripts": "1.0.0", + }, + trustedDependencies: ["all-lifecycle-scripts"], + }), + ); + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: null, + stdin: "pipe", + stderr: "pipe", + env: testEnv, + })); + + expect(await exited).toBe(0); + expect(stderr).toBeDefined(); + err = await new Response(stderr).text(); + expect(stdout).toBeDefined(); + out = await new Response(stdout).text(); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + " + all-lifecycle-scripts@1.0.0", + "", + expect.stringContaining("1 package installed"), + ]); + expect(await file(join(packageDir, "preinstall.txt")).text()).toBe("preinstall exists!"); + expect(await file(join(packageDir, "install.txt")).text()).toBe("install exists!"); + expect(await file(join(packageDir, "postinstall.txt")).text()).toBe("postinstall exists!"); + expect(await file(join(packageDir, 
"preprepare.txt")).text()).toBe("preprepare exists!"); + expect(await file(join(packageDir, "prepare.txt")).text()).toBe("prepare exists!"); + expect(await file(join(packageDir, "postprepare.txt")).text()).toBe("postprepare exists!"); + + const depDir = join(packageDir, "node_modules", "all-lifecycle-scripts"); + + expect(await exists(join(depDir, "preinstall.txt"))).toBeTrue(); + expect(await exists(join(depDir, "install.txt"))).toBeTrue(); + expect(await exists(join(depDir, "postinstall.txt"))).toBeTrue(); + expect(await exists(join(depDir, "preprepare.txt"))).toBeFalse(); + expect(await exists(join(depDir, "prepare.txt"))).toBeTrue(); + expect(await exists(join(depDir, "postprepare.txt"))).toBeFalse(); + + expect(await file(join(depDir, "preinstall.txt")).text()).toBe("preinstall!"); + expect(await file(join(depDir, "install.txt")).text()).toBe("install!"); + expect(await file(join(depDir, "postinstall.txt")).text()).toBe("postinstall!"); + expect(await file(join(depDir, "prepare.txt")).text()).toBe("prepare!"); + + await rm(join(packageDir, "preinstall.txt")); + await rm(join(packageDir, "install.txt")); + await rm(join(packageDir, "postinstall.txt")); + await rm(join(packageDir, "preprepare.txt")); + await rm(join(packageDir, "prepare.txt")); + await rm(join(packageDir, "postprepare.txt")); + await rm(join(packageDir, "node_modules"), { recursive: true, force: true }); + await rm(join(packageDir, "bun.lockb")); + + // all at once + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: null, + stdin: "pipe", + stderr: "pipe", + env: testEnv, + })); + expect(await exited).toBe(0); + expect(stderr).toBeDefined(); + err = await new Response(stderr).text(); + expect(stdout).toBeDefined(); + out = await new Response(stdout).text(); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, 
"").split(/\r?\n/)).toEqual([ + " + all-lifecycle-scripts@1.0.0", + "", + expect.stringContaining("1 package installed"), + ]); + + expect(await file(join(packageDir, "preinstall.txt")).text()).toBe("preinstall!"); + expect(await file(join(packageDir, "install.txt")).text()).toBe("install!"); + expect(await file(join(packageDir, "postinstall.txt")).text()).toBe("postinstall!"); + expect(await file(join(packageDir, "preprepare.txt")).text()).toBe("preprepare!"); + expect(await file(join(packageDir, "prepare.txt")).text()).toBe("prepare!"); + expect(await file(join(packageDir, "postprepare.txt")).text()).toBe("postprepare!"); + + expect(await file(join(depDir, "preinstall.txt")).text()).toBe("preinstall!"); + expect(await file(join(depDir, "install.txt")).text()).toBe("install!"); + expect(await file(join(depDir, "postinstall.txt")).text()).toBe("postinstall!"); + expect(await file(join(depDir, "prepare.txt")).text()).toBe("prepare!"); + }, 10_000); + + test("workspace lifecycle scripts", async () => { + await writeFile( + join(packageDir, "package.json"), + JSON.stringify({ + name: "foo", + version: "1.0.0", + workspaces: ["packages/*"], + scripts: { + preinstall: `touch preinstall.txt`, + install: `touch install.txt`, + postinstall: `touch postinstall.txt`, + preprepare: `touch preprepare.txt`, + prepare: `touch prepare.txt`, + postprepare: `touch postprepare.txt`, + }, + }), + ); + + await mkdir(join(packageDir, "packages", "pkg1"), { recursive: true }); + await writeFile( + join(packageDir, "packages", "pkg1", "package.json"), + JSON.stringify({ + name: "pkg1", + version: "1.0.0", + scripts: { + preinstall: `touch preinstall.txt`, + install: `touch install.txt`, + postinstall: `touch postinstall.txt`, + preprepare: `touch preprepare.txt`, + prepare: `touch prepare.txt`, + postprepare: `touch postprepare.txt`, + }, + }), + ); + + await mkdir(join(packageDir, "packages", "pkg2"), { recursive: true }); + await writeFile( + join(packageDir, "packages", "pkg2", 
"package.json"), + JSON.stringify({ + name: "pkg2", + version: "1.0.0", + scripts: { + preinstall: `touch preinstall.txt`, + install: `touch install.txt`, + postinstall: `touch postinstall.txt`, + preprepare: `touch preprepare.txt`, + prepare: `touch prepare.txt`, + postprepare: `touch postprepare.txt`, + }, + }), + ); + + var { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: null, + stdin: "pipe", + stderr: "pipe", + env: testEnv, + }); + + expect(stderr).toBeDefined(); + var err = await new Response(stderr).text(); + expect(stdout).toBeDefined(); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(err).toContain("Saved lockfile"); + var out = await new Response(stdout).text(); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + " + pkg1@workspace:packages/pkg1", + " + pkg2@workspace:packages/pkg2", + "", + " 2 packages installed", + ]); + expect(await exited).toBe(0); + + expect(await exists(join(packageDir, "preinstall.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "install.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "postinstall.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "preprepare.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "prepare.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "postprepare.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "packages", "pkg1", "preinstall.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "packages", "pkg1", "install.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "packages", "pkg1", "postinstall.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "packages", "pkg1", "preprepare.txt"))).toBeFalse(); + expect(await exists(join(packageDir, "packages", "pkg1", "prepare.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "packages", "pkg1", "postprepare.txt"))).toBeFalse(); + expect(await 
exists(join(packageDir, "packages", "pkg2", "preinstall.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "packages", "pkg2", "install.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "packages", "pkg2", "postinstall.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "packages", "pkg2", "preprepare.txt"))).toBeFalse(); + expect(await exists(join(packageDir, "packages", "pkg2", "prepare.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "packages", "pkg2", "postprepare.txt"))).toBeFalse(); + }); + + test("dependency lifecycle scripts run before root lifecycle scripts", async () => { + const script = '[[ -f "./node_modules/uses-what-bin-slow/what-bin.txt" ]]'; + await writeFile( + join(packageDir, "package.json"), + JSON.stringify({ + name: "foo", + version: "1.0.0", + dependencies: { + "uses-what-bin-slow": "1.0.0", + }, + trustedDependencies: ["uses-what-bin-slow"], + scripts: { + install: script, + postinstall: script, + preinstall: script, + prepare: script, + postprepare: script, + preprepare: script, + }, + }), + ); + + // uses-what-bin-slow will wait one second then write a file to disk. The root package should wait for + // for this to happen before running its lifecycle scripts. 
+ + var { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: null, + stdin: "pipe", + stderr: "pipe", + env: testEnv, + }); + + expect(stderr).toBeDefined(); + var err = await new Response(stderr).text(); + expect(stdout).toBeDefined(); + var out = await new Response(stdout).text(); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(await exited).toBe(0); + }); + + test("install a dependency with lifecycle scripts, then add to trusted dependencies and install again", async () => { + await writeFile( + join(packageDir, "package.json"), + JSON.stringify({ + name: "foo", + version: "1.0.0", + dependencies: { + "all-lifecycle-scripts": "1.0.0", + }, + trustedDependencies: [], + }), + ); + + var { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: null, + stdin: "pipe", + stderr: "pipe", + env: testEnv, + }); + + expect(stderr).toBeDefined(); + var err = await new Response(stderr).text(); + expect(stdout).toBeDefined(); + var out = await new Response(stdout).text(); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + " + all-lifecycle-scripts@1.0.0", + "", + " 1 package installed", + ]); + + const depDir = join(packageDir, "node_modules", "all-lifecycle-scripts"); + expect(await exists(join(depDir, "preinstall.txt"))).toBeFalse(); + expect(await exists(join(depDir, "install.txt"))).toBeFalse(); + expect(await exists(join(depDir, "postinstall.txt"))).toBeFalse(); + expect(await exists(join(depDir, "preprepare.txt"))).toBeFalse(); + expect(await exists(join(depDir, "prepare.txt"))).toBeTrue(); + expect(await exists(join(depDir, "postprepare.txt"))).toBeFalse(); + expect(await file(join(depDir, "prepare.txt")).text()).toBe("prepare!"); + expect(await 
exited).toBe(0); + + // add to trusted dependencies + await writeFile( + join(packageDir, "package.json"), + JSON.stringify({ + name: "foo", + version: "1.0.0", + dependencies: { + "all-lifecycle-scripts": "1.0.0", + }, + trustedDependencies: ["all-lifecycle-scripts"], + }), + ); + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: null, + stdin: "pipe", + stderr: "pipe", + env: testEnv, + })); + + expect(stderr).toBeDefined(); + err = await new Response(stderr).text(); + expect(stdout).toBeDefined(); + out = await new Response(stdout).text(); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + "", + expect.stringContaining("Checked 1 install across 2 packages (no changes)"), + ]); + + expect(await file(join(depDir, "preinstall.txt")).text()).toBe("preinstall!"); + expect(await file(join(depDir, "install.txt")).text()).toBe("install!"); + expect(await file(join(depDir, "postinstall.txt")).text()).toBe("postinstall!"); + expect(await file(join(depDir, "prepare.txt")).text()).toBe("prepare!"); + expect(await exists(join(depDir, "preprepare.txt"))).toBeFalse(); + expect(await exists(join(depDir, "postprepare.txt"))).toBeFalse(); + }); + + test("adding a package without scripts to trustedDependencies", async () => { + await writeFile( + join(packageDir, "package.json"), + JSON.stringify({ + name: "foo", + version: "1.0.0", + dependencies: { + "what-bin": "1.0.0", + }, + trustedDependencies: ["what-bin"], + }), + ); + + var { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: null, + stdin: "pipe", + stderr: "pipe", + env: testEnv, + }); + + expect(stderr).toBeDefined(); + var err = await new Response(stderr).text(); + expect(stdout).toBeDefined(); + var out = await new Response(stdout).text(); + expect(err).toContain("Saved 
lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + " + what-bin@1.0.0", + "", + " 1 package installed", + ]); + expect(await exited).toBe(0); + expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([".bin", ".cache", "what-bin"]); + expect(await readdirSorted(join(packageDir, "node_modules", ".bin"))).toEqual(["what-bin"]); + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: null, + stdin: "pipe", + stderr: "pipe", + env: testEnv, + })); + + err = await new Response(stderr).text(); + out = await new Response(stdout).text(); + expect(err).not.toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + "", + "Checked 1 install across 2 packages (no changes)", + ]); + expect(await exited).toBe(0); + + await rm(join(packageDir, "node_modules"), { recursive: true, force: true }); + await rm(join(packageDir, "bun.lockb")); + + await writeFile( + join(packageDir, "package.json"), + JSON.stringify({ + name: "foo", + version: "1.0.0", + dependencies: { "what-bin": "1.0.0" }, + }), + ); + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: null, + stdin: "pipe", + stderr: "pipe", + env: testEnv, + })); + + err = await new Response(stderr).text(); + out = await new Response(stdout).text(); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + " + what-bin@1.0.0", + "", + " 1 package installed", + ]); + expect(await exited).toBe(0); + expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([".bin", ".cache", "what-bin"]); + expect(await 
readdirSorted(join(packageDir, "node_modules", ".bin"))).toEqual(["what-bin"]); + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: null, + stdin: "pipe", + stderr: "pipe", + env: testEnv, + })); + + err = await new Response(stderr).text(); + out = await new Response(stdout).text(); + expect(err).not.toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + "", + "Checked 1 install across 2 packages (no changes)", + ]); + expect(await exited).toBe(0); + expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([".bin", ".cache", "what-bin"]); + expect(await readdirSorted(join(packageDir, "node_modules", ".bin"))).toEqual(["what-bin"]); + + // add it to trusted dependencies + await writeFile( + join(packageDir, "package.json"), + JSON.stringify({ + name: "foo", + version: "1.0.0", + dependencies: { + "what-bin": "1.0.0", + }, + trustedDependencies: ["what-bin"], + }), + ); + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: null, + stdin: "pipe", + stderr: "pipe", + env: testEnv, + })); + + err = await new Response(stderr).text(); + out = await new Response(stdout).text(); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + "", + "Checked 1 install across 2 packages (no changes)", + ]); + expect(await exited).toBe(0); + expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([".bin", ".cache", "what-bin"]); + expect(await readdirSorted(join(packageDir, "node_modules", ".bin"))).toEqual(["what-bin"]); + }); + + test("lifecycle scripts run if node_modules is deleted", async () => { + await writeFile( + join(packageDir, "package.json"), + JSON.stringify({ + name: "foo", 
+ version: "1.0.0", + dependencies: { + "lifecycle-postinstall": "1.0.0", + }, + trustedDependencies: ["lifecycle-postinstall"], + }), + ); + var { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: null, + stdin: "pipe", + stderr: "pipe", + env: testEnv, + }); + expect(stderr).toBeDefined(); + var err = await new Response(stderr).text(); + expect(stdout).toBeDefined(); + var out = await new Response(stdout).text(); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + " + lifecycle-postinstall@1.0.0", + "", + // @ts-ignore + expect.stringContaining("1 package installed"), + ]); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(await exists(join(packageDir, "node_modules", "lifecycle-postinstall", "postinstall.txt"))).toBeTrue(); + expect(await exited).toBe(0); + await rm(join(packageDir, "node_modules"), { force: true, recursive: true }); + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: null, + stdin: "pipe", + stderr: "pipe", + env: testEnv, + })); + expect(stderr).toBeDefined(); + err = await new Response(stderr).text(); + expect(stdout).toBeDefined(); + out = await new Response(stdout).text(); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + " + lifecycle-postinstall@1.0.0", + "", + expect.stringContaining("1 package installed"), + ]); + expect(err).not.toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(await exists(join(packageDir, "node_modules", "lifecycle-postinstall", "postinstall.txt"))).toBeTrue(); + expect(await exited).toBe(0); + }); + + test("INIT_CWD is set to the correct directory", async () => { + await writeFile( + join(packageDir, "package.json"), + JSON.stringify({ + name: "foo", + version: "1.0.0", + scripts: { + install: "bun install.js", + }, + 
dependencies: { + "lifecycle-init-cwd": "1.0.0", + "another-init-cwd": "npm:lifecycle-init-cwd@1.0.0", + }, + trustedDependencies: ["lifecycle-init-cwd", "another-init-cwd"], + }), + ); + + await writeFile( + join(packageDir, "install.js"), + ` + const fs = require("fs"); + const path = require("path"); + + fs.writeFileSync( + path.join(__dirname, "test.txt"), + process.env.INIT_CWD || "does not exist" + ); + `, + ); + + const { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: null, + stdin: "pipe", + stderr: "pipe", + env: testEnv, + }); + + const err = await new Response(stderr).text(); + const out = await new Response(stdout).text(); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + " + another-init-cwd@1.0.0", + " + lifecycle-init-cwd@1.0.0", + "", + expect.stringContaining("1 package installed"), + ]); + expect(await exited).toBe(0); + expect(await file(join(packageDir, "test.txt")).text()).toBe(packageDir + "/"); + expect(await file(join(packageDir, "node_modules/lifecycle-init-cwd/test.txt")).text()).toBe(packageDir + "/"); + expect(await file(join(packageDir, "node_modules/another-init-cwd/test.txt")).text()).toBe(packageDir + "/"); + }); + + test("failing lifecycle script should print output", async () => { + await writeFile( + join(packageDir, "package.json"), + JSON.stringify({ + name: "foo", + version: "1.0.0", + dependencies: { + "lifecycle-failing-postinstall": "1.0.0", + }, + trustedDependencies: ["lifecycle-failing-postinstall"], + }), + ); + + const { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env: testEnv, + }); + + expect(await exited).toBe(1); + + const err = await new Response(stderr).text(); + expect(err).toContain("hello"); + expect(await 
exited).toBe(1); + const out = await new Response(stdout).text(); + expect(out).toBeEmpty(); + }); + + test("--ignore-scripts should skip lifecycle scripts", async () => { + await writeFile( + join(packageDir, "package.json"), + JSON.stringify({ + name: "foo", + dependencies: { + "lifecycle-failing-postinstall": "1.0.0", + }, + trustedDependencies: ["lifecycle-failing-postinstall"], + }), + ); + + const { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install", "--ignore-scripts"], + cwd: packageDir, + stdout: "pipe", + stderr: "pipe", + stdin: "pipe", + env: testEnv, + }); + + const err = await new Response(stderr).text(); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("error:"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("hello"); + const out = await new Response(stdout).text(); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + " + lifecycle-failing-postinstall@1.0.0", + "", + " 1 package installed", + ]); + expect(await exited).toBe(0); + }); + + test("it should add `node-gyp rebuild` as the `install` script when `install` and `postinstall` don't exist and `binding.gyp` exists in the root of the package", async () => { + await writeFile( + join(packageDir, "package.json"), + JSON.stringify({ + name: "foo", + version: "1.0.0", + dependencies: { + "binding-gyp-scripts": "1.5.0", + }, + trustedDependencies: ["binding-gyp-scripts"], + }), + ); + + const { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env: testEnv, + }); + + const err = await new Response(stderr).text(); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + const out = await new Response(stdout).text(); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + " + binding-gyp-scripts@1.5.0", + "", + expect.stringContaining("2 
packages installed"), + ]); + expect(await exited).toBe(0); + expect(await exists(join(packageDir, "node_modules/binding-gyp-scripts/build.node"))).toBeTrue(); + }); + + test("automatic node-gyp scripts should not run for untrusted dependencies, and should run after adding to `trustedDependencies`", async () => { + const packageJSON: any = { + name: "foo", + version: "1.0.0", + dependencies: { + "binding-gyp-scripts": "1.5.0", + }, + }; + await writeFile(join(packageDir, "package.json"), JSON.stringify(packageJSON)); + + var { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env: testEnv, + }); + + const err = await new Response(stderr).text(); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + const out = await new Response(stdout).text(); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + " + binding-gyp-scripts@1.5.0", + "", + expect.stringContaining("2 packages installed"), + ]); + expect(await exited).toBe(0); + expect(await exists(join(packageDir, "node_modules", "binding-gyp-scripts", "build.node"))).toBeFalse(); + + packageJSON.trustedDependencies = ["binding-gyp-scripts"]; + await writeFile(join(packageDir, "package.json"), JSON.stringify(packageJSON)); + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env: testEnv, + })); + + expect(await exited).toBe(0); + expect(await exists(join(packageDir, "node_modules", "binding-gyp-scripts", "build.node"))).toBeTrue(); + }); + + test("automatic node-gyp scripts work in package root", async () => { + await writeFile( + join(packageDir, "package.json"), + JSON.stringify({ + name: "foo", + version: "1.0.0", + dependencies: { + "node-gyp": "1.5.0", + }, + }), + ); + + await writeFile(join(packageDir, "binding.gyp"), ""); + + var { 
stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env: testEnv, + }); + + const err = await new Response(stderr).text(); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + const out = await new Response(stdout).text(); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + " + node-gyp@1.5.0", + "", + expect.stringContaining("1 package installed"), + ]); + expect(await exited).toBe(0); + expect(await exists(join(packageDir, "build.node"))).toBeTrue(); + + await rm(join(packageDir, "build.node")); + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env: testEnv, + })); + + expect(await exited).toBe(0); + expect(await exists(join(packageDir, "build.node"))).toBeTrue(); + }); + + test("auto node-gyp scripts work when scripts exists other than `install` and `postinstall`", async () => { + await writeFile( + join(packageDir, "package.json"), + JSON.stringify({ + name: "foo", + version: "1.0.0", + dependencies: { + "node-gyp": "1.5.0", + }, + scripts: { + preinstall: "exit 0", + prepare: "exit 0", + postprepare: "exit 0", + }, + }), + ); + + await writeFile(join(packageDir, "binding.gyp"), ""); + + var { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env: testEnv, + }); + + const err = await new Response(stderr).text(); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + const out = await new Response(stdout).text(); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + " + node-gyp@1.5.0", + "", + expect.stringContaining("1 package installed"), + ]); + expect(await exited).toBe(0); + expect(await 
exists(join(packageDir, "build.node"))).toBeTrue(); + }); + + for (const script of ["install", "postinstall"]) { + test(`does not add auto node-gyp script when ${script} script exists`, async () => { + const packageJSON: any = { + name: "foo", + version: "1.0.0", + dependencies: { + "node-gyp": "1.5.0", + }, + scripts: { + [script]: "exit 0", + }, + }; + await writeFile(join(packageDir, "package.json"), JSON.stringify(packageJSON)); + await writeFile(join(packageDir, "binding.gyp"), ""); + + const { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env: testEnv, + }); + + const err = await new Response(stderr).text(); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + const out = await new Response(stdout).text(); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + " + node-gyp@1.5.0", + "", + expect.stringContaining("1 package installed"), + ]); + expect(await exited).toBe(0); + expect(await exists(join(packageDir, "build.node"))).toBeFalse(); + }); + } + + test("git dependencies also run `preprepare`, `prepare`, and `postprepare` scripts", async () => { + await writeFile( + join(packageDir, "package.json"), + JSON.stringify({ + name: "foo", + version: "1.0.0", + dependencies: { + "lifecycle-install-test": "dylan-conway/lifecycle-install-test#3ba6af5b64f2d27456e08df21d750072dffd3eee", + }, + }), + ); + + var { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env: testEnv, + }); + + const err = await new Response(stderr).text(); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + const out = await new Response(stdout).text(); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + " + 
lifecycle-install-test@github:dylan-conway/lifecycle-install-test#3ba6af5", + "", + expect.stringContaining("1 package installed"), + ]); + expect(await exited).toBe(0); + expect(await exists(join(packageDir, "node_modules", "lifecycle-install-test", "preprepare.txt"))).toBeFalse(); + expect(await exists(join(packageDir, "node_modules", "lifecycle-install-test", "prepare.txt"))).toBeFalse(); + expect(await exists(join(packageDir, "node_modules", "lifecycle-install-test", "postprepare.txt"))).toBeFalse(); + expect(await exists(join(packageDir, "node_modules", "lifecycle-install-test", "preinstall.txt"))).toBeFalse(); + expect(await exists(join(packageDir, "node_modules", "lifecycle-install-test", "install.txt"))).toBeFalse(); + expect(await exists(join(packageDir, "node_modules", "lifecycle-install-test", "postinstall.txt"))).toBeFalse(); + + await writeFile( + join(packageDir, "package.json"), + JSON.stringify({ + name: "foo", + version: "1.0.0", + dependencies: { + "lifecycle-install-test": "dylan-conway/lifecycle-install-test#3ba6af5b64f2d27456e08df21d750072dffd3eee", + }, + trustedDependencies: ["lifecycle-install-test"], + }), + ); + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env: testEnv, + })); + + expect(await exited).toBe(0); + expect(await exists(join(packageDir, "node_modules", "lifecycle-install-test", "preprepare.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "node_modules", "lifecycle-install-test", "prepare.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "node_modules", "lifecycle-install-test", "postprepare.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "node_modules", "lifecycle-install-test", "preinstall.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "node_modules", "lifecycle-install-test", "install.txt"))).toBeTrue(); + expect(await exists(join(packageDir, "node_modules", "lifecycle-install-test", 
"postinstall.txt"))).toBeTrue(); + }); + + test("root lifecycle scripts should wait for dependency lifecycle scripts", async () => { + await writeFile( + join(packageDir, "package.json"), + JSON.stringify({ + name: "foo", + version: "1.0.0", + dependencies: { + "uses-what-bin-slow": "1.0.0", + }, + trustedDependencies: ["uses-what-bin-slow"], + scripts: { + install: '[[ -f "./node_modules/uses-what-bin-slow/what-bin.txt" ]]', + }, + }), + ); + + // Package `uses-what-bin-slow` has an install script that will sleep for 1 second + // before writing `what-bin.txt` to disk. The root package has an install script that + // checks if this file exists. If the root package install script does not wait for + // the other to finish, it will fail. + + var { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: null, + stdin: "pipe", + stderr: "pipe", + env: testEnv, + }); + + const err = await new Response(stderr).text(); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + const out = await new Response(stdout).text(); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + " + uses-what-bin-slow@1.0.0", + "", + " 2 packages installed", + ]); + expect(await exited).toBe(0); + }); + + // test("stress test", async () => { + // // 1000 versions of the same package, and 1000 different packages each depending on one + // // of the versions. This creates a node_modules folder for 999 of the package + // // versions (minus 1 because one is hoisted) with none depending on another. This allows + // // lifecycle scripts for each package to run in parallel if --lifecycle-script-jobs is set + // // high enough. 
+ // const totalPackageVersions = 1000; + // const maxJobs = 400; + // var dependencies: any = {}; + // for (var i = 0; i < totalPackageVersions; i++) { + // dependencies[`uses-postinstall-stress-test-1-0-${i}`] = `1.0.${i}`; + // } + + // await writeFile( + // join(packageDir, "package.json"), + // JSON.stringify({ + // name: "foo", + // version: "1.0.0", + // dependencies, + // trustedDependencies: ["postinstall-stress-test"], + // }), + // ); + + // var { stdout, stderr, exited } = spawn({ + // cmd: [bunExe(), "install", `--lifecycle-script-jobs=${maxJobs}`], + // cwd: packageDir, + // stdout: "pipe", + // stdin: "pipe", + // stderr: "pipe", + // env: testEnv, + // }); + + // const err = await new Response(stderr).text(); + // expect(await exited).toBe(0); + // expect(err).toContain("Saved lockfile"); + // expect(err).not.toContain("not found"); + // expect(err).not.toContain("error:"); + + // await rm(join(packageDir, "node_modules", ".cache"), { recursive: true, force: true }); + // expect((await readdir(join(packageDir, "node_modules"), { recursive: true })).sort()).toMatchSnapshot(); + // }, 10_000); + + test("it should install and use correct binary version", async () => { + // this should install `what-bin` in two places: + // + // - node_modules/.bin/what-bin@1.5.0 + // - node_modules/uses-what-bin/node_modules/.bin/what-bin@1.0.0 + + await writeFile( + join(packageDir, "package.json"), + JSON.stringify({ + name: "foo", + version: "1.0.0", + dependencies: { + "uses-what-bin": "1.0.0", + "what-bin": "1.5.0", + }, + }), + ); + + var { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env, + }); + + var err = await new Response(stderr).text(); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + var out = await new Response(stdout).text(); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, 
"").split(/\r?\n/)).toEqual([ + " + uses-what-bin@1.0.0", + " + what-bin@1.5.0", + "", + expect.stringContaining("3 packages installed"), + ]); + expect(await exited).toBe(0); + expect(await file(join(packageDir, "node_modules", ".bin", "what-bin")).text()).toContain("what-bin@1.5.0"); + expect( + await file(join(packageDir, "node_modules", "uses-what-bin", "node_modules", ".bin", "what-bin")).text(), + ).toContain("what-bin@1.0.0"); + + await rm(join(packageDir, "node_modules"), { recursive: true, force: true }); + await rm(join(packageDir, "bun.lockb")); + + await writeFile( + join(packageDir, "package.json"), + JSON.stringify({ + name: "foo", + version: "1.0.0", + dependencies: { + "uses-what-bin": "1.5.0", + "what-bin": "1.0.0", + }, + scripts: { + install: "what-bin", + }, + trustedDependencies: ["uses-what-bin"], + }), + ); + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env, + })); + + expect(await exited).toBe(0); + expect(await file(join(packageDir, "node_modules", ".bin", "what-bin")).text()).toContain("what-bin@1.0.0"); + expect( + await file(join(packageDir, "node_modules", "uses-what-bin", "node_modules", ".bin", "what-bin")).text(), + ).toContain("what-bin@1.5.0"); + + await rm(join(packageDir, "node_modules"), { recursive: true, force: true }); + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env, + })); + + out = await new Response(stdout).text(); + err = await new Response(stderr).text(); + expect(err).not.toContain("Saved lockfile"); + expect(err).not.toContain("not found"); + expect(err).not.toContain("error:"); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + " + uses-what-bin@1.5.0", + " + what-bin@1.0.0", + "", + expect.stringContaining("3 packages installed"), + ]); + expect(await exited).toBe(0); + }); + }); +} + 
+test("it should be able to find binary in node_modules/.bin from parent directory of root package", async () => { + await mkdir(join(packageDir, "node_modules", ".bin"), { recursive: true }); + await mkdir(join(packageDir, "morePackageDir")); await writeFile( - join(packageDir, "package.json"), + join(packageDir, "morePackageDir", "package.json"), JSON.stringify({ name: "foo", version: "1.0.0", + scripts: { + install: "missing-bin", + }, dependencies: { - "uses-what-bin": "1.0.0", - "what-bin": "1.5.0", + "what-bin": "1.0.0", }, }), ); - var { stdout, stderr, exited } = spawn({ + await cp(join(packageDir, "bunfig.toml"), join(packageDir, "morePackageDir", "bunfig.toml")); + + await await writeFile( + join(packageDir, "node_modules", ".bin", "missing-bin"), + `#!/usr/bin/env node +require("fs").writeFileSync("missing-bin.txt", "missing-bin@WHAT"); +`, + { mode: 0o777 }, + ); + + const { stdout, stderr, exited } = spawn({ cmd: [bunExe(), "install"], - cwd: packageDir, + cwd: join(packageDir, "morePackageDir"), stdout: "pipe", stdin: "pipe", stderr: "pipe", env, }); - var err = await new Response(stderr).text(); + const err = await new Response(stderr).text(); expect(err).toContain("Saved lockfile"); expect(err).not.toContain("not found"); expect(err).not.toContain("error:"); - var out = await new Response(stdout).text(); + const out = await new Response(stdout).text(); expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - " + uses-what-bin@1.0.0", - " + what-bin@1.5.0", - "", - expect.stringContaining("3 packages installed"), - ]); - expect(await exited).toBe(0); - expect(await file(join(packageDir, "node_modules", ".bin", "what-bin")).text()).toContain("what-bin@1.5.0"); - expect( - await file(join(packageDir, "node_modules", "uses-what-bin", "node_modules", ".bin", "what-bin")).text(), - ).toContain("what-bin@1.0.0"); - - await rm(join(packageDir, "node_modules"), { recursive: true, force: true }); - await rm(join(packageDir, "bun.lockb")); 
- - await writeFile( - join(packageDir, "package.json"), - JSON.stringify({ - name: "foo", - version: "1.0.0", - dependencies: { - "uses-what-bin": "1.5.0", - "what-bin": "1.0.0", - }, - scripts: { - install: "what-bin", - }, - trustedDependencies: ["uses-what-bin"], - }), - ); - - ({ stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env, - })); - - expect(await exited).toBe(0); - expect(await file(join(packageDir, "node_modules", ".bin", "what-bin")).text()).toContain("what-bin@1.0.0"); - expect( - await file(join(packageDir, "node_modules", "uses-what-bin", "node_modules", ".bin", "what-bin")).text(), - ).toContain("what-bin@1.5.0"); - - await rm(join(packageDir, "node_modules"), { recursive: true, force: true }); - - ({ stdout, stderr, exited } = spawn({ - cmd: [bunExe(), "install"], - cwd: packageDir, - stdout: "pipe", - stdin: "pipe", - stderr: "pipe", - env, - })); - - out = await new Response(stdout).text(); - err = await new Response(stderr).text(); - expect(err).not.toContain("Saved lockfile"); - expect(err).not.toContain("not found"); - expect(err).not.toContain("error:"); - expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ - " + uses-what-bin@1.5.0", " + what-bin@1.0.0", "", - expect.stringContaining("3 packages installed"), + expect.stringContaining("1 package installed"), ]); expect(await exited).toBe(0); + expect(await file(join(packageDir, "morePackageDir", "missing-bin.txt")).text()).toBe("missing-bin@WHAT"); }); describe("semver", () => { @@ -3013,4 +4164,61 @@ describe("yarn tests", () => { expect(err).toBeEmpty(); expect(await exited).toBe(0); }); + + test("it should reinstall and rebuild dependencies deleted by the user on the next install", async () => { + await writeFile( + join(packageDir, "package.json"), + JSON.stringify({ + name: "foo", + version: "1.0.0", + dependencies: { + "no-deps-scripted": "1.0.0", + "one-dep-scripted": 
"1.5.0", + }, + trustedDependencies: ["no-deps-scripted", "one-dep-scripted"], + }), + ); + + var { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install", "--dev"], + cwd: packageDir, + stdout: null, + stdin: "pipe", + stderr: "pipe", + env, + }); + + var err = await new Response(stderr).text(); + var out = await new Response(stdout).text(); + expect(err).toContain("Saved lockfile"); + expect(err).not.toContain("error:"); + expect(err).not.toContain("not found"); + expect(out.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "").split(/\r?\n/)).toEqual([ + " + no-deps-scripted@1.0.0", + " + one-dep-scripted@1.5.0", + "", + expect.stringContaining("4 packages installed"), + ]); + expect(await exists(join(packageDir, "node_modules/one-dep-scripted/success.txt"))).toBeTrue(); + expect(await exited).toBe(0); + + await rm(join(packageDir, "node_modules/one-dep-scripted"), { recursive: true, force: true }); + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install", "--dev"], + cwd: packageDir, + stdout: null, + stdin: "pipe", + stderr: "pipe", + env, + })); + + err = await new Response(stderr).text(); + out = await new Response(stdout).text(); + expect(err).not.toContain("Saved lockfile"); + expect(err).not.toContain("error:"); + expect(err).not.toContain("not found"); + expect(await exists(join(packageDir, "node_modules/one-dep-scripted/success.txt"))).toBeTrue(); + expect(await exited).toBe(0); + }); }); diff --git a/test/cli/install/registry/packages/.verdaccio-db.json b/test/cli/install/registry/packages/.verdaccio-db.json deleted file mode 100644 index 4deaf50177..0000000000 --- a/test/cli/install/registry/packages/.verdaccio-db.json +++ /dev/null @@ -1 +0,0 @@ 
-{"list":["basic-1","@babel/parser","@babel/traverse","@private/has-bin-entry","@private/package","@private/unconventional-tarball","@scoped/create-test-app","@scoped/has-bin-entry","@types/babel__traverse","@types/is-number","@types/no-deps","binding-gyp-scripts","broken-peer-deps","create-test-app","dep-loop-entry","dep-loop-exit","dev-deps","dragon-test-1-a","dragon-test-1-b","dragon-test-1-c","dragon-test-1-d","dragon-test-1-e","dragon-test-3-a","dragon-test-3-b","dragon-test-7-a","dragon-test-7-b","dragon-test-7-c","dragon-test-7-d","dragon-test-8-a","dragon-test-8-b","dragon-test-8-c","dragon-test-8-d","dragon-test-11-a","dragon-test-11-b","fallback-peer-deps","forward-peer-deps","forward-peer-deps-too","has-bin-entries","has-symlinks","has-types","hoisting-peer-check-child","hoisting-peer-check-parent","inject-node-gyp","invalid-main","is-number","left-pad","mismatched-peer-deps-lvl0","mismatched-peer-deps-lvl1","mismatched-peer-deps-lvl2","native","native-bar-x64","native-foo-x64","native-foo-x86","native-libc-glibc","native-libc-musl","no-deps","no-deps-backward-tags","no-deps-bins","no-deps-bins-esm","no-deps-browser-field","no-deps-build-metadata","no-deps-checked","no-deps-deprecated","no-deps-deprecated-empty","no-deps-deprecated-whitespace","no-deps-esm","no-deps-exports","no-deps-failing","no-deps-mjs","no-deps-nested-postinstall","no-deps-scripted","no-deps-scripted-bis","no-deps-scripted-empty","no-deps-scripted-to-fail","no-deps-scripted-to-deeply-fail","no-deps-tags","node-gyp","node-modules-path","one-deep1-dep-bins","one-deep2-dep-bins","one-dep-scripted","one-fixed-dep","one-fixed-dep-bins","one-fixed-dep-checked","one-fixed-dep-scripted","one-fixed-dep-with-types","one-range-dep","one-range-dep-too","optional-native","optional-peer-deps","optional-peer-deps-implicit","path-parse","peer-deps","peer-deps-fixed","peer-deps-lvl0","peer-deps-lvl1","peer-deps-lvl2","peer-deps-too","prefer-unplugged-false","prefer-unplugged-true","private-package","p
rivate-unconventional-tarball","provides-peer-deps-1-0-0","provides-peer-deps-1-0-0-too","provides-peer-deps-2-0-0","resolve","self-require-dep","self-require-trap","two-range-deps","unconventional-tarball","various-requires","vulnerable","vulnerable-dep","vulnerable-many","vulnerable-peer-deps","prereleases-1","prereleases-2","dep-with-tags","prereleases-3","hexo","prereleases-4","a-dep","uses-a-dep-1","uses-a-dep-2","uses-a-dep-3","uses-a-dep-4","uses-a-dep-5","uses-a-dep-6","uses-a-dep-7","uses-a-dep-8","uses-a-dep-9","uses-a-dep-10","peer-a-dep-star","peer-a-dep-caret-1-0-2","peer-a-dep-gte-1-0-2","peer-a-dep-1-0-2","peer-a-dep-tilde-1-0-2","peer-a-dep-gte-1-0-3"],"secret":"4f8fdf3ae3425c5661cbdd85990a5f44365b989c3c6f478ed48ccf5bae9e14b1"} \ No newline at end of file diff --git a/test/cli/install/registry/packages/all-lifecycle-scripts/all-lifecycle-scripts-1.0.0.tgz b/test/cli/install/registry/packages/all-lifecycle-scripts/all-lifecycle-scripts-1.0.0.tgz new file mode 100644 index 0000000000000000000000000000000000000000..c3bc7173a22116ff6d8ac706a6c3c06edcfdce22 GIT binary patch literal 536 zcmV+z0_Xi7iwFP!00002|LvH`PQx$|hI7tS3^}z@2~J2|3B;8b;0Z!-z>rP0*@XJ; zI7=P3Wl^g%g4%O&>ijcyRDUzJt2lXz@5vc0xQMgNf8@^Kh7iJ01Yh;n76l)j;wN>;C6J7hCd5xJt=4snw5sF3D!DjR4)~4# z5O4Ay28+n@f86}*D{i11poQEe4`^=2cq3*b(4Y{?E7e|BFRv`9E_0`xQ47{wm^5^eZnD4+0wh2dVx0p8x0e{D1uX4_f>8 zsQZT2{O|ZbR0`Oq{$pGJ4d8$H6tG_wFgFn>{@Yf75KxTQ^4@Wome-QL-|2IL|NuG$8=skf~Jp;P*4CtCf=c+JXr*v7PHn|i> pFP{aBpY8PvfEG)^QdFjRRQlR-TC=@qu~;mBfKR@p_k92k000E`rSAX$ literal 0 HcmV?d00001 diff --git a/test/cli/install/registry/packages/binding-gyp-scripts/package.json b/test/cli/install/registry/packages/binding-gyp-scripts/package.json index 875620040d..3a8e1a572a 100644 --- a/test/cli/install/registry/packages/binding-gyp-scripts/package.json +++ b/test/cli/install/registry/packages/binding-gyp-scripts/package.json @@ -21,16 +21,38 @@ "tarball": "http://localhost:4873/binding-gyp-scripts/-/binding-gyp-scripts-1.0.0.tgz" }, 
"contributors": [] + }, + "1.5.0": { + "name": "binding-gyp-scripts", + "version": "1.5.0", + "dependencies": { + "node-gyp": "1.5.0" + }, + "scripts": { + "install": "node-gyp rebuild" + }, + "gypfile": true, + "gitHead": "e0a78fdc023bcc2f4d739c6ba9d5c026a1b586ae", + "_id": "binding-gyp-scripts@1.5.0", + "_nodeVersion": "21.1.0", + "_npmVersion": "8.19.4", + "dist": { + "integrity": "sha512-D0IqsFdZGOSu7ppwqjvV+oDaIGbTaZrWIT79xvxzH98y7HE+UvxRi8QUXQwJsoegyaRwPtYQDqRcjUCefifKxw==", + "shasum": "2824b5bb0810c2f0625a1a8a7e6f6726a04f2419", + "tarball": "http://localhost:4873/binding-gyp-scripts/-/binding-gyp-scripts-1.5.0.tgz" + }, + "contributors": [] } }, "time": { - "modified": "2023-11-01T22:04:59.983Z", + "modified": "2023-11-15T06:24:52.994Z", "created": "2023-11-01T22:04:59.983Z", - "1.0.0": "2023-11-01T22:04:59.983Z" + "1.0.0": "2023-11-01T22:04:59.983Z", + "1.5.0": "2023-11-15T06:24:52.994Z" }, "users": {}, "dist-tags": { - "latest": "1.0.0" + "latest": "1.5.0" }, "_uplinks": {}, "_distfiles": {}, @@ -38,6 +60,10 @@ "binding-gyp-scripts-1.0.0.tgz": { "shasum": "f74711cb59761e13ba9d2126a290bc847aab28c1", "version": "1.0.0" + }, + "binding-gyp-scripts-1.5.0.tgz": { + "shasum": "2824b5bb0810c2f0625a1a8a7e6f6726a04f2419", + "version": "1.5.0" } }, "_rev": "3-4ebd3d7005439d9e", diff --git a/test/cli/install/registry/packages/has-bin-entries/has-bin-entries-1.5.0.tgz b/test/cli/install/registry/packages/has-bin-entries/has-bin-entries-1.5.0.tgz new file mode 100644 index 0000000000000000000000000000000000000000..cd65abdeb953cd2ce94e220d1af208cdc4560de1 GIT binary patch literal 751 zcmVrz5G!UDj(_PDzpi|M#(+v~dy3c1Y9UaA9B{|9*vk*Xj70 z|D*B0W?s#OAEbfruvP`>c;0sgIu4vF|0zxb8PAb!{>Hr3{#ZEem)bL4b#eW4ax&bNkXR9*}+Nhe<)_=O!{2!Tr#;-6DMgAFY zN2IFYpN%wGaEH{jqi2q(h&j^%;$P|@9;6G*&EvDlKujVF zV-&*+p!WP~OO+s|CEY+p41j_=@}k`Ry1v0sn&ezG%TLxP-2k4R;Bnc?tO0h6=#!&C z;Vr014-OP~mKyxm_|JB59}@uf@K2qU`afj=n*XEn-zf9nXswfYy@H-1|HXg($^3hM zmH(dFFKGThUb2WS1|CwchVa@0J>}Fk-BuSEd%LC;TXN&*{ F006!>VyOTC 
literal 0 HcmV?d00001 diff --git a/test/cli/install/registry/packages/lifecycle-failing-postinstall-slow/package.json b/test/cli/install/registry/packages/lifecycle-failing-postinstall-slow/package.json new file mode 100644 index 0000000000..027c0205a6 --- /dev/null +++ b/test/cli/install/registry/packages/lifecycle-failing-postinstall-slow/package.json @@ -0,0 +1,41 @@ +{ + "name": "lifecycle-failing-postinstall-slow", + "versions": { + "1.0.0": { + "name": "lifecycle-failing-postinstall-slow", + "version": "1.0.0", + "scripts": { + "postinstall": "sleep 1 && echo hello && exit 1" + }, + "_id": "lifecycle-failing-postinstall-slow@1.0.0", + "_nodeVersion": "21.1.0", + "_npmVersion": "10.2.0", + "dist": { + "integrity": "sha512-+tscCfiSu1CwL1HWDDrx7LIvPweMc88HUTm3VfhfDmMrtCSv8NGOZgnEy0cuGOlOl7Eqfg6O3XM11BprpsYb6A==", + "shasum": "823c78b85c961a7efb556c821281a3fd87f2776e", + "tarball": "http://localhost:4873/lifecycle-failing-postinstall-slow/-/lifecycle-failing-postinstall-slow-1.0.0.tgz" + }, + "contributors": [] + } + }, + "time": { + "modified": "2023-11-29T06:43:10.723Z", + "created": "2023-11-29T06:43:10.723Z", + "1.0.0": "2023-11-29T06:43:10.723Z" + }, + "users": {}, + "dist-tags": { + "latest": "1.0.0" + }, + "_uplinks": {}, + "_distfiles": {}, + "_attachments": { + "lifecycle-failing-postinstall-slow-1.0.0.tgz": { + "shasum": "823c78b85c961a7efb556c821281a3fd87f2776e", + "version": "1.0.0" + } + }, + "_rev": "", + "_id": "lifecycle-failing-postinstall-slow", + "readme": "ERROR: No README data found!" 
+} \ No newline at end of file diff --git a/test/cli/install/registry/packages/lifecycle-failing-postinstall/lifecycle-failing-postinstall-1.0.0.tgz b/test/cli/install/registry/packages/lifecycle-failing-postinstall/lifecycle-failing-postinstall-1.0.0.tgz new file mode 100644 index 0000000000000000000000000000000000000000..3fa7466fe24a9e7d4bb7fafe425bf924609bfdc1 GIT binary patch literal 193 zcmV;y06za8iwFP!00002|LxF03&J222XN2(6b}zOwe_}^hrW%7+bq*DR}``D-jhLe z=s1eN-{r}lzoHIOon)uC!!pZb@Q$qv5k=F$r#z~$2DaM^tSTXpdbHR#NRxfRB}{Up ztE})y0mkoU-^>IRr{i`ePt^mSC{3yeP?IAt_xrZ~R}QdA4}8>rf;9f%p(eNhi!XC!vBPR&-Sc!eX)E{mbrzn( zG_8p*t#Wv+8oXU=*-7C&%x6aF&UidkR#(s11Llw%cPq!by^yg+A`wTu;G>(--T3nU z3N{;HQ-cKBIKY)6$5Q{1B=Xy|d;j;<`)~A-{m+W;{ulXa^#2&VdL>y9m7M`=RE=^i z8|*8sTwgAyEGWwsXh|6zA{iH4?7AeaY90A*c!_W9?~o9i%r$&oDa_Be={EWti9{lQ Nhc_VnYnK2F007Iiox7oHoo-op zcTedyhcVd=Au;y5;K1pBPEV8mp~;K){Ego=%2*+d&8sTem}pQarCFN7Z+S;L&mdYX zGs;+&QPfd3XZJ2fBe2CYYaC^OLn3gFe*{rCN?SOcmCrX%n z?>uo0rc*e?aGb$sR`}f;w@de19uIKi+P1(e!@D-u%Dg^~{teB^|30Gkl^lAHEB>F; z#ZUjUWy*s8$H18afXKY&iiP5$)u>R0v>YzEl++yxw^R~cx$9e-rOea WwZ`sW3kwU2KZX~Bbs(Am5C8xWBF!TJ literal 0 HcmV?d00001 diff --git a/test/cli/install/registry/packages/node-gyp/package.json b/test/cli/install/registry/packages/node-gyp/package.json index 152d547e4e..391a1eb2e2 100644 --- a/test/cli/install/registry/packages/node-gyp/package.json +++ b/test/cli/install/registry/packages/node-gyp/package.json @@ -17,16 +17,34 @@ "tarball": "http://localhost:4873/node-gyp/-/node-gyp-1.0.0.tgz" }, "contributors": [] + }, + "1.5.0": { + "name": "node-gyp", + "version": "1.5.0", + "bin": { + "node-gyp": "node-gyp.js" + }, + "gitHead": "e0a78fdc023bcc2f4d739c6ba9d5c026a1b586ae", + "_id": "node-gyp@1.5.0", + "_nodeVersion": "21.1.0", + "_npmVersion": "8.19.4", + "dist": { + "integrity": "sha512-zb79rA2oVxlrxbZ2TqdAD8nAbgZq9u98U9i/kKGjlW6E62t9s/da03c+JVpC67BJOzB1qS+D2SpvO9l44FoyYg==", + "shasum": "e9585e24a8bd25e99ce960d2dae7e1ce6b756414", + "tarball": 
"http://localhost:4873/node-gyp/-/node-gyp-1.5.0.tgz" + }, + "contributors": [] } }, "time": { - "modified": "2023-11-01T22:05:22.357Z", + "modified": "2023-11-15T06:24:01.661Z", "created": "2023-11-01T22:05:22.357Z", - "1.0.0": "2023-11-01T22:05:22.357Z" + "1.0.0": "2023-11-01T22:05:22.357Z", + "1.5.0": "2023-11-15T06:24:01.661Z" }, "users": {}, "dist-tags": { - "latest": "1.0.0" + "latest": "1.5.0" }, "_uplinks": {}, "_distfiles": {}, @@ -34,6 +52,10 @@ "node-gyp-1.0.0.tgz": { "shasum": "2090b8b9d65cc24afc6f7169a2d98abf6420c9f6", "version": "1.0.0" + }, + "node-gyp-1.5.0.tgz": { + "shasum": "e9585e24a8bd25e99ce960d2dae7e1ce6b756414", + "version": "1.5.0" } }, "_rev": "3-9adf8a3d5d3c73ec", diff --git a/test/cli/install/registry/packages/one-dep-scripted/one-dep-scripted-1.5.0.tgz b/test/cli/install/registry/packages/one-dep-scripted/one-dep-scripted-1.5.0.tgz new file mode 100644 index 0000000000000000000000000000000000000000..b739a221554af1950b6564b726fb7b1352b5414b GIT binary patch literal 340 zcmV-a0jvHWiwFP!00002|Lv4rZ-OushPmcfoN$GSU{S$j%kFl+A7D&CIBxZ$Wi1#N z<-hM>-JrQrz_zSmr#pN50dybXF4Qlz~{ z*Ih^jelCTsV1*wb7cfl=ERo)h$T{k?547JtcS0qix`@qsuy+B@n z{^ev-ip}6~fPK?5-2gW#rX(~wI|XfNaDc{bT*q?F>5eb%`p*6zPFUCfQ9O>V|L=iu zk|(kadJo`9fdP3Y(7EDeg@xBQ`7J8VrD#i{Fb>&Q{M`MNG-}7pTc(3K7XgY&ndGVS mSkj#O&38Jjoc;*edfm2kRZO!rv&CYu{FqOdx%~713;+NE+^3-c literal 0 HcmV?d00001 diff --git a/test/cli/install/registry/packages/one-dep-scripted/package.json b/test/cli/install/registry/packages/one-dep-scripted/package.json index 93d0abf395..e050e952f9 100644 --- a/test/cli/install/registry/packages/one-dep-scripted/package.json +++ b/test/cli/install/registry/packages/one-dep-scripted/package.json @@ -20,16 +20,37 @@ "tarball": "http://localhost:4873/one-dep-scripted/-/one-dep-scripted-1.0.0.tgz" }, "contributors": [] + }, + "1.5.0": { + "name": "one-dep-scripted", + "version": "1.5.0", + "dependencies": { + "has-bin-entries": "1.5.0" + }, + "scripts": { + "install": "has-bin-entries" + }, + 
"gitHead": "e0a78fdc023bcc2f4d739c6ba9d5c026a1b586ae", + "_id": "one-dep-scripted@1.5.0", + "_nodeVersion": "21.1.0", + "_npmVersion": "8.19.4", + "dist": { + "integrity": "sha512-mFVlLoVtKZ+FEUKvmKwB2fCMIlFjJ8jt/ugz8n3ahiiGZ5IsdfmezerZFuX/2z588DZcSR7ZkcO9t7Z0yG/SHA==", + "shasum": "f28892bbfd62143695757d7fa53eff067da0aa1e", + "tarball": "http://localhost:4873/one-dep-scripted/-/one-dep-scripted-1.5.0.tgz" + }, + "contributors": [] } }, "time": { - "modified": "2023-11-01T22:05:23.788Z", + "modified": "2023-11-15T01:33:17.813Z", "created": "2023-11-01T22:05:23.788Z", - "1.0.0": "2023-11-01T22:05:23.788Z" + "1.0.0": "2023-11-01T22:05:23.788Z", + "1.5.0": "2023-11-15T01:33:17.813Z" }, "users": {}, "dist-tags": { - "latest": "1.0.0" + "latest": "1.5.0" }, "_uplinks": {}, "_distfiles": {}, @@ -37,6 +58,10 @@ "one-dep-scripted-1.0.0.tgz": { "shasum": "fce9ca522efdeb8e9e0b3e205a25603f82e1c5fc", "version": "1.0.0" + }, + "one-dep-scripted-1.5.0.tgz": { + "shasum": "f28892bbfd62143695757d7fa53eff067da0aa1e", + "version": "1.5.0" } }, "_rev": "3-3ec6efca7c822777", diff --git a/test/cli/install/registry/packages/uses-what-bin-slow/package.json b/test/cli/install/registry/packages/uses-what-bin-slow/package.json new file mode 100644 index 0000000000..4988307337 --- /dev/null +++ b/test/cli/install/registry/packages/uses-what-bin-slow/package.json @@ -0,0 +1,44 @@ +{ + "name": "uses-what-bin-slow", + "versions": { + "1.0.0": { + "name": "uses-what-bin-slow", + "version": "1.0.0", + "scripts": { + "install": "sleep 1 && what-bin" + }, + "dependencies": { + "what-bin": "1.0.0" + }, + "_id": "uses-what-bin-slow@1.0.0", + "_nodeVersion": "21.1.0", + "_npmVersion": "10.2.0", + "dist": { + "integrity": "sha512-/l5wILffL/epzl68C0NJPAxpTAd4P0Jyu911I2oI2XTNy8GzPdfHTNQ18GddYovPjaL+bQhopfgkmiHfF9/n2Q==", + "shasum": "af87d384ce8a007905c42d87989fd3ccd8fa9d6b", + "tarball": "http://localhost:4873/uses-what-bin-slow/-/uses-what-bin-slow-1.0.0.tgz" + }, + "contributors": [] + } + }, + 
"time": { + "modified": "2023-11-21T23:39:12.762Z", + "created": "2023-11-21T23:39:12.762Z", + "1.0.0": "2023-11-21T23:39:12.762Z" + }, + "users": {}, + "dist-tags": { + "latest": "1.0.0" + }, + "_uplinks": {}, + "_distfiles": {}, + "_attachments": { + "uses-what-bin-slow-1.0.0.tgz": { + "shasum": "af87d384ce8a007905c42d87989fd3ccd8fa9d6b", + "version": "1.0.0" + } + }, + "_rev": "", + "_id": "uses-what-bin-slow", + "readme": "ERROR: No README data found!" +} \ No newline at end of file diff --git a/test/cli/install/registry/packages/uses-what-bin-slow/uses-what-bin-slow-1.0.0.tgz b/test/cli/install/registry/packages/uses-what-bin-slow/uses-what-bin-slow-1.0.0.tgz new file mode 100644 index 0000000000000000000000000000000000000000..7b18d0984980fd40c1602248f8eb0f66cf624fe3 GIT binary patch literal 202 zcmV;*05$&~iwFP!00002|Lu^y4uUWgMmzHqO$R2y7Nvu4Q{hIdwKQCi!SL=D5C-EW zF)`^`Zu8~z>yJ~#Rh?k#rkP%_vSejA=W@TNpYnlBN=kOQqaiJ5bglVA7pclN_~8&{Q4UOvW8*)Ri41k@A$!ITp2c4f=el z8bfXUuNpJRXaF1)G>+-q%SN$(6_nt>mS77FZ!L?zoH=cbdJ+f(0)a1i0$i{_4*&=N E05vdMFaQ7m literal 0 HcmV?d00001 diff --git a/test/cli/install/registry/packages/with-postinstall-a/package.json b/test/cli/install/registry/packages/with-postinstall-a/package.json new file mode 100644 index 0000000000..d2319f3db1 --- /dev/null +++ b/test/cli/install/registry/packages/with-postinstall-a/package.json @@ -0,0 +1,45 @@ +{ + "name": "with-postinstall-a", + "versions": { + "1.0.0": { + "name": "with-postinstall-a", + "version": "1.0.0", + "dependencies": { + "with-postinstall-b": "^1.0.0" + }, + "scripts": { + "postinstall": "node -e \"process.stdout.write(Number(new Date()).toString())\" >> output.json" + }, + "gitHead": "ee6e0734e911d48f6ff786aa0f05b9b9926e4815", + "_id": "with-postinstall-a@1.0.0", + "_nodeVersion": "21.1.0", + "_npmVersion": "8.19.4", + "dist": { + "integrity": "sha512-jNHnSlC1/asdVm2mwuHTma7sEdRgkLk/kkp4hsDn75898oo1uZbPPAgJxeoXdAGg42CxHg/AkBXpIShkJsukpw==", + "shasum": 
"e2fe57ea4bb886e734914810467a548b1ec8519f", + "tarball": "http://localhost:4873/with-postinstall-a/-/with-postinstall-a-1.0.0.tgz" + }, + "contributors": [] + } + }, + "time": { + "modified": "2023-11-15T09:37:10.321Z", + "created": "2023-11-15T09:37:10.321Z", + "1.0.0": "2023-11-15T09:37:10.321Z" + }, + "users": {}, + "dist-tags": { + "latest": "1.0.0" + }, + "_uplinks": {}, + "_distfiles": {}, + "_attachments": { + "with-postinstall-a-1.0.0.tgz": { + "shasum": "e2fe57ea4bb886e734914810467a548b1ec8519f", + "version": "1.0.0" + } + }, + "_rev": "", + "_id": "with-postinstall-a", + "readme": "ERROR: No README data found!" +} \ No newline at end of file diff --git a/test/cli/install/registry/packages/with-postinstall-a/with-postinstall-a-1.0.0.tgz b/test/cli/install/registry/packages/with-postinstall-a/with-postinstall-a-1.0.0.tgz new file mode 100644 index 0000000000000000000000000000000000000000..7bc0d3ea9d7ee0ac2e24f3ba8b56c9ee6f5adb0f GIT binary patch literal 253 zcmV`FZI!ebY;78WWDnPLW8gsA^IMPB&ETRNlwuw zT)d2@9ma*j37!qFmrI%aYl_zGIo}G{Y=G5)4f#V~_N~u#RVWk+g&)2F5)tv&00;m8 Dhdy@P literal 0 HcmV?d00001 diff --git a/test/cli/install/registry/packages/with-postinstall-b/package.json b/test/cli/install/registry/packages/with-postinstall-b/package.json new file mode 100644 index 0000000000..d9fa68c047 --- /dev/null +++ b/test/cli/install/registry/packages/with-postinstall-b/package.json @@ -0,0 +1,43 @@ +{ + "name": "with-postinstall-b", + "versions": { + "1.0.0": { + "name": "with-postinstall-b", + "version": "1.0.0", + "dependencies": {}, + "scripts": { + "postinstall": "node -e \"process.stdout.write(Number(new Date()).toString())\" >> output.json" + }, + "gitHead": "ee6e0734e911d48f6ff786aa0f05b9b9926e4815", + "_id": "with-postinstall-b@1.0.0", + "_nodeVersion": "21.1.0", + "_npmVersion": "8.19.4", + "dist": { + "integrity": "sha512-LEgpt3TM2NWMUAN1Z7xEpt7fNhhTU5O7c9W/sgaM/ckYMwE2G1sqeikL4Hmf0l5GvALyFfcGK1tiEoluRWUmiA==", + "shasum": 
"330e2372dcc4b79f5b2f59619f6a5d3ba54161b8", + "tarball": "http://localhost:4873/with-postinstall-b/-/with-postinstall-b-1.0.0.tgz" + }, + "contributors": [] + } + }, + "time": { + "modified": "2023-11-15T09:39:05.043Z", + "created": "2023-11-15T09:39:05.043Z", + "1.0.0": "2023-11-15T09:39:05.043Z" + }, + "users": {}, + "dist-tags": { + "latest": "1.0.0" + }, + "_uplinks": {}, + "_distfiles": {}, + "_attachments": { + "with-postinstall-b-1.0.0.tgz": { + "shasum": "330e2372dcc4b79f5b2f59619f6a5d3ba54161b8", + "version": "1.0.0" + } + }, + "_rev": "", + "_id": "with-postinstall-b", + "readme": "ERROR: No README data found!" +} \ No newline at end of file diff --git a/test/cli/install/registry/packages/with-postinstall-b/with-postinstall-b-1.0.0.tgz b/test/cli/install/registry/packages/with-postinstall-b/with-postinstall-b-1.0.0.tgz new file mode 100644 index 0000000000000000000000000000000000000000..3f644e077a5e0779088f5b3df1a5c0141ca0b1b2 GIT binary patch literal 247 zcmV#vx|1)p15 zZM3!^C<{j&bogopzWO x9%p#4vR*DVhbKr-T}tLS7HDN001Oha@PO= literal 0 HcmV?d00001 diff --git a/test/harness.ts b/test/harness.ts index d3f23e1e7a..cea6869e1b 100644 --- a/test/harness.ts +++ b/test/harness.ts @@ -15,10 +15,6 @@ export function bunExe() { return process.execPath; } -export function withoutMimalloc(input: string) { - return input.replaceAll(/^mimalloc warning:.*$/gm, ""); -} - export function nodeExe(): string | null { return which("node") || null; } @@ -160,24 +156,3 @@ export function bunRunAsScript(dir: string, script: string, env?: Record & Pick) { - const origResponseText = Response.prototype.text; - beforeAll(() => { - // @ts-expect-error - Response.prototype.text = async function () { - return withoutMimalloc(await origResponseText.call(this)); - }; - }); - - afterAll(() => { - // @ts-expect-error - Response.prototype.text = origResponseText; - }); -} diff --git a/test/integration/esbuild/build-file.js b/test/integration/esbuild/build-file.js new file mode 100644 index 
0000000000..ef2aa534b4 --- /dev/null +++ b/test/integration/esbuild/build-file.js @@ -0,0 +1,2 @@ +console.log("hello"); +console.log("estrella"); diff --git a/test/integration/esbuild/esbuild.test.ts b/test/integration/esbuild/esbuild.test.ts new file mode 100644 index 0000000000..097deef2d0 --- /dev/null +++ b/test/integration/esbuild/esbuild.test.ts @@ -0,0 +1,205 @@ +import { describe, expect, test } from "bun:test"; +import { rm, writeFile, mkdir, exists, cp } from "fs/promises"; +import { bunExe, bunEnv as env } from "harness"; +import { mkdtempSync, realpathSync } from "fs"; +import { tmpdir } from "os"; +import { join } from "path"; +import { spawn } from "bun"; + +describe("esbuild integration test", () => { + test("install and use esbuild", async () => { + const packageDir = mkdtempSync(join(realpathSync(tmpdir()), "bun-esbuild-test-")); + + await writeFile( + join(packageDir, "package.json"), + JSON.stringify({ + name: "bun-esbuild-test", + version: "1.0.0", + }), + ); + + var { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install", "esbuild@0.19.8"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env, + }); + + var err = await new Response(stderr).text(); + var out = await new Response(stdout).text(); + expect(err).toContain("Saved lockfile"); + expect(out).toContain("esbuild@0.19.8"); + expect(await exited).toBe(0); + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "esbuild", "--version"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env, + })); + + err = await new Response(stderr).text(); + out = await new Response(stdout).text(); + expect(err).toBe(""); + expect(out).toContain("0.19.8"); + expect(await exited).toBe(0); + + await rm(packageDir, { recursive: true, force: true }); + }); + + test("install and use estrella", async () => { + const packageDir = mkdtempSync(join(realpathSync(tmpdir()), "bun-ebuild-estrella-test-")); + + await writeFile( + join(packageDir, 
"package.json"), + JSON.stringify({ + name: "bun-esbuild-estrella-test", + version: "1.0.0", + }), + ); + + var { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install", "estrella@1.4.1"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env, + }); + + var err = await new Response(stderr).text(); + var out = await new Response(stdout).text(); + expect(err).toContain("Saved lockfile"); + expect(out).toContain("estrella@1.4.1"); + expect(await exited).toBe(0); + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "estrella", "--estrella-version"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env, + })); + + err = await new Response(stderr).text(); + out = await new Response(stdout).text(); + expect(err).toBe(""); + expect(out).toContain("1.4.1"); + expect(await exited).toBe(0); + + await cp(join(import.meta.dir, "build-file.js"), join(packageDir, "build-file.js")); + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "estrella", "build-file.js"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env, + })); + + err = await new Response(stderr).text(); + out = await new Response(stdout).text(); + expect(err).toBe(""); + expect(out).toBe('console.log("hello"),console.log("estrella");\n'); + expect(await exited).toBe(0); + + await rm(join(packageDir, "node_modules"), { recursive: true, force: true }); + await rm(join(packageDir, "bun.lockb"), { force: true }); + + await writeFile( + join(packageDir, "package.json"), + JSON.stringify({ + name: "bun-esbuild-estrella-test", + version: "1.0.0", + dependencies: { + "estrella": "1.4.1", + // different version of esbuild + "esbuild": "0.19.8", + }, + }), + ); + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env, + })); + + err = await new Response(stderr).text(); + out = await new Response(stdout).text(); + 
expect(err).toContain("Saved lockfile"); + expect(out).toContain("estrella@1.4.1"); + expect(out).toContain("esbuild@0.19.8"); + expect(await exited).toBe(0); + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "estrella", "--estrella-version"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env, + })); + + err = await new Response(stderr).text(); + out = await new Response(stdout).text(); + expect(err).toBe(""); + expect(out).toContain("1.4.1"); + expect(await exited).toBe(0); + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "esbuild", "--version"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env, + })); + + err = await new Response(stderr).text(); + out = await new Response(stdout).text(); + expect(err).toBe(""); + expect(out).toContain("0.19.8"); + expect(await exited).toBe(0); + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "esbuild", "--version"], + cwd: join(packageDir, "node_modules", "estrella"), + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env, + })); + + err = await new Response(stderr).text(); + out = await new Response(stdout).text(); + expect(err).toBe(""); + expect(out).toContain("0.11.23"); + + ({ stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "estrella", "build-file.js"], + cwd: packageDir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env, + })); + + err = await new Response(stderr).text(); + out = await new Response(stdout).text(); + expect(err).toBe(""); + expect(out).toBe('console.log("hello"),console.log("estrella");\n'); + expect(await exited).toBe(0); + + await rm(packageDir, { recursive: true, force: true }); + }); +});