From 150338faab12c2cb89e214c284634376b323759e Mon Sep 17 00:00:00 2001 From: Dylan Conway Date: Tue, 21 Oct 2025 14:18:39 -0700 Subject: [PATCH] implement `publicHoistPattern` and `hoistPattern` (#23567) ### What does this PR do? Adds support for `publicHoistPattern` in `bunfig.toml` and `public-hoist-pattern` from `.npmrc`. This setting allows you to select transitive packages to hoist to the root node_modules, making them available to all workspace packages. ```toml [install] # can be a string publicHoistPattern = "@types*" # or an array publicHoistPattern = [ "@types*", "*eslint*" ] ``` `publicHoistPattern` only affects the isolated linker. --- Adds `hoistPattern`. `hoistPattern` is the same as `publicHoistPattern`, but applies to the `node_modules/.bun/node_modules` directory instead of the root node_modules. Also, the default value of `hoistPattern` is `*` (everything is hoisted to `node_modules/.bun/node_modules` by default). --- Fixes a determinism issue when constructing the `node_modules/.bun/node_modules` directory. --- closes #23481 closes #6160 closes #23548 ### How did you verify your code works? 
Added tests for - [x] only include patterns - [x] only exclude patterns - [x] mix of include and exclude - [x] errors for unexpected expression types - [x] excluding direct dependency (should still include) - [x] match all with `*` - [x] string and array expression types --------- Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> --- src/api/schema.zig | 4 + src/bun.js/bindings/RegularExpression.zig | 4 +- src/bun.js/jsc.zig | 1 + src/bun.js/test/jest.zig | 2 +- src/bun.zig | 2 - src/bunfig.zig | 24 + src/cli.zig | 2 +- src/cli/Arguments.zig | 2 +- src/collections/array_list.zig | 4 + src/ini.zig | 30 ++ .../PackageManager/PackageManagerOptions.zig | 11 + src/install/PnpmMatcher.zig | 198 +++++++++ src/install/install.zig | 1 + src/install/isolated_install.zig | 366 ++++++++------- src/install/isolated_install/Installer.zig | 86 ++-- src/install/isolated_install/Store.zig | 3 + src/js/internal-for-testing.ts | 7 + src/string/escapeRegExp.zig | 122 +++++ src/string/immutable.zig | 4 + test/cli/install/public-hoist-pattern.test.ts | 417 ++++++++++++++++++ test/harness.ts | 8 + test/js/bun/util/escapeRegExp.test.ts | 16 + 22 files changed, 1119 insertions(+), 195 deletions(-) create mode 100644 src/install/PnpmMatcher.zig create mode 100644 src/string/escapeRegExp.zig create mode 100644 test/cli/install/public-hoist-pattern.test.ts create mode 100644 test/js/bun/util/escapeRegExp.test.ts diff --git a/src/api/schema.zig b/src/api/schema.zig index ac564b7c7b..8e28eb94fd 100644 --- a/src/api/schema.zig +++ b/src/api/schema.zig @@ -3061,6 +3061,9 @@ pub const api = struct { minimum_release_age_ms: ?f64 = null, minimum_release_age_excludes: ?[]const []const u8 = null, + + public_hoist_pattern: ?install.PnpmMatcher = null, + hoist_pattern: ?install.PnpmMatcher = null, }; pub const ClientServerModule = struct { @@ -3223,4 +3226,5 @@ const std = @import("std"); const bun = @import("bun"); const OOM = bun.OOM; +const install = bun.install; 
const js_ast = bun.ast; diff --git a/src/bun.js/bindings/RegularExpression.zig b/src/bun.js/bindings/RegularExpression.zig index faf1fc36aa..88f940ba92 100644 --- a/src/bun.js/bindings/RegularExpression.zig +++ b/src/bun.js/bindings/RegularExpression.zig @@ -19,11 +19,11 @@ pub const RegularExpression = opaque { extern fn Yarr__RegularExpression__searchRev(this: *RegularExpression) i32; extern fn Yarr__RegularExpression__matches(this: *RegularExpression, string: bun.String) i32; - pub inline fn init(pattern: bun.String, flags: Flags) !*RegularExpression { + pub inline fn init(pattern: bun.String, flags: Flags) error{InvalidRegExp}!*RegularExpression { var regex = Yarr__RegularExpression__init(pattern, @intFromEnum(flags)); if (!regex.isValid()) { regex.deinit(); - return error.InvalidRegex; + return error.InvalidRegExp; } return regex; } diff --git a/src/bun.js/jsc.zig b/src/bun.js/jsc.zig index ee13a61d0f..53cac93cfc 100644 --- a/src/bun.js/jsc.zig +++ b/src/bun.js/jsc.zig @@ -85,6 +85,7 @@ pub const SourceProvider = @import("./bindings/SourceProvider.zig").SourceProvid pub const CatchScope = @import("./bindings/CatchScope.zig").CatchScope; pub const ExceptionValidationScope = @import("./bindings/CatchScope.zig").ExceptionValidationScope; pub const MarkedArgumentBuffer = @import("./bindings/MarkedArgumentBuffer.zig").MarkedArgumentBuffer; +pub const RegularExpression = @import("./bindings/RegularExpression.zig").RegularExpression; // JavaScript-related pub const Errorable = @import("./bindings/Errorable.zig").Errorable; diff --git a/src/bun.js/test/jest.zig b/src/bun.js/test/jest.zig index 4f3b74eca9..0f34d3daa5 100644 --- a/src/bun.js/test/jest.zig +++ b/src/bun.js/test/jest.zig @@ -504,7 +504,6 @@ const ExpectTypeOf = expect.ExpectTypeOf; const bun = @import("bun"); const ArrayIdentityContext = bun.ArrayIdentityContext; const Output = bun.Output; -const RegularExpression = bun.RegularExpression; const default_allocator = bun.default_allocator; const logger = 
bun.logger; @@ -512,5 +511,6 @@ const jsc = bun.jsc; const CallFrame = jsc.CallFrame; const JSGlobalObject = jsc.JSGlobalObject; const JSValue = jsc.JSValue; +const RegularExpression = jsc.RegularExpression; const VirtualMachine = jsc.VirtualMachine; const ZigString = jsc.ZigString; diff --git a/src/bun.zig b/src/bun.zig index 7a5c884ec7..e7c09f62dd 100644 --- a/src/bun.zig +++ b/src/bun.zig @@ -1991,8 +1991,6 @@ pub const WTF = struct { pub const Wyhash11 = @import("./wyhash.zig").Wyhash11; -pub const RegularExpression = @import("./bun.js/bindings/RegularExpression.zig").RegularExpression; - const TODO_LOG = Output.scoped(.TODO, .visible); pub inline fn todo(src: std.builtin.SourceLocation, value: anytype) @TypeOf(value) { if (comptime Environment.allow_assert) { diff --git a/src/bunfig.zig b/src/bunfig.zig index 28afdc4e87..12c123522c 100644 --- a/src/bunfig.zig +++ b/src/bunfig.zig @@ -731,6 +731,30 @@ pub const Bunfig = struct { }, } } + + if (install_obj.get("publicHoistPattern")) |public_hoist_pattern_expr| { + install.public_hoist_pattern = bun.install.PnpmMatcher.fromExpr( + allocator, + public_hoist_pattern_expr, + this.log, + this.source, + ) catch |err| switch (err) { + error.OutOfMemory => |oom| return oom, + error.UnexpectedExpr, error.InvalidRegExp => return error.@"Invalid Bunfig", + }; + } + + if (install_obj.get("hoistPattern")) |hoist_pattern_expr| { + install.hoist_pattern = bun.install.PnpmMatcher.fromExpr( + allocator, + hoist_pattern_expr, + this.log, + this.source, + ) catch |err| switch (err) { + error.OutOfMemory => |oom| return oom, + error.UnexpectedExpr, error.InvalidRegExp => return error.@"Invalid Bunfig", + }; + } } if (json.get("run")) |run_expr| { diff --git a/src/cli.zig b/src/cli.zig index 10452773cb..76845a6e6d 100644 --- a/src/cli.zig +++ b/src/cli.zig @@ -1727,11 +1727,11 @@ const bun = @import("bun"); const Environment = bun.Environment; const Global = bun.Global; const Output = bun.Output; -const RegularExpression = 
bun.RegularExpression; const bun_js = bun.bun_js; const clap = bun.clap; const default_allocator = bun.default_allocator; const logger = bun.logger; const strings = bun.strings; const File = bun.sys.File; +const RegularExpression = bun.jsc.RegularExpression; const api = bun.schema.api; diff --git a/src/cli/Arguments.zig b/src/cli/Arguments.zig index 565ed59bf1..a87f471e99 100644 --- a/src/cli/Arguments.zig +++ b/src/cli/Arguments.zig @@ -1333,7 +1333,6 @@ const FeatureFlags = bun.FeatureFlags; const Global = bun.Global; const OOM = bun.OOM; const Output = bun.Output; -const RegularExpression = bun.RegularExpression; const clap = bun.clap; const js_ast = bun.ast; const logger = bun.logger; @@ -1341,6 +1340,7 @@ const options = bun.options; const resolve_path = bun.path; const strings = bun.strings; const Api = bun.schema.api; +const RegularExpression = bun.jsc.RegularExpression; const CLI = bun.cli; const Command = CLI.Command; diff --git a/src/collections/array_list.zig b/src/collections/array_list.zig index 8653989abd..40769bdc0c 100644 --- a/src/collections/array_list.zig +++ b/src/collections/array_list.zig @@ -119,6 +119,10 @@ pub fn ArrayListAlignedIn( }; } + pub fn writer(self: *Self) Unmanaged.Writer { + return self.#unmanaged.writer(self.getStdAllocator()); + } + /// Returns a borrowed version of the allocator. 
pub fn allocator(self: *const Self) bun.allocators.Borrowed(Allocator) { return bun.allocators.borrow(self.#allocator); diff --git a/src/ini.zig b/src/ini.zig index 891a5b0d09..e0a18dfcc6 100644 --- a/src/ini.zig +++ b/src/ini.zig @@ -1073,6 +1073,36 @@ pub fn loadNpmrc( } } + if (out.get("public-hoist-pattern")) |public_hoist_pattern_expr| { + install.public_hoist_pattern = bun.install.PnpmMatcher.fromExpr( + allocator, + public_hoist_pattern_expr, + log, + source, + ) catch |err| switch (err) { + error.OutOfMemory => |oom| return oom, + error.InvalidRegExp, error.UnexpectedExpr => patterns: { + log.reset(); + break :patterns null; + }, + }; + } + + if (out.get("hoist-pattern")) |hoist_pattern_expr| { + install.hoist_pattern = bun.install.PnpmMatcher.fromExpr( + allocator, + hoist_pattern_expr, + log, + source, + ) catch |err| switch (err) { + error.OutOfMemory => |oom| return oom, + error.InvalidRegExp, error.UnexpectedExpr => patterns: { + log.reset(); + break :patterns null; + }, + }; + } + var registry_map = install.scoped orelse bun.schema.api.NpmRegistryMap{}; // Process scopes diff --git a/src/install/PackageManager/PackageManagerOptions.zig b/src/install/PackageManager/PackageManagerOptions.zig index 9776790ba2..32ae941e07 100644 --- a/src/install/PackageManager/PackageManagerOptions.zig +++ b/src/install/PackageManager/PackageManagerOptions.zig @@ -71,6 +71,9 @@ depth: ?usize = null, /// isolated installs (pnpm-like) or hoisted installs (yarn-like, original) node_linker: NodeLinker = .auto, +public_hoist_pattern: ?bun.install.PnpmMatcher = null, +hoist_pattern: ?bun.install.PnpmMatcher = null, + // Security scanner module path security_scanner: ?[]const u8 = null, @@ -387,6 +390,14 @@ pub fn load( this.minimum_release_age_excludes = exclusions; } + if (config.public_hoist_pattern) |public_hoist_pattern| { + this.public_hoist_pattern = public_hoist_pattern; + } + + if (config.hoist_pattern) |hoist_pattern| { + this.hoist_pattern = hoist_pattern; + } + 
this.explicit_global_directory = config.global_dir orelse this.explicit_global_directory; } diff --git a/src/install/PnpmMatcher.zig b/src/install/PnpmMatcher.zig new file mode 100644 index 0000000000..0e7cea6ab1 --- /dev/null +++ b/src/install/PnpmMatcher.zig @@ -0,0 +1,198 @@ +/// https://github.com/pnpm/pnpm/blob/3abd3946237aa6ba7831552310ec371ddd3616c2/config/matcher/src/index.ts +const PnpmMatcher = @This(); + +matchers: []const Matcher, +behavior: Behavior, + +const Matcher = struct { + pattern: union(enum) { + match_all, + regex: *jsc.RegularExpression, + }, + is_exclude: bool, +}; + +const Behavior = enum { + all_matchers_include, + all_matchers_exclude, + has_exclude_and_include_matchers, +}; + +const FromExprError = OOM || error{ + InvalidRegExp, + UnexpectedExpr, +}; + +pub fn fromExpr(allocator: std.mem.Allocator, expr: ast.Expr, log: *logger.Log, source: *const logger.Source) FromExprError!PnpmMatcher { + var buf: collections.ArrayListDefault(u8) = .init(); + defer buf.deinit(); + + bun.jsc.initialize(false); + + var matchers: collections.ArrayListDefault(Matcher) = .init(); + + var has_include = false; + var has_exclude = false; + + switch (expr.data) { + .e_string => { + const pattern = expr.data.e_string.slice(allocator); + const matcher = createMatcher(pattern, &buf) catch |err| switch (err) { + error.OutOfMemory => return err, + error.InvalidRegExp => { + try log.addErrorFmtOpts(allocator, "Invalid regex: {s}", .{pattern}, .{ + .loc = expr.loc, + .redact_sensitive_information = true, + .source = source, + }); + return err; + }, + }; + has_include = has_include or !matcher.is_exclude; + has_exclude = has_exclude or matcher.is_exclude; + try matchers.append(matcher); + }, + .e_array => |patterns| { + for (patterns.slice()) |pattern_expr| { + if (try pattern_expr.asStringCloned(allocator)) |pattern| { + const matcher = createMatcher(pattern, &buf) catch |err| switch (err) { + error.OutOfMemory => return err, + error.InvalidRegExp => { + try 
log.addErrorFmtOpts(allocator, "Invalid regex: {s}", .{pattern}, .{ + .loc = pattern_expr.loc, + .redact_sensitive_information = true, + .source = source, + }); + return err; + }, + }; + has_include = has_include or !matcher.is_exclude; + has_exclude = has_exclude or matcher.is_exclude; + try matchers.append(matcher); + } else { + try log.addErrorOpts("Expected a string", .{ + .loc = pattern_expr.loc, + .redact_sensitive_information = true, + .source = source, + }); + return error.UnexpectedExpr; + } + } + }, + else => { + try log.addErrorOpts("Expected a string or an array of strings", .{ + .loc = expr.loc, + .redact_sensitive_information = true, + .source = source, + }); + return error.UnexpectedExpr; + }, + } + + const behavior: Behavior = if (!has_include) + .all_matchers_exclude + else if (!has_exclude) + .all_matchers_include + else + .has_exclude_and_include_matchers; + + return .{ + .matchers = try matchers.toOwnedSlice(), + .behavior = behavior, + }; +} + +const CreateMatcherError = OOM || error{InvalidRegExp}; + +fn createMatcher(raw: []const u8, buf: *collections.ArrayListDefault(u8)) CreateMatcherError!Matcher { + buf.clearRetainingCapacity(); + var writer = buf.writer(); + + var trimmed = strings.trim(raw, &strings.whitespace_chars); + + var is_exclude = false; + if (strings.startsWithChar(trimmed, '!')) { + is_exclude = true; + trimmed = trimmed[1..]; + } + + if (strings.eqlComptime(trimmed, "*")) { + return .{ .pattern = .match_all, .is_exclude = is_exclude }; + } + + try writer.writeByte('^'); + try strings.escapeRegExpForPackageNameMatching(trimmed, writer); + try writer.writeByte('$'); + + const regex = try jsc.RegularExpression.init(.cloneUTF8(buf.items()), .none); + + return .{ .pattern = .{ .regex = regex }, .is_exclude = is_exclude }; +} + +pub fn isMatch(this: *const PnpmMatcher, name: []const u8) bool { + if (this.matchers.len == 0) { + return false; + } + + const name_str: String = .fromBytes(name); + + switch (this.behavior) { + 
.all_matchers_include => { + for (this.matchers) |matcher| { + switch (matcher.pattern) { + .match_all => { + return true; + }, + .regex => |regex| { + if (regex.matches(name_str)) { + return true; + } + }, + } + } + return false; + }, + .all_matchers_exclude => { + for (this.matchers) |matcher| { + switch (matcher.pattern) { + .match_all => { + return false; + }, + .regex => |regex| { + if (regex.matches(name_str)) { + return false; + } + }, + } + } + return true; + }, + .has_exclude_and_include_matchers => { + var matches = false; + for (this.matchers) |matcher| { + switch (matcher.pattern) { + .match_all => { + matches = !matcher.is_exclude; + }, + .regex => |regex| { + if (regex.matches(name_str)) { + matches = !matcher.is_exclude; + } + }, + } + } + return matches; + }, + } +} + +const std = @import("std"); + +const bun = @import("bun"); +const OOM = bun.OOM; +const String = bun.String; +const ast = bun.ast; +const collections = bun.collections; +const jsc = bun.jsc; +const logger = bun.logger; +const strings = bun.strings; diff --git a/src/install/install.zig b/src/install/install.zig index 091d353d13..3ccf3d3bcc 100644 --- a/src/install/install.zig +++ b/src/install/install.zig @@ -256,6 +256,7 @@ pub const Repository = @import("./repository.zig").Repository; pub const Resolution = @import("./resolution.zig").Resolution; pub const Store = @import("./isolated_install/Store.zig").Store; pub const FileCopier = @import("./isolated_install/FileCopier.zig").FileCopier; +pub const PnpmMatcher = @import("./PnpmMatcher.zig"); pub const ArrayIdentityContext = @import("../identity_context.zig").ArrayIdentityContext; pub const IdentityContext = @import("../identity_context.zig").IdentityContext; diff --git a/src/install/isolated_install.zig b/src/install/isolated_install.zig index 3e000a68b4..48e85bd5aa 100644 --- a/src/install/isolated_install.zig +++ b/src/install/isolated_install.zig @@ -419,6 +419,12 @@ pub fn installIsolatedPackages( .entry_parent_id = .invalid, 
}); + var public_hoisted: bun.StringArrayHashMap(void) = .init(manager.allocator); + defer public_hoisted.deinit(); + + var hidden_hoisted: bun.StringArrayHashMap(void) = .init(manager.allocator); + defer hidden_hoisted.deinit(); + // Second pass: Deduplicate nodes when the pkg_id and peer set match an existing entry. next_entry: while (entry_queue.readItem()) |entry| { const pkg_id = node_pkg_ids[entry.node_id.get()]; @@ -512,11 +518,32 @@ pub fn installIsolatedPackages( var new_entry_parents: std.ArrayListUnmanaged(Store.Entry.Id) = try .initCapacity(lockfile.allocator, 1); new_entry_parents.appendAssumeCapacity(entry.entry_parent_id); + const hoisted = hoisted: { + if (new_entry_dep_id == invalid_dependency_id) { + break :hoisted false; + } + + const dep_name = dependencies[new_entry_dep_id].name.slice(string_buf); + + const hoist_pattern = manager.options.hoist_pattern orelse { + const hoist_entry = try hidden_hoisted.getOrPut(dep_name); + break :hoisted !hoist_entry.found_existing; + }; + + if (hoist_pattern.isMatch(dep_name)) { + const hoist_entry = try hidden_hoisted.getOrPut(dep_name); + break :hoisted !hoist_entry.found_existing; + } + + break :hoisted false; + }; + const new_entry: Store.Entry = .{ .node_id = entry.node_id, .dependencies = new_entry_dependencies, .parents = new_entry_parents, .peer_hash = new_entry_peer_hash, + .hoisted = hoisted, }; const new_entry_id: Store.Entry.Id = .from(@intCast(store.len)); @@ -539,6 +566,29 @@ pub fn installIsolatedPackages( .{ .entry_id = new_entry_id, .dep_id = new_entry_dep_id }, &ctx, ); + + if (new_entry_dep_id != invalid_dependency_id) { + if (entry.entry_parent_id == .root) { + // make sure direct dependencies are not replaced + const dep_name = dependencies[new_entry_dep_id].name.slice(string_buf); + try public_hoisted.put(dep_name, {}); + } else { + // transitive dependencies (also direct dependencies of workspaces!) 
+ const dep_name = dependencies[new_entry_dep_id].name.slice(string_buf); + if (manager.options.public_hoist_pattern) |public_hoist_pattern| { + if (public_hoist_pattern.isMatch(dep_name)) { + const hoist_entry = try public_hoisted.getOrPut(dep_name); + if (!hoist_entry.found_existing) { + try entry_dependencies[0].insert( + lockfile.allocator, + .{ .entry_id = new_entry_id, .dep_id = new_entry_dep_id }, + &ctx, + ); + } + } + } + } + } } try dedupe_entry.value_ptr.append(lockfile.allocator, .{ @@ -566,6 +616,162 @@ pub fn installIsolatedPackages( }; }; + // setup node_modules/.bun + const is_new_bun_modules = is_new_bun_modules: { + const node_modules_path = bun.OSPathLiteral("node_modules"); + const bun_modules_path = bun.OSPathLiteral("node_modules/" ++ Store.modules_dir_name); + + sys.mkdirat(FD.cwd(), node_modules_path, 0o755).unwrap() catch { + sys.mkdirat(FD.cwd(), bun_modules_path, 0o755).unwrap() catch { + break :is_new_bun_modules false; + }; + + // 'node_modules' exists and 'node_modules/.bun' doesn't + + if (comptime Environment.isWindows) { + // Windows: + // 1. create 'node_modules/.old_modules-{hex}' + // 2. for each entry in 'node_modules' rename into 'node_modules/.old_modules-{hex}' + // 3. 
for each workspace 'node_modules' rename into 'node_modules/.old_modules-{hex}/old_{basename}_modules' + + var rename_path: bun.AutoRelPath = .init(); + defer rename_path.deinit(); + + { + var mkdir_path: bun.RelPath(.{ .sep = .auto, .unit = .u16 }) = .from("node_modules"); + defer mkdir_path.deinit(); + + mkdir_path.appendFmt(".old_modules-{s}", .{&std.fmt.bytesToHex(std.mem.asBytes(&bun.fastRandom()), .lower)}); + rename_path.append(mkdir_path.slice()); + + // 1 + sys.mkdirat(FD.cwd(), mkdir_path.sliceZ(), 0o755).unwrap() catch { + break :is_new_bun_modules true; + }; + } + + const node_modules = bun.openDirForIteration(FD.cwd(), "node_modules").unwrap() catch { + break :is_new_bun_modules true; + }; + + var entry_path: bun.AutoRelPath = .from("node_modules"); + defer entry_path.deinit(); + + // 2 + var node_modules_iter = bun.DirIterator.iterate(node_modules, .u8); + while (node_modules_iter.next().unwrap() catch break :is_new_bun_modules true) |entry| { + if (bun.strings.startsWithChar(entry.name.slice(), '.')) { + continue; + } + + var entry_path_save = entry_path.save(); + defer entry_path_save.restore(); + + entry_path.append(entry.name.slice()); + + var rename_path_save = rename_path.save(); + defer rename_path_save.restore(); + + rename_path.append(entry.name.slice()); + + sys.renameat(FD.cwd(), entry_path.sliceZ(), FD.cwd(), rename_path.sliceZ()).unwrap() catch {}; + } + + // 3 + for (lockfile.workspace_paths.values()) |workspace_path| { + var workspace_node_modules: bun.AutoRelPath = .from(workspace_path.slice(lockfile.buffers.string_bytes.items)); + defer workspace_node_modules.deinit(); + + const basename = workspace_node_modules.basename(); + + workspace_node_modules.append("node_modules"); + + var rename_path_save = rename_path.save(); + defer rename_path_save.restore(); + + rename_path.appendFmt(".old_{s}_modules", .{basename}); + + sys.renameat(FD.cwd(), workspace_node_modules.sliceZ(), FD.cwd(), rename_path.sliceZ()).unwrap() catch {}; + } + } 
else { + + // Posix: + // 1. rename existing 'node_modules' to temp location + // 2. create new 'node_modules' directory + // 3. rename temp into 'node_modules/.old_modules-{hex}' + // 4. attempt renaming 'node_modules/.old_modules-{hex}/.cache' to 'node_modules/.cache' + // 5. rename each workspace 'node_modules' into 'node_modules/.old_modules-{hex}/old_{basename}_modules' + var temp_node_modules_buf: bun.PathBuffer = undefined; + const temp_node_modules = bun.fs.FileSystem.tmpname("tmp_modules", &temp_node_modules_buf, bun.fastRandom()) catch unreachable; + + // 1 + sys.renameat(FD.cwd(), "node_modules", FD.cwd(), temp_node_modules).unwrap() catch { + break :is_new_bun_modules true; + }; + + // 2 + sys.mkdirat(FD.cwd(), node_modules_path, 0o755).unwrap() catch |err| { + Output.err(err, "failed to create './node_modules'", .{}); + Global.exit(1); + }; + + sys.mkdirat(FD.cwd(), bun_modules_path, 0o755).unwrap() catch |err| { + Output.err(err, "failed to create './node_modules/.bun'", .{}); + Global.exit(1); + }; + + var rename_path: bun.AutoRelPath = .from("node_modules"); + defer rename_path.deinit(); + + rename_path.appendFmt(".old_modules-{s}", .{&std.fmt.bytesToHex(std.mem.asBytes(&bun.fastRandom()), .lower)}); + + // 3 + sys.renameat(FD.cwd(), temp_node_modules, FD.cwd(), rename_path.sliceZ()).unwrap() catch { + break :is_new_bun_modules true; + }; + + rename_path.append(".cache"); + + var cache_path: bun.AutoRelPath = .from("node_modules"); + defer cache_path.deinit(); + + cache_path.append(".cache"); + + // 4 + sys.renameat(FD.cwd(), rename_path.sliceZ(), FD.cwd(), cache_path.sliceZ()).unwrap() catch {}; + + // remove .cache so we can append destination for each workspace + rename_path.undo(1); + + // 5 + for (lockfile.workspace_paths.values()) |workspace_path| { + var workspace_node_modules: bun.AutoRelPath = .from(workspace_path.slice(lockfile.buffers.string_bytes.items)); + defer workspace_node_modules.deinit(); + + const basename = 
workspace_node_modules.basename(); + + workspace_node_modules.append("node_modules"); + + var rename_path_save = rename_path.save(); + defer rename_path_save.restore(); + + rename_path.appendFmt(".old_{s}_modules", .{basename}); + + sys.renameat(FD.cwd(), workspace_node_modules.sliceZ(), FD.cwd(), rename_path.sliceZ()).unwrap() catch {}; + } + } + + break :is_new_bun_modules true; + }; + + sys.mkdirat(FD.cwd(), bun_modules_path, 0o755).unwrap() catch |err| { + Output.err(err, "failed to create './node_modules/.bun'", .{}); + Global.exit(1); + }; + + break :is_new_bun_modules true; + }; + { var root_node: *Progress.Node = undefined; var download_node: Progress.Node = undefined; @@ -593,6 +799,7 @@ pub fn installIsolatedPackages( const entry_node_ids = entries.items(.node_id); const entry_steps = entries.items(.step); const entry_dependencies = entries.items(.dependencies); + const entry_hoisted = entries.items(.hoisted); const string_buf = lockfile.buffers.string_bytes.items; @@ -624,6 +831,7 @@ pub fn installIsolatedPackages( .trusted_dependencies_mutex = .{}, .trusted_dependencies_from_update_requests = manager.findTrustedDependenciesFromUpdateRequests(), .supported_backend = .init(PackageInstall.supported_method), + .is_new_bun_modules = is_new_bun_modules, }; for (tasks, 0..) |*task, _entry_id| { @@ -638,161 +846,6 @@ pub fn installIsolatedPackages( }; } - const is_new_bun_modules = is_new_bun_modules: { - const node_modules_path = bun.OSPathLiteral("node_modules"); - const bun_modules_path = bun.OSPathLiteral("node_modules/" ++ Store.modules_dir_name); - - sys.mkdirat(FD.cwd(), node_modules_path, 0o755).unwrap() catch { - sys.mkdirat(FD.cwd(), bun_modules_path, 0o755).unwrap() catch { - break :is_new_bun_modules false; - }; - - // 'node_modules' exists and 'node_modules/.bun' doesn't - - if (comptime Environment.isWindows) { - // Windows: - // 1. create 'node_modules/.old_modules-{hex}' - // 2. 
for each entry in 'node_modules' rename into 'node_modules/.old_modules-{hex}' - // 3. for each workspace 'node_modules' rename into 'node_modules/.old_modules-{hex}/old_{basename}_modules' - - var rename_path: bun.AutoRelPath = .init(); - defer rename_path.deinit(); - - { - var mkdir_path: bun.RelPath(.{ .sep = .auto, .unit = .u16 }) = .from("node_modules"); - defer mkdir_path.deinit(); - - mkdir_path.appendFmt(".old_modules-{s}", .{&std.fmt.bytesToHex(std.mem.asBytes(&bun.fastRandom()), .lower)}); - rename_path.append(mkdir_path.slice()); - - // 1 - sys.mkdirat(FD.cwd(), mkdir_path.sliceZ(), 0o755).unwrap() catch { - break :is_new_bun_modules true; - }; - } - - const node_modules = bun.openDirForIteration(FD.cwd(), "node_modules").unwrap() catch { - break :is_new_bun_modules true; - }; - - var entry_path: bun.AutoRelPath = .from("node_modules"); - defer entry_path.deinit(); - - // 2 - var node_modules_iter = bun.DirIterator.iterate(node_modules, .u8); - while (node_modules_iter.next().unwrap() catch break :is_new_bun_modules true) |entry| { - if (bun.strings.startsWithChar(entry.name.slice(), '.')) { - continue; - } - - var entry_path_save = entry_path.save(); - defer entry_path_save.restore(); - - entry_path.append(entry.name.slice()); - - var rename_path_save = rename_path.save(); - defer rename_path_save.restore(); - - rename_path.append(entry.name.slice()); - - sys.renameat(FD.cwd(), entry_path.sliceZ(), FD.cwd(), rename_path.sliceZ()).unwrap() catch {}; - } - - // 3 - for (lockfile.workspace_paths.values()) |workspace_path| { - var workspace_node_modules: bun.AutoRelPath = .from(workspace_path.slice(string_buf)); - defer workspace_node_modules.deinit(); - - const basename = workspace_node_modules.basename(); - - workspace_node_modules.append("node_modules"); - - var rename_path_save = rename_path.save(); - defer rename_path_save.restore(); - - rename_path.appendFmt(".old_{s}_modules", .{basename}); - - sys.renameat(FD.cwd(), workspace_node_modules.sliceZ(), 
FD.cwd(), rename_path.sliceZ()).unwrap() catch {}; - } - } else { - - // Posix: - // 1. rename existing 'node_modules' to temp location - // 2. create new 'node_modules' directory - // 3. rename temp into 'node_modules/.old_modules-{hex}' - // 4. attempt renaming 'node_modules/.old_modules-{hex}/.cache' to 'node_modules/.cache' - // 5. rename each workspace 'node_modules' into 'node_modules/.old_modules-{hex}/old_{basename}_modules' - var temp_node_modules_buf: bun.PathBuffer = undefined; - const temp_node_modules = bun.fs.FileSystem.tmpname("tmp_modules", &temp_node_modules_buf, bun.fastRandom()) catch unreachable; - - // 1 - sys.renameat(FD.cwd(), "node_modules", FD.cwd(), temp_node_modules).unwrap() catch { - break :is_new_bun_modules true; - }; - - // 2 - sys.mkdirat(FD.cwd(), node_modules_path, 0o755).unwrap() catch |err| { - Output.err(err, "failed to create './node_modules'", .{}); - Global.exit(1); - }; - - sys.mkdirat(FD.cwd(), bun_modules_path, 0o755).unwrap() catch |err| { - Output.err(err, "failed to create './node_modules/.bun'", .{}); - Global.exit(1); - }; - - var rename_path: bun.AutoRelPath = .from("node_modules"); - defer rename_path.deinit(); - - rename_path.appendFmt(".old_modules-{s}", .{&std.fmt.bytesToHex(std.mem.asBytes(&bun.fastRandom()), .lower)}); - - // 3 - sys.renameat(FD.cwd(), temp_node_modules, FD.cwd(), rename_path.sliceZ()).unwrap() catch { - break :is_new_bun_modules true; - }; - - rename_path.append(".cache"); - - var cache_path: bun.AutoRelPath = .from("node_modules"); - defer cache_path.deinit(); - - cache_path.append(".cache"); - - // 4 - sys.renameat(FD.cwd(), rename_path.sliceZ(), FD.cwd(), cache_path.sliceZ()).unwrap() catch {}; - - // remove .cache so we can append destination for each workspace - rename_path.undo(1); - - // 5 - for (lockfile.workspace_paths.values()) |workspace_path| { - var workspace_node_modules: bun.AutoRelPath = .from(workspace_path.slice(string_buf)); - defer workspace_node_modules.deinit(); - - 
const basename = workspace_node_modules.basename(); - - workspace_node_modules.append("node_modules"); - - var rename_path_save = rename_path.save(); - defer rename_path_save.restore(); - - rename_path.appendFmt(".old_{s}_modules", .{basename}); - - sys.renameat(FD.cwd(), workspace_node_modules.sliceZ(), FD.cwd(), rename_path.sliceZ()).unwrap() catch {}; - } - } - - break :is_new_bun_modules true; - }; - - sys.mkdirat(FD.cwd(), bun_modules_path, 0o755).unwrap() catch |err| { - Output.err(err, "failed to create './node_modules/.bun'", .{}); - Global.exit(1); - }; - - break :is_new_bun_modules true; - }; - // add the pending task count upfront manager.incrementPendingTasks(@intCast(store.entries.len)); for (0..store.entries.len) |_entry_id| { @@ -893,6 +946,9 @@ pub fn installIsolatedPackages( }; if (!needs_install) { + if (entry_hoisted[entry_id.get()]) { + installer.linkToHiddenNodeModules(entry_id); + } // .monotonic is okay because the task isn't running on another thread. entry_steps[entry_id.get()].store(.done, .monotonic); installer.onTaskComplete(entry_id, .skipped); diff --git a/src/install/isolated_install/Installer.zig b/src/install/isolated_install/Installer.zig index 59368ae563..9985c8882e 100644 --- a/src/install/isolated_install/Installer.zig +++ b/src/install/isolated_install/Installer.zig @@ -7,6 +7,7 @@ pub const Installer = struct { installed: Bitset, install_node: ?*Progress.Node, scripts_node: ?*Progress.Node, + is_new_bun_modules: bool, manager: *PackageManager, command_ctx: Command.Context, @@ -442,6 +443,7 @@ pub const Installer = struct { const entry_dependencies = entries.items(.dependencies); const entry_steps = entries.items(.step); const entry_scripts = entries.items(.scripts); + const entry_hoisted = entries.items(.hoisted); const nodes = installer.store.nodes.slice(); const node_pkg_ids = nodes.items(.pkg_id); @@ -889,40 +891,10 @@ pub const Installer = struct { .local_tarball, .remote_tarball, => { - const string_buf = 
lockfile.buffers.string_bytes.items; - - var hidden_hoisted_node_modules: bun.Path(.{ .sep = .auto }) = .init(); - defer hidden_hoisted_node_modules.deinit(); - - hidden_hoisted_node_modules.append( - "node_modules" ++ std.fs.path.sep_str ++ ".bun" ++ std.fs.path.sep_str ++ "node_modules", - ); - hidden_hoisted_node_modules.append(pkg_name.slice(installer.lockfile.buffers.string_bytes.items)); - - var target: bun.RelPath(.{ .sep = .auto }) = .init(); - defer target.deinit(); - - target.append(".."); - if (strings.containsChar(pkg_name.slice(installer.lockfile.buffers.string_bytes.items), '/')) { - target.append(".."); + if (!entry_hoisted[this.entry_id.get()]) { + continue :next_step this.nextStep(current_step); } - - target.appendFmt("{}/node_modules/{s}", .{ - Store.Entry.fmtStorePath(this.entry_id, installer.store, installer.lockfile), - pkg_name.slice(string_buf), - }); - - var full_target: bun.AbsPath(.{ .sep = .auto }) = .initTopLevelDir(); - defer full_target.deinit(); - - installer.appendStorePath(&full_target, this.entry_id); - - const symlinker: Symlinker = .{ - .dest = hidden_hoisted_node_modules, - .target = target, - .fallback_junction_target = full_target, - }; - _ = symlinker.ensureSymlink(.ignore_failure); + installer.linkToHiddenNodeModules(this.entry_id); }, } @@ -1223,6 +1195,54 @@ pub const Installer = struct { return .none; } + pub fn linkToHiddenNodeModules(this: *const Installer, entry_id: Store.Entry.Id) void { + const string_buf = this.lockfile.buffers.string_bytes.items; + + const node_id = this.store.entries.items(.node_id)[entry_id.get()]; + const pkg_id = this.store.nodes.items(.pkg_id)[node_id.get()]; + const pkg_name = this.lockfile.packages.items(.name)[pkg_id]; + + var hidden_hoisted_node_modules: bun.Path(.{ .sep = .auto }) = .init(); + defer hidden_hoisted_node_modules.deinit(); + + hidden_hoisted_node_modules.append( + "node_modules" ++ std.fs.path.sep_str ++ ".bun" ++ std.fs.path.sep_str ++ "node_modules", + ); + 
hidden_hoisted_node_modules.append(pkg_name.slice(string_buf)); + + var target: bun.RelPath(.{ .sep = .auto }) = .init(); + defer target.deinit(); + + target.append(".."); + if (strings.containsChar(pkg_name.slice(string_buf), '/')) { + target.append(".."); + } + + target.appendFmt("{}/node_modules/{s}", .{ + Store.Entry.fmtStorePath(entry_id, this.store, this.lockfile), + pkg_name.slice(string_buf), + }); + + var full_target: bun.AbsPath(.{ .sep = .auto }) = .initTopLevelDir(); + defer full_target.deinit(); + + this.appendStorePath(&full_target, entry_id); + + const symlinker: Symlinker = .{ + .dest = hidden_hoisted_node_modules, + .target = target, + .fallback_junction_target = full_target, + }; + + // symlinks won't exist if node_modules/.bun is new + const link_strategy: Symlinker.Strategy = if (this.is_new_bun_modules) + .expect_missing + else + .expect_existing; + + _ = symlinker.ensureSymlink(link_strategy); + } + pub fn linkDependencyBins(this: *const Installer, parent_entry_id: Store.Entry.Id) !void { const lockfile = this.lockfile; const store = this.store; diff --git a/src/install/isolated_install/Store.zig b/src/install/isolated_install/Store.zig index 14cf02cca6..8baf8c11a7 100644 --- a/src/install/isolated_install/Store.zig +++ b/src/install/isolated_install/Store.zig @@ -103,6 +103,9 @@ pub const Store = struct { parents: std.ArrayListUnmanaged(Id) = .empty, step: std.atomic.Value(Installer.Task.Step) = .init(.link_package), + // if true this entry gets symlinked to `node_modules/.bun/node_modules` + hoisted: bool, + peer_hash: PeerHash, scripts: ?*Package.Scripts.List = null, diff --git a/src/js/internal-for-testing.ts b/src/js/internal-for-testing.ts index a38ec1b915..1f3d147594 100644 --- a/src/js/internal-for-testing.ts +++ b/src/js/internal-for-testing.ts @@ -25,6 +25,13 @@ export const patchInternals = { const shellLex = $newZigFunction("shell.zig", "TestingAPIs.shellLex", 2); const shellParse = $newZigFunction("shell.zig", 
"TestingAPIs.shellParse", 2); +export const escapeRegExp = $newZigFunction("escapeRegExp.zig", "jsEscapeRegExp", 1); +export const escapeRegExpForPackageNameMatching = $newZigFunction( + "escapeRegExp.zig", + "jsEscapeRegExpForPackageNameMatching", + 1, +); + export const shellInternals = { lex: (a, ...b) => shellLex(a.raw, b), parse: (a, ...b) => shellParse(a.raw, b), diff --git a/src/string/escapeRegExp.zig b/src/string/escapeRegExp.zig new file mode 100644 index 0000000000..cd8cafd9e8 --- /dev/null +++ b/src/string/escapeRegExp.zig @@ -0,0 +1,122 @@ +const special_characters = "|\\{}()[]^$+*?.-"; + +pub fn escapeRegExp(input: []const u8, writer: anytype) @TypeOf(writer).Error!void { + var remain = input; + + while (strings.indexOfAny(remain, special_characters)) |i| { + try writer.writeAll(remain[0..i]); + switch (remain[i]) { + '|', + '\\', + '{', + '}', + '(', + ')', + '[', + ']', + '^', + '$', + '+', + '*', + '?', + '.', + => |c| try writer.writeAll(&.{ '\\', c }), + '-' => try writer.writeAll("\\x2d"), + else => |c| { + if (comptime Environment.isDebug) { + unreachable; + } + try writer.writeByte(c); + }, + } + remain = remain[i + 1 ..]; + } + + try writer.writeAll(remain); +} + +/// '*' becomes '.*' instead of '\\*' +pub fn escapeRegExpForPackageNameMatching(input: []const u8, writer: anytype) @TypeOf(writer).Error!void { + var remain = input; + + while (strings.indexOfAny(remain, special_characters)) |i| { + try writer.writeAll(remain[0..i]); + switch (remain[i]) { + '|', + '\\', + '{', + '}', + '(', + ')', + '[', + ']', + '^', + '$', + '+', + '?', + '.', + => |c| try writer.writeAll(&.{ '\\', c }), + '*' => try writer.writeAll(".*"), + '-' => try writer.writeAll("\\x2d"), + else => |c| { + if (comptime Environment.isDebug) { + unreachable; + } + try writer.writeByte(c); + }, + } + remain = remain[i + 1 ..]; + } + + try writer.writeAll(remain); +} + +pub fn jsEscapeRegExp(global: *JSGlobalObject, call_frame: *jsc.CallFrame) JSError!JSValue { + const 
input_value = call_frame.argument(0); + + if (!input_value.isString()) { + return global.throw("expected string argument", .{}); + } + + var input = try input_value.toSlice(global, bun.default_allocator); + defer input.deinit(); + + var buf: bun.collections.ArrayListDefault(u8) = .init(); + defer buf.deinit(); + + try escapeRegExp(input.slice(), buf.writer()); + + var output = String.cloneUTF8(buf.items()); + + return output.toJS(global); +} + +pub fn jsEscapeRegExpForPackageNameMatching(global: *JSGlobalObject, call_frame: *jsc.CallFrame) JSError!JSValue { + const input_value = call_frame.argument(0); + + if (!input_value.isString()) { + return global.throw("expected string argument", .{}); + } + + var input = try input_value.toSlice(global, bun.default_allocator); + defer input.deinit(); + + var buf: bun.collections.ArrayListDefault(u8) = .init(); + defer buf.deinit(); + + try escapeRegExpForPackageNameMatching(input.slice(), buf.writer()); + + var output = String.cloneUTF8(buf.items()); + + return output.toJS(global); +} + +const bun = @import("bun"); +const Environment = bun.Environment; +const JSError = bun.JSError; +const String = bun.String; +const strings = bun.strings; + +const jsc = bun.jsc; +const JSGlobalObject = jsc.JSGlobalObject; +const JSValue = jsc.JSValue; diff --git a/src/string/immutable.zig b/src/string/immutable.zig index 07d99d292b..04bb476dce 100644 --- a/src/string/immutable.zig +++ b/src/string/immutable.zig @@ -2306,6 +2306,9 @@ pub const visibleCodepointWidthType = visible_.visibleCodepointWidthType; pub const escapeHTMLForLatin1Input = escapeHTML_.escapeHTMLForLatin1Input; pub const escapeHTMLForUTF16Input = escapeHTML_.escapeHTMLForUTF16Input; +pub const escapeRegExp = escapeRegExp_.escapeRegExp; +pub const escapeRegExpForPackageNameMatching = escapeRegExp_.escapeRegExpForPackageNameMatching; + pub const addNTPathPrefix = paths_.addNTPathPrefix; pub const addNTPathPrefixIfNeeded = paths_.addNTPathPrefixIfNeeded; pub const 
addLongPathPrefix = paths_.addLongPathPrefix; @@ -2347,6 +2350,7 @@ pub const CodePoint = i32; const string = []const u8; const escapeHTML_ = @import("./immutable/escapeHTML.zig"); +const escapeRegExp_ = @import("./escapeRegExp.zig"); const paths_ = @import("./immutable/paths.zig"); const std = @import("std"); const unicode = @import("./immutable/unicode.zig"); diff --git a/test/cli/install/public-hoist-pattern.test.ts b/test/cli/install/public-hoist-pattern.test.ts new file mode 100644 index 0000000000..345a50d263 --- /dev/null +++ b/test/cli/install/public-hoist-pattern.test.ts @@ -0,0 +1,417 @@ +import { spawn, write } from "bun"; +import { afterAll, beforeAll, describe, expect, test } from "bun:test"; +import { readlinkSync } from "fs"; +import { VerdaccioRegistry, bunEnv, bunExe, readdirSorted, runBunInstall } from "harness"; +import { join } from "path"; + +const registry = new VerdaccioRegistry(); + +beforeAll(async () => { + await registry.start(); +}); + +afterAll(() => { + registry.stop(); +}); + +describe("publicHoistPattern", () => { + test("bunfig string", async () => { + const { packageDir } = await registry.createTestDir({ + bunfigOpts: { isolated: true, publicHoistPattern: "*typ*" }, + files: { + "package.json": JSON.stringify({ + name: "include-patterns", + dependencies: { + "two-range-deps": "1.0.0", + }, + }), + }, + }); + + await runBunInstall(bunEnv, packageDir); + + expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([".bun", "@types", "two-range-deps"]); + }); + + test("bunfig array", async () => { + const { packageDir } = await registry.createTestDir({ + bunfigOpts: { isolated: true, publicHoistPattern: ["*types*", "no-deps"] }, + files: { + "package.json": JSON.stringify({ + name: "array-patterns", + dependencies: { + "two-range-deps": "1.0.0", + "a-dep": "1.0.1", + }, + }), + }, + }); + + await runBunInstall(bunEnv, packageDir); + + // Should hoist @types and no-deps + expect(await readdirSorted(join(packageDir, 
"node_modules"))).toEqual([ + ".bun", + "@types", + "a-dep", + "no-deps", + "two-range-deps", + ]); + }); + + test("all exclude pattern", async () => { + const { packageDir } = await registry.createTestDir({ + bunfigOpts: { isolated: true, publicHoistPattern: "!*" }, + files: { + "package.json": JSON.stringify({ + name: "exclude-all", + dependencies: { + "two-range-deps": "1.0.0", + "no-deps": "1.0.0", + }, + }), + }, + }); + + await runBunInstall(bunEnv, packageDir); + + // Should not hoist any dependencies + const [nodeModules, hasTypes] = await Promise.all([ + readdirSorted(join(packageDir, "node_modules")), + Bun.file(join(packageDir, "node_modules", "@types")).exists(), + ]); + + expect(nodeModules).toEqual([".bun", "no-deps", "two-range-deps"]); + // Verify transitive deps are not hoisted + expect(hasTypes).toBeFalse(); + }); + + test("all include pattern", async () => { + const { packageDir } = await registry.createTestDir({ + bunfigOpts: { isolated: true, publicHoistPattern: "*" }, + files: { + "package.json": JSON.stringify({ + name: "include-all", + dependencies: { + "two-range-deps": "1.0.0", + }, + }), + }, + }); + + await runBunInstall(bunEnv, packageDir); + + // Should hoist all dependencies including transitive + expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([ + ".bun", + "@types", + "no-deps", + "two-range-deps", + ]); + }); + + test("mixed include and exclude patterns", async () => { + const { packageDir } = await registry.createTestDir({ + bunfigOpts: { isolated: true, publicHoistPattern: ["*", "!@types*", "!no-deps"] }, + files: { + "package.json": JSON.stringify({ + name: "mixed-patterns", + dependencies: { + "two-range-deps": "1.0.0", + "a-dep": "1.0.1", + }, + }), + }, + }); + + await runBunInstall(bunEnv, packageDir); + + // Should hoist everything except @types and no-deps + const [nodeModules, hasTypes, hasNoDeps] = await Promise.all([ + readdirSorted(join(packageDir, "node_modules")), + Bun.file(join(packageDir, 
"node_modules", "@types")).exists(), + Bun.file(join(packageDir, "node_modules", "no-deps")).exists(), + ]); + + expect(nodeModules).toEqual([".bun", "a-dep", "two-range-deps"]); + expect(hasTypes).toBeFalse(); + expect(hasNoDeps).toBeFalse(); + }); + + test("npmrc string configuration", async () => { + const { packageDir } = await registry.createTestDir({ + bunfigOpts: { isolated: true }, + files: { + "package.json": JSON.stringify({ + name: "npmrc-string", + dependencies: { + "two-range-deps": "1.0.0", + }, + }), + ".npmrc": `public-hoist-pattern=*types*`, + }, + }); + + await runBunInstall(bunEnv, packageDir); + + expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([".bun", "@types", "two-range-deps"]); + }); + + test("npmrc array configuration", async () => { + const { packageDir } = await registry.createTestDir({ + bunfigOpts: { isolated: true }, + files: { + "package.json": JSON.stringify({ + name: "npmrc-array", + dependencies: { + "two-range-deps": "1.0.0", + "a-dep": "1.0.1", + }, + }), + ".npmrc": `public-hoist-pattern[]=*types* +public-hoist-pattern[]=no-deps`, + }, + }); + + await runBunInstall(bunEnv, packageDir); + + // Should hoist @types and no-deps + expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([ + ".bun", + "@types", + "a-dep", + "no-deps", + "two-range-deps", + ]); + }); + + test("npmrc mixed patterns", async () => { + const { packageDir } = await registry.createTestDir({ + bunfigOpts: { isolated: true }, + files: { + "package.json": JSON.stringify({ + name: "npmrc-mixed", + dependencies: { + "two-range-deps": "1.0.0", + "a-dep": "1.0.1", + }, + }), + ".npmrc": `public-hoist-pattern[]=* +public-hoist-pattern[]=!@types* +public-hoist-pattern[]=!no-deps`, + }, + }); + + await runBunInstall(bunEnv, packageDir); + + // Should hoist everything except @types and no-deps + const [nodeModules, hasTypes, hasNoDeps] = await Promise.all([ + readdirSorted(join(packageDir, "node_modules")), + 
Bun.file(join(packageDir, "node_modules", "@types")).exists(), + Bun.file(join(packageDir, "node_modules", "no-deps")).exists(), + ]); + + expect(nodeModules).toEqual([".bun", "a-dep", "two-range-deps"]); + expect(hasTypes).toBeFalse(); + expect(hasNoDeps).toBeFalse(); + }); + + test("exclude specific packages", async () => { + const { packageDir } = await registry.createTestDir({ + bunfigOpts: { isolated: true, publicHoistPattern: ["*", "!two-range-deps"] }, + files: { + "package.json": JSON.stringify({ + name: "exclude-specific", + dependencies: { + "two-range-deps": "1.0.0", + "no-deps": "1.0.0", + }, + }), + }, + }); + + await runBunInstall(bunEnv, packageDir); + + // Should hoist everything, two-range-deps included because it's a direct dependency + expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([ + ".bun", + "@types", + "no-deps", + "two-range-deps", + ]); + // two-range-deps should still be linked + expect(readlinkSync(join(packageDir, "node_modules", "two-range-deps"))).toBe( + join(".bun", "two-range-deps@1.0.0", "node_modules", "two-range-deps"), + ); + }); + + test("scoped package patterns", async () => { + const { packageDir } = await registry.createTestDir({ + bunfigOpts: { isolated: true, publicHoistPattern: "@types/*" }, + files: { + "package.json": JSON.stringify({ + name: "scoped-patterns", + dependencies: { + "two-range-deps": "1.0.0", + "@types/is-number": "1.0.0", + }, + }), + }, + }); + + await runBunInstall(bunEnv, packageDir); + + // Should only hoist @types packages + const [nodeModules, nodeModulesTypes, hasNoDeps] = await Promise.all([ + readdirSorted(join(packageDir, "node_modules")), + readdirSorted(join(packageDir, "node_modules", "@types")), + Bun.file(join(packageDir, "node_modules", "no-deps")).exists(), + ]); + + expect(nodeModules).toEqual([".bun", "@types", "two-range-deps"]); + expect(nodeModulesTypes).toEqual(["is-number"]); + expect(hasNoDeps).toBeFalse(); + }); + + test("complex pattern combinations", 
async () => { + const { packageDir } = await registry.createTestDir({ + bunfigOpts: { + isolated: true, + publicHoistPattern: ["@types/*", "no-*", "!no-deps", "a-*"], + }, + files: { + "package.json": JSON.stringify({ + name: "complex-patterns", + dependencies: { + "two-range-deps": "1.0.0", + "a-dep": "1.0.1", + "basic-1": "1.0.0", + }, + }), + }, + }); + + await runBunInstall(bunEnv, packageDir); + + // Should hoist: @types/*, a-* packages + // Should not hoist: no-deps (excluded by !no-deps, but matches no-*) + expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([ + ".bun", + "@types", + "a-dep", + "basic-1", + "two-range-deps", + ]); + }); + + test("workspaces with publicHoistPattern", async () => { + const { packageDir } = await registry.createTestDir({ + bunfigOpts: { isolated: true, publicHoistPattern: ["*types*", "no-deps"] }, + files: { + "package.json": JSON.stringify({ + name: "workspace-root", + workspaces: ["packages/*"], + dependencies: { + "no-deps": "1.0.0", + }, + }), + "packages/pkg1/package.json": JSON.stringify({ + name: "pkg1", + dependencies: { + "@types/is-number": "1.0.0", + "a-dep": "1.0.1", + }, + }), + "packages/pkg2/package.json": JSON.stringify({ + name: "pkg2", + dependencies: { + "two-range-deps": "1.0.0", + }, + }), + }, + }); + + await runBunInstall(bunEnv, packageDir); + + // Root should have hoisted packages + expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([".bun", "@types", "no-deps"]); + + // Workspace packages should have their dependencies + expect(await readdirSorted(join(packageDir, "packages", "pkg1", "node_modules"))).toEqual(["@types", "a-dep"]); + expect(await readdirSorted(join(packageDir, "packages", "pkg2", "node_modules"))).toEqual(["two-range-deps"]); + }); + + describe("error cases", () => { + test("invalid publicHoistPattern type in bunfig", async () => { + const { packageDir } = await registry.createTestDir({ + bunfigOpts: { isolated: true }, + files: { + "package.json": 
JSON.stringify({ + name: "invalid-pattern-type", + dependencies: { + "no-deps": "1.0.0", + }, + }), + }, + }); + + // Manually write invalid bunfig + await write( + join(packageDir, "bunfig.toml"), + `[install] +cache = "${join(packageDir, ".bun-cache").replaceAll("\\", "\\\\")}" +registry = "${registry.registryUrl()}" +linker = "isolated" +publicHoistPattern = 123`, + ); + + const { stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + env: bunEnv, + stdout: "pipe", + stderr: "pipe", + }); + + expect(await exited).not.toBe(0); + const err = await stderr.text(); + expect(err).toContain("error: Expected a string or an array of strings"); + }); + + test("malformed bunfig with array syntax", async () => { + const { packageDir } = await registry.createTestDir({ + bunfigOpts: { isolated: true }, + files: { + "package.json": JSON.stringify({ + name: "malformed-array", + dependencies: { + "no-deps": "1.0.0", + }, + }), + }, + }); + + // Should error from boolean in the array + await write( + join(packageDir, "bunfig.toml"), + `[install] +cache = "${join(packageDir, ".bun-cache").replaceAll("\\", "\\\\")}" +registry = "${registry.registryUrl()}" +linker = "isolated" +publicHoistPattern = ["*types*", true]`, + ); + + const { stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: packageDir, + env: bunEnv, + stdout: "pipe", + stderr: "pipe", + }); + + const err = await stderr.text(); + expect(await exited).toBe(1); + expect(err).toContain("error: Expected a string"); + }); + }); +}); diff --git a/test/harness.ts b/test/harness.ts index 0c513b3f6e..1d800e16ab 100644 --- a/test/harness.ts +++ b/test/harness.ts @@ -1755,6 +1755,13 @@ cache = "${join(dir, ".bun-cache").replaceAll("\\", "\\\\")}" bunfig += `registry = "${this.registryUrl()}"\n`; } bunfig += `linker = "${opts.isolated ? 
"isolated" : "hoisted"}"\n`; + if (opts.publicHoistPattern) { + if (typeof opts.publicHoistPattern === "string") { + bunfig += `publicHoistPattern = "${opts.publicHoistPattern}"`; + } else { + bunfig += `publicHoistPattern = [${opts.publicHoistPattern.map(p => `"${p}"`).join(", ")}]`; + } + } await write(join(dir, "bunfig.toml"), bunfig); } } @@ -1763,6 +1770,7 @@ type BunfigOpts = { saveTextLockfile?: boolean; npm?: boolean; isolated?: boolean; + publicHoistPattern?: string | string[]; }; export async function readdirSorted(path: string): Promise { diff --git a/test/js/bun/util/escapeRegExp.test.ts b/test/js/bun/util/escapeRegExp.test.ts new file mode 100644 index 0000000000..7ce2da33ec --- /dev/null +++ b/test/js/bun/util/escapeRegExp.test.ts @@ -0,0 +1,16 @@ +import testHelpers from "bun:internal-for-testing"; +import { expect, test } from "bun:test"; +const { escapeRegExp, escapeRegExpForPackageNameMatching } = testHelpers; + +test("escapeRegExp", () => { + expect(escapeRegExp("\\ ^ $ * + ? . ( ) | { } [ ]")).toBe("\\\\ \\^ \\$ \\* \\+ \\? \\. \\( \\) \\| \\{ \\} \\[ \\]"); + expect(escapeRegExp("foo - bar")).toBe("foo \\x2d bar"); +}); + +test("escapeRegExpForPackageName", () => { + // same as the other but '*' becomes '.*' instead of '\*' + expect(escapeRegExpForPackageNameMatching("foo - bar*")).toBe("foo \\x2d bar.*"); + expect(escapeRegExpForPackageNameMatching("\\ ^ $ * + ? . ( ) | { } [ ]")).toBe( + "\\\\ \\^ \\$ .* \\+ \\? \\. \\( \\) \\| \\{ \\} \\[ \\]", + ); +});