fix(install): isolated install with file dependency resolving to root package (#23204)

### What does this PR do?
Fixes `file:.` in root package.json or `file:../..` in workspace
package.json (if '../..' points to the root of the project)
### How did you verify your code works?
Added a test

---------

Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
This commit is contained in:
Dylan Conway
2025-10-03 02:38:55 -07:00
committed by GitHub
parent 693e7995bb
commit 666180d7fc
10 changed files with 228 additions and 68 deletions

View File

@@ -875,6 +875,14 @@ pub fn init(
};
manager.event_loop.loop().internal_loop_data.setParentEventLoop(bun.jsc.EventLoopHandle.init(&manager.event_loop));
manager.lockfile = try ctx.allocator.create(Lockfile);
{
// make sure folder packages can find the root package without creating a new one
var normalized: bun.AbsPath(.{ .sep = .posix }) = .from(root_package_json_path);
defer normalized.deinit();
try manager.folders.put(manager.allocator, FolderResolution.hash(normalized.slice()), .{ .package_id = 0 });
}
jsc.MiniEventLoop.global = &manager.event_loop.mini;
if (!manager.options.enable.cache) {
manager.options.enable.manifest_cache = false;

View File

@@ -808,7 +808,10 @@ pub fn parse(allocator: std.mem.Allocator, comptime subcommand: Subcommand) !Com
cli.lockfile_only = args.flag("--lockfile-only");
if (args.option("--linker")) |linker| {
cli.node_linker = .fromStr(linker);
cli.node_linker = Options.NodeLinker.fromStr(linker) orelse {
Output.errGeneric("Expected --linker to be one of 'isolated' or 'hoisted'", .{});
Global.exit(1);
};
}
if (args.option("--cache-dir")) |cache_dir| {

View File

@@ -59,7 +59,7 @@ pub fn installIsolatedPackages(
// First pass: create full dependency tree with resolved peers
next_node: while (node_queue.readItem()) |entry| {
{
check_cycle: {
// check for cycles
const nodes_slice = nodes.slice();
const node_pkg_ids = nodes_slice.items(.pkg_id);
@@ -74,11 +74,17 @@ pub fn installIsolatedPackages(
// 'node_modules/.bun/parent@version/node_modules'.
const dep_id = node_dep_ids[curr_id.get()];
if (dep_id == invalid_dependency_id or entry.dep_id == invalid_dependency_id) {
if (dep_id == invalid_dependency_id and entry.dep_id == invalid_dependency_id) {
node_nodes[entry.parent_id.get()].appendAssumeCapacity(curr_id);
continue :next_node;
}
if (dep_id == invalid_dependency_id or entry.dep_id == invalid_dependency_id) {
// one is the root package, one is a dependency on the root package (it has a valid dep_id)
// create a new node for it.
break :check_cycle;
}
// ensure the dependency name is the same before skipping the cycle. if they aren't
// we lose dependency name information for the symlinks
if (dependencies[dep_id].name_hash == dependencies[entry.dep_id].name_hash) {
@@ -93,7 +99,11 @@ pub fn installIsolatedPackages(
const node_id: Store.Node.Id = .from(@intCast(nodes.len));
const pkg_deps = pkg_dependency_slices[entry.pkg_id];
var skip_dependencies_of_workspace_node = false;
// for skipping dependencies of workspace packages and the root package. the dependencies
// of these packages should only be pulled in once, but we might need to create more than
// one entry if there's multiple dependencies on the workspace or root package.
var skip_dependencies = entry.pkg_id == 0 and entry.dep_id != invalid_dependency_id;
if (entry.dep_id != invalid_dependency_id) {
const entry_dep = dependencies[entry.dep_id];
if (pkg_deps.len == 0 or entry_dep.version.tag == .workspace) dont_dedupe: {
@@ -106,6 +116,9 @@ pub fn installIsolatedPackages(
const node_dep_ids = nodes_slice.items(.dep_id);
const dedupe_dep_id = node_dep_ids[dedupe_node_id.get()];
if (dedupe_dep_id == invalid_dependency_id) {
break :dont_dedupe;
}
const dedupe_dep = dependencies[dedupe_dep_id];
if (dedupe_dep.name_hash != entry_dep.name_hash) {
@@ -115,7 +128,7 @@ pub fn installIsolatedPackages(
if (dedupe_dep.version.tag == .workspace and entry_dep.version.tag == .workspace) {
if (dedupe_dep.behavior.isWorkspace() != entry_dep.behavior.isWorkspace()) {
// only attach the dependencies to one of the workspaces
skip_dependencies_of_workspace_node = true;
skip_dependencies = true;
break :dont_dedupe;
}
}
@@ -132,8 +145,8 @@ pub fn installIsolatedPackages(
.pkg_id = entry.pkg_id,
.dep_id = entry.dep_id,
.parent_id = entry.parent_id,
.nodes = if (skip_dependencies_of_workspace_node) .empty else try .initCapacity(lockfile.allocator, pkg_deps.len),
.dependencies = if (skip_dependencies_of_workspace_node) .empty else try .initCapacity(lockfile.allocator, pkg_deps.len),
.nodes = if (skip_dependencies) .empty else try .initCapacity(lockfile.allocator, pkg_deps.len),
.dependencies = if (skip_dependencies) .empty else try .initCapacity(lockfile.allocator, pkg_deps.len),
});
const nodes_slice = nodes.slice();
@@ -146,7 +159,7 @@ pub fn installIsolatedPackages(
node_nodes[parent_id].appendAssumeCapacity(node_id);
}
if (skip_dependencies_of_workspace_node) {
if (skip_dependencies) {
continue;
}
@@ -411,6 +424,11 @@ pub fn installIsolatedPackages(
const curr_dep_id = node_dep_ids[entry.node_id.get()];
for (dedupe_entry.value_ptr.items) |info| {
if (info.dep_id == invalid_dependency_id or curr_dep_id == invalid_dependency_id) {
if (info.dep_id != curr_dep_id) {
continue;
}
}
if (info.dep_id != invalid_dependency_id and curr_dep_id != invalid_dependency_id) {
const curr_dep = dependencies[curr_dep_id];
const existing_dep = dependencies[info.dep_id];
@@ -685,6 +703,7 @@ pub fn installIsolatedPackages(
const node_id = entry_node_ids[entry_id.get()];
const pkg_id = node_pkg_ids[node_id.get()];
const dep_id = node_dep_ids[node_id.get()];
const pkg_name = pkg_names[pkg_id];
const pkg_name_hash = pkg_name_hashes[pkg_id];
@@ -700,15 +719,15 @@ pub fn installIsolatedPackages(
continue;
},
.root => {
// .monotonic is okay in this block because the task isn't running on another
// thread.
if (entry_id == .root) {
if (dep_id == invalid_dependency_id) {
// .monotonic is okay in this block because the task isn't running on another
// thread.
entry_steps[entry_id.get()].store(.symlink_dependencies, .monotonic);
installer.startTask(entry_id);
continue;
} else {
// dep_id is valid meaning this was a dependency that resolved to the root
// package. it gets an entry in the store.
}
entry_steps[entry_id.get()].store(.done, .monotonic);
installer.onTaskComplete(entry_id, .skipped);
installer.startTask(entry_id);
continue;
},
.workspace => {
@@ -830,7 +849,6 @@ pub fn installIsolatedPackages(
.isolated_package_install_context = entry_id,
};
const dep_id = node_dep_ids[node_id.get()];
const dep = lockfile.buffers.dependencies.items[dep_id];
switch (pkg_res_tag) {

View File

@@ -3,8 +3,32 @@ pub const FileCopier = struct {
src_path: bun.AbsPath(.{ .sep = .auto, .unit = .os }),
dest_subpath: bun.RelPath(.{ .sep = .auto, .unit = .os }),
walker: Walker,
pub fn copy(this: *FileCopier, skip_dirnames: []const bun.OSPathSlice) OOM!sys.Maybe(void) {
/// Builds a `FileCopier` and eagerly creates the directory walker over `src_dir`.
/// Entries named in `skip_dirnames` are directories the walker will not descend
/// into (e.g. `node_modules` — TODO confirm against callers).
/// Caller owns the result and must call `deinit` to release the walker.
pub fn init(
src_dir: FD,
src_path: bun.AbsPath(.{ .sep = .auto, .unit = .os }),
dest_subpath: bun.RelPath(.{ .sep = .auto, .unit = .os }),
skip_dirnames: []const bun.OSPathSlice,
) OOM!FileCopier {
return .{
.src_dir = src_dir,
.src_path = src_path,
.dest_subpath = dest_subpath,
// walker allocation is the only fallible step; uses the default allocator
.walker = try .walk(
src_dir,
bun.default_allocator,
&.{},
skip_dirnames,
),
};
}
/// Releases the walker created by `init`. Deallocation cannot fail.
pub fn deinit(this: *const FileCopier) void {
this.walker.deinit();
}
pub fn copy(this: *FileCopier) OOM!sys.Maybe(void) {
var dest_dir = bun.MakePath.makeOpenPath(FD.cwd().stdDir(), this.dest_subpath.sliceZ(), .{}) catch |err| {
// TODO: remove the need for this and implement openDir makePath makeOpenPath in bun
var errno: bun.sys.E = switch (@as(anyerror, err)) {
@@ -46,15 +70,7 @@ pub const FileCopier = struct {
var copy_file_state: bun.CopyFileState = .{};
var walker: Walker = try .walk(
this.src_dir,
bun.default_allocator,
&.{},
skip_dirnames,
);
defer walker.deinit();
while (switch (walker.next()) {
while (switch (this.walker.next()) {
.result => |res| res,
.err => |err| return .initErr(err),
}) |entry| {

View File

@@ -3,8 +3,32 @@ const Hardlinker = @This();
src_dir: FD,
src: bun.AbsPath(.{ .sep = .auto, .unit = .os }),
dest: bun.RelPath(.{ .sep = .auto, .unit = .os }),
walker: Walker,
pub fn link(this: *Hardlinker, skip_dirnames: []const bun.OSPathSlice) OOM!sys.Maybe(void) {
/// Builds a `Hardlinker` and eagerly creates the directory walker over
/// `folder_dir`. Entries named in `skip_dirnames` are directories the walker
/// will not descend into. Caller must call `deinit` to release the walker.
pub fn init(
folder_dir: FD,
src: bun.AbsPath(.{ .sep = .auto, .unit = .os }),
dest: bun.RelPath(.{ .sep = .auto, .unit = .os }),
skip_dirnames: []const bun.OSPathSlice,
) OOM!Hardlinker {
return .{
.src_dir = folder_dir,
.src = src,
.dest = dest,
// walker allocation is the only fallible step; uses the default allocator
.walker = try .walk(
folder_dir,
bun.default_allocator,
&.{},
skip_dirnames,
),
};
}
/// Releases the walker created by `init`. Deallocation cannot fail.
pub fn deinit(this: *Hardlinker) void {
this.walker.deinit();
}
pub fn link(this: *Hardlinker) OOM!sys.Maybe(void) {
if (bun.install.PackageManager.verbose_install) {
bun.Output.prettyErrorln(
\\Hardlinking {} to {}
@@ -14,16 +38,9 @@ pub fn link(this: *Hardlinker, skip_dirnames: []const bun.OSPathSlice) OOM!sys.M
bun.fmt.fmtOSPath(this.dest.slice(), .{ .path_sep = .auto }),
},
);
bun.Output.flush();
}
var walker: Walker = try .walk(
this.src_dir,
bun.default_allocator,
&.{},
skip_dirnames,
);
defer walker.deinit();
if (comptime Environment.isWindows) {
const cwd_buf = bun.w_path_buffer_pool.get();
defer bun.w_path_buffer_pool.put(cwd_buf);
@@ -31,7 +48,7 @@ pub fn link(this: *Hardlinker, skip_dirnames: []const bun.OSPathSlice) OOM!sys.M
return .initErr(bun.sys.Error.fromCode(bun.sys.E.ACCES, .link));
};
while (switch (walker.next()) {
while (switch (this.walker.next()) {
.result => |res| res,
.err => |err| return .initErr(err),
}) |entry| {
@@ -125,7 +142,7 @@ pub fn link(this: *Hardlinker, skip_dirnames: []const bun.OSPathSlice) OOM!sys.M
return .success;
}
while (switch (walker.next()) {
while (switch (this.walker.next()) {
.result => |res| res,
.err => |err| return .initErr(err),
}) |entry| {

View File

@@ -421,9 +421,14 @@ pub const Installer = struct {
};
},
.folder => {
.folder, .root => {
const path = switch (pkg_res.tag) {
.folder => pkg_res.value.folder.slice(string_buf),
.root => ".",
else => unreachable,
};
// the folder does not exist in the cache. xdev is per folder dependency
const folder_dir = switch (bun.openDirForIteration(FD.cwd(), pkg_res.value.folder.slice(string_buf))) {
const folder_dir = switch (bun.openDirForIteration(FD.cwd(), path)) {
.result => |fd| fd,
.err => |err| return .failure(.{ .link_package = err }),
};
@@ -440,13 +445,15 @@ pub const Installer = struct {
installer.appendStorePath(&dest, this.entry_id);
var hardlinker: Hardlinker = .{
.src_dir = folder_dir,
.src = src,
.dest = dest,
};
var hardlinker: Hardlinker = try .init(
folder_dir,
src,
dest,
&.{comptime bun.OSPathLiteral("node_modules")},
);
defer hardlinker.deinit();
switch (try hardlinker.link(&.{comptime bun.OSPathLiteral("node_modules")})) {
switch (try hardlinker.link()) {
.result => {},
.err => |err| {
if (err.getErrno() == .XDEV) {
@@ -501,13 +508,15 @@ pub const Installer = struct {
defer dest.deinit();
installer.appendStorePath(&dest, this.entry_id);
var file_copier: FileCopier = .{
.src_dir = folder_dir,
.src_path = src_path,
.dest_subpath = dest,
};
var file_copier: FileCopier = try .init(
folder_dir,
src_path,
dest,
&.{comptime bun.OSPathLiteral("node_modules")},
);
defer file_copier.deinit();
switch (try file_copier.copy(&.{})) {
switch (try file_copier.copy()) {
.result => {},
.err => |err| {
if (PackageManager.verbose_install) {
@@ -559,6 +568,18 @@ pub const Installer = struct {
continue :backend .hardlink;
}
if (installer.manager.options.log_level.isVerbose()) {
bun.Output.prettyErrorln(
\\Cloning {} to {}
,
.{
bun.fmt.fmtOSPath(pkg_cache_dir_subpath.sliceZ(), .{ .path_sep = .auto }),
bun.fmt.fmtOSPath(dest_subpath.sliceZ(), .{ .path_sep = .auto }),
},
);
bun.Output.flush();
}
switch (sys.clonefileat(cache_dir, pkg_cache_dir_subpath.sliceZ(), FD.cwd(), dest_subpath.sliceZ())) {
.result => {},
.err => |clonefile_err1| {
@@ -613,13 +634,15 @@ pub const Installer = struct {
defer src.deinit();
src.appendJoin(pkg_cache_dir_subpath.slice());
var hardlinker: Hardlinker = .{
.src_dir = cached_package_dir.?,
.src = src,
.dest = dest_subpath,
};
var hardlinker: Hardlinker = try .init(
cached_package_dir.?,
src,
dest_subpath,
&.{},
);
defer hardlinker.deinit();
switch (try hardlinker.link(&.{})) {
switch (try hardlinker.link()) {
.result => {},
.err => |err| {
if (err.getErrno() == .XDEV) {
@@ -678,13 +701,15 @@ pub const Installer = struct {
defer src_path.deinit();
src_path.append(pkg_cache_dir_subpath.slice());
var file_copier: FileCopier = .{
.src_dir = cached_package_dir.?,
.src_path = src_path,
.dest_subpath = dest_subpath,
};
var file_copier: FileCopier = try .init(
cached_package_dir.?,
src_path,
dest_subpath,
&.{},
);
defer file_copier.deinit();
switch (try file_copier.copy(&.{})) {
switch (try file_copier.copy()) {
.result => {},
.err => |err| {
if (PackageManager.verbose_install) {
@@ -1231,6 +1256,7 @@ pub const Installer = struct {
const nodes = this.store.nodes.slice();
const node_pkg_ids = nodes.items(.pkg_id);
const node_dep_ids = nodes.items(.dep_id);
// const node_peers = nodes.items(.peers);
const pkgs = this.lockfile.packages.slice();
@@ -1240,10 +1266,25 @@ pub const Installer = struct {
const node_id = entry_node_ids[entry_id.get()];
// const peers = node_peers[node_id.get()];
const pkg_id = node_pkg_ids[node_id.get()];
const dep_id = node_dep_ids[node_id.get()];
const pkg_res = pkg_resolutions[pkg_id];
switch (pkg_res.tag) {
.root => {},
.root => {
if (dep_id != invalid_dependency_id) {
const pkg_name = pkg_names[pkg_id];
buf.append("node_modules/" ++ Store.modules_dir_name);
buf.appendFmt("{}", .{
Store.Entry.fmtStorePath(entry_id, this.store, this.lockfile),
});
buf.append("node_modules");
if (pkg_name.isEmpty()) {
buf.append(std.fs.path.basename(bun.fs.FileSystem.instance.top_level_dir));
} else {
buf.append(pkg_name.slice(string_buf));
}
}
},
.workspace => {
buf.append(pkg_res.value.workspace.slice(string_buf));
},

View File

@@ -145,6 +145,15 @@ pub const Store = struct {
const pkg_res = pkg_resolutions[pkg_id];
switch (pkg_res.tag) {
.root => {
if (pkg_name.isEmpty()) {
try writer.writeAll(std.fs.path.basename(bun.fs.FileSystem.instance.top_level_dir));
} else {
try writer.print("{}@root", .{
pkg_name.fmtStorePath(string_buf),
});
}
},
.folder => {
try writer.print("{}@file+{}", .{
pkg_name.fmtStorePath(string_buf),

View File

@@ -1652,9 +1652,15 @@ pub fn parseIntoBinaryLockfile(
return error.InvalidPackageResolution;
};
const name_str, const res_str = Dependency.splitNameAndVersion(res_info_str) catch {
try log.addError(source, res_info.loc, "Invalid package resolution");
return error.InvalidPackageResolution;
const name_str, const res_str = name_and_res: {
if (strings.hasPrefixComptime(res_info_str, "@root:")) {
break :name_and_res .{ "", res_info_str[1..] };
}
break :name_and_res Dependency.splitNameAndVersion(res_info_str) catch {
try log.addError(source, res_info.loc, "Invalid package resolution");
return error.InvalidPackageResolution;
};
};
const name_hash = String.Builder.stringHash(name_str);

View File

@@ -109,7 +109,7 @@ pub fn next(self: *Walker) bun.sys.Maybe(?WalkerEntry) {
return .initResult(null);
}
pub fn deinit(self: *Walker) void {
pub fn deinit(self: *const Walker) void {
if (self.stack.items.len > 0) {
for (self.stack.items[1..]) |*item| {
if (self.stack.items.len != 0) {

View File

@@ -268,6 +268,48 @@ test("can install folder dependencies", async () => {
).toBe("module.exports = 'hello from pkg-1';");
});
// Regression test for #23204: `file:.` in the root package.json and
// `file:../..` in a workspace package.json must both resolve to the root
// package under isolated installs, producing store symlinks rather than a
// duplicate copy of the root package.
test("can install folder dependencies on root package", async () => {
  const { packageDir, packageJson } = await registry.createTestDir({ bunfigOpts: { isolated: true } });
  await Promise.all([
    write(
      packageJson,
      JSON.stringify({
        name: "root-file-dep",
        workspaces: ["packages/*"],
        dependencies: {
          // root package depending on itself via a folder dependency
          self: "file:.",
        },
      }),
    ),
    write(
      join(packageDir, "packages", "pkg1", "package.json"),
      JSON.stringify({
        name: "pkg1",
        dependencies: {
          // workspace depending on the project root via a folder dependency
          root: "file:../..",
        },
      }),
    ),
  ]);
  await runBunInstall(bunEnv, packageDir);
  expect(
    await Promise.all([
      readlink(join(packageDir, "node_modules", "self")),
      readlink(join(packageDir, "packages", "pkg1", "node_modules", "root")),
      file(join(packageDir, "node_modules", "self", "package.json")).json(),
    ]),
  ).toEqual([
    // both symlinks point into the isolated store entry for the root package
    join(".bun", "root-file-dep@root", "node_modules", "root-file-dep"),
    join("..", "..", "..", "node_modules", ".bun", "root-file-dep@root", "node_modules", "root-file-dep"),
    // the linked package.json is the root's own manifest
    await file(packageJson).json(),
  ]);
});
describe("isolated workspaces", () => {
test("basic", async () => {
const { packageJson, packageDir } = await registry.createTestDir({ bunfigOpts: { isolated: true } });