Compare commits


4 Commits

Author        SHA1        Message                                                    Date
Dylan Conway  5d8fe74eb8  test                                                       2025-10-27 19:44:16 -07:00
Dylan Conway  29cd7e9c68  update                                                     2025-10-27 19:07:50 -07:00
Dylan Conway  89bbabaf66  Merge branch 'main' into dylan/workspace-root-dependency  2025-10-27 18:01:41 -07:00
Dylan Conway  64940c8c8a  workspace dependency resolves to root package              2025-10-25 00:17:18 -07:00
7 changed files with 182 additions and 104 deletions

View File

@@ -1809,6 +1809,16 @@ fn getOrPutResolvedPackage(
         break :blk Path.joinAbsStringBuf(FileSystem.instance.top_level_dir, &buf2, &[_]string{workspace_path}, .auto);
     };

+    if (this.lockfile.rootPackage()) |root_pkg| {
+        const string_buf = this.lockfile.buffers.string_bytes.items;
+        if (name.eql(root_pkg.name, string_buf, string_buf) or
+            strings.eqlLong(FileSystem.instance.top_level_dir, workspace_path_u8, true))
+        {
+            successFn(this, dependency_id, 0);
+            return .{ .package = root_pkg };
+        }
+    }
+
     const res = FolderResolution.getOrPut(.{ .relative = .workspace }, version, workspace_path_u8, this);

     switch (res) {
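
In short, the added block makes a `workspace:` dependency resolve to the root package (package id 0) when either the dependency name matches the root package's name or the workspace path is the top-level directory itself, bypassing FolderResolution. A rough TypeScript sketch of the rule (illustrative names, not Bun's internal API):

// Hypothetical helper mirroring the check above.
function resolvesToRootPackage(
  depName: string, // dependency name as written in package.json
  workspacePath: string, // absolute path the "workspace:" specifier resolves to
  rootPkgName: string, // name of the monorepo root package
  topLevelDir: string, // absolute path of the monorepo root
): boolean {
  return depName === rootPkgName || workspacePath === topLevelDir;
}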

View File

@@ -73,19 +73,19 @@ pub fn installIsolatedPackages(
             // skip the new node, and add the previously added node to parent so it appears in
             // 'node_modules/.bun/parent@version/node_modules'.
-            const dep_id = node_dep_ids[curr_id.get()];
-            if (dep_id == invalid_dependency_id and entry.dep_id == invalid_dependency_id) {
+            const curr_dep_id = node_dep_ids[curr_id.get()];
+            if (curr_dep_id == invalid_dependency_id and entry.dep_id == invalid_dependency_id) {
                 node_nodes[entry.parent_id.get()].appendAssumeCapacity(curr_id);
                 continue :next_node;
             }

-            if (dep_id == invalid_dependency_id or entry.dep_id == invalid_dependency_id) {
+            if (curr_dep_id == invalid_dependency_id or entry.dep_id == invalid_dependency_id) {
                 // one is the root package, one is a dependency on the root package (it has a valid dep_id)
                 // create a new node for it.
                 break :check_cycle;
             }

-            const curr_dep = dependencies[dep_id];
+            const curr_dep = dependencies[curr_dep_id];
             const entry_dep = dependencies[entry.dep_id];

             // ensure the dependency name is the same before skipping the cycle. if they aren't
@@ -154,6 +154,7 @@ pub fn installIsolatedPackages(
         .parent_id = entry.parent_id,
         .nodes = if (skip_dependencies) .empty else try .initCapacity(lockfile.allocator, pkg_deps.len),
         .dependencies = if (skip_dependencies) .empty else try .initCapacity(lockfile.allocator, pkg_deps.len),
+        .peers = .init(lockfile.allocator),
     });

     const nodes_slice = nodes.slice();
@@ -365,7 +366,7 @@ pub fn installIsolatedPackages(
             .pkg_id = resolved_pkg_id,
             .auto_installed = auto_installed,
         };
-        try node_peers[visited_parent_id.get()].insert(lockfile.allocator, peer, &ctx);
+        try node_peers[visited_parent_id.get()].insert(peer, &ctx);
     }

     if (visited_parent_node_ids.items.len != 0) {
@@ -437,19 +438,41 @@ pub fn installIsolatedPackages(
     const curr_dep_id = node_dep_ids[entry.node_id.get()];
     for (dedupe_entry.value_ptr.items) |info| {
-        if (info.dep_id == invalid_dependency_id or curr_dep_id == invalid_dependency_id) {
-            if (info.dep_id != curr_dep_id) {
-                continue;
-            }
+        if (info.dep_id == invalid_dependency_id and curr_dep_id == invalid_dependency_id) {
             try store.items(.dependencies)[entry.entry_parent_id.get()].insert(
                 .{ .entry_id = info.entry_id, .dep_id = curr_dep_id },
                 &.{ .string_buf = string_buf, .dependencies = dependencies },
             );
             try store.items(.parents)[info.entry_id.get()].append(lockfile.allocator, entry.entry_parent_id);
             continue :next_entry;
         }

-        if (info.dep_id != invalid_dependency_id and curr_dep_id != invalid_dependency_id) {
-            const curr_dep = dependencies[curr_dep_id];
-            const existing_dep = dependencies[info.dep_id];
-            if (existing_dep.version.tag == .workspace and curr_dep.version.tag == .workspace) {
-                if (existing_dep.behavior.isWorkspace() != curr_dep.behavior.isWorkspace()) {
-                    continue;
-                }
+        if (info.dep_id == invalid_dependency_id or curr_dep_id == invalid_dependency_id) {
+            if ((info.dep_id != invalid_dependency_id and dependencies[info.dep_id].version.tag == .workspace) or
+                (curr_dep_id != invalid_dependency_id and dependencies[curr_dep_id].version.tag == .workspace))
+            {
+                // if the root dependency originates from a "workspace:" dependency and isn't injected,
+                // dedupe. this means it becomes a dependency symlink in the existing entry node_modules
+                // but it does not become an entry
+                try store.items(.dependencies)[entry.entry_parent_id.get()].insert(
+                    .{ .entry_id = info.entry_id, .dep_id = curr_dep_id },
+                    &.{ .string_buf = string_buf, .dependencies = dependencies },
+                );
+                try store.items(.parents)[info.entry_id.get()].append(lockfile.allocator, entry.entry_parent_id);
+                continue :next_entry;
+            }

+            // one doesn't or both don't have a dependency (originates from the root package). assume
+            // it's injected and create a new entry
+            continue;
+        }

+        const curr_dep = dependencies[curr_dep_id];
+        const existing_dep = dependencies[info.dep_id];
+        if (existing_dep.version.tag == .workspace and curr_dep.version.tag == .workspace) {
+            if (existing_dep.behavior.isWorkspace() != curr_dep.behavior.isWorkspace()) {
+                continue;
+            }
         }
@@ -476,7 +499,6 @@ pub fn installIsolatedPackages(
         .dependencies = dependencies,
     };
     try entry_dependencies[entry.entry_parent_id.get()].insert(
-        lockfile.allocator,
         .{ .entry_id = info.entry_id, .dep_id = curr_dep_id },
         &ctx,
     );
@@ -511,7 +533,7 @@ pub fn installIsolatedPackages(
     const new_entry_is_workspace = !new_entry_is_root and dependencies[new_entry_dep_id].version.tag == .workspace;
     const new_entry_dependencies: Store.Entry.Dependencies = if (dedupe_entry.found_existing and new_entry_is_workspace)
-        .empty
+        .init(lockfile.allocator)
     else
         try .initCapacity(lockfile.allocator, node_nodes[entry.node_id.get()].items.len);
@@ -562,7 +584,6 @@ pub fn installIsolatedPackages(
         .dependencies = dependencies,
     };
     try entry_dependencies[entry_parent_id].insert(
-        lockfile.allocator,
         .{ .entry_id = new_entry_id, .dep_id = new_entry_dep_id },
         &ctx,
     );
@@ -580,7 +601,6 @@ pub fn installIsolatedPackages(
     const hoist_entry = try public_hoisted.getOrPut(dep_name);
     if (!hoist_entry.found_existing) {
         try entry_dependencies[0].insert(
-            lockfile.allocator,
             .{ .entry_id = new_entry_id, .dep_id = new_entry_dep_id },
             &ctx,
         );
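
The dedupe logic in the @@ -437 hunk reduces to three cases, keyed on whether each side has a valid dependency id (an invalid id means "the root package itself"). A condensed TypeScript sketch of the decision (illustrative only; `null` stands in for invalid_dependency_id):

type Action = "reuse-existing-entry" | "new-entry" | "compare-versions";

// Hypothetical distillation of the dedupe branch above.
function dedupeAction(
  existingDepId: number | null, // dep id recorded on the existing store entry
  currDepId: number | null, // dep id of the node being placed
  isWorkspaceDep: (depId: number) => boolean, // true when version.tag == .workspace
): Action {
  if (existingDepId === null && currDepId === null) {
    // both sides are the root package itself: always reuse the existing entry
    return "reuse-existing-entry";
  }
  if (existingDepId === null || currDepId === null) {
    // exactly one side is the root package. reuse only if the other side is a
    // plain "workspace:" dependency (it becomes a symlink, not a new entry);
    // otherwise assume an injected dependency and create a new entry.
    const validId = existingDepId ?? currDepId!;
    return isWorkspaceDep(validId) ? "reuse-existing-entry" : "new-entry";
  }
  // both sides carry real dependencies: fall through to the workspace tag and
  // behavior comparisons in the original code.
  return "compare-versions";
}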

View File

@@ -503,7 +503,7 @@ pub const Installer = struct {
     .hardlink => {
         var src: bun.AbsPath(.{ .unit = .os, .sep = .auto }) = .initTopLevelDirLongPath();
         defer src.deinit();
-        src.appendJoin(pkg_res.value.folder.slice(string_buf));
+        src.appendJoin(path);

         var dest: bun.RelPath(.{ .unit = .os, .sep = .auto }) = .init();
         defer dest.deinit();
View File

@@ -312,28 +312,33 @@ pub const Store = struct {
     pub fn OrderedArraySet(comptime T: type, comptime Ctx: type) type {
         return struct {
+            const Self = @This();
+
             list: std.ArrayListUnmanaged(T) = .empty,
+            allocator: std.mem.Allocator,

-            pub const empty: @This() = .{};
+            pub fn init(allocator: std.mem.Allocator) Self {
+                return .{ .allocator = allocator };
+            }

-            pub fn initCapacity(allocator: std.mem.Allocator, n: usize) OOM!@This() {
+            pub fn initCapacity(allocator: std.mem.Allocator, n: usize) OOM!Self {
                 const list: std.ArrayListUnmanaged(T) = try .initCapacity(allocator, n);
-                return .{ .list = list };
+                return .{ .list = list, .allocator = allocator };
             }

-            pub fn deinit(this: *@This(), allocator: std.mem.Allocator) void {
-                this.list.deinit(allocator);
+            pub fn deinit(self: *Self) void {
+                self.list.deinit(self.allocator);
             }

-            pub fn slice(this: *const @This()) []const T {
-                return this.list.items;
+            pub fn slice(self: *const Self) []const T {
+                return self.list.items;
             }

-            pub fn len(this: *const @This()) usize {
-                return this.list.items.len;
+            pub fn len(self: *const Self) usize {
+                return self.list.items.len;
             }

-            pub fn eql(l: *const @This(), r: *const @This(), ctx: *const Ctx) bool {
+            pub fn eql(l: *const Self, r: *const Self, ctx: *const Ctx) bool {
                 if (l.list.items.len != r.list.items.len) {
                     return false;
                 }
@@ -347,9 +352,9 @@ pub const Store = struct {
                 return true;
             }

-            pub fn insert(this: *@This(), allocator: std.mem.Allocator, new: T, ctx: *const Ctx) OOM!void {
-                for (0..this.list.items.len) |i| {
-                    const existing = this.list.items[i];
+            pub fn insert(self: *Self, new: T, ctx: *const Ctx) OOM!void {
+                for (0..self.list.items.len) |i| {
+                    const existing = self.list.items[i];
                     if (ctx.eql(new, existing)) {
                         return;
                     }
@@ -361,17 +366,17 @@ pub const Store = struct {
                     }

                     if (order == .lt) {
-                        try this.list.insert(allocator, i, new);
+                        try self.list.insert(self.allocator, i, new);
                         return;
                     }
                 }
-                try this.list.append(allocator, new);
+                try self.list.append(self.allocator, new);
             }

-            pub fn insertAssumeCapacity(this: *@This(), new: T, ctx: *const Ctx) void {
-                for (0..this.list.items.len) |i| {
-                    const existing = this.list.items[i];
+            pub fn insertAssumeCapacity(self: *Self, new: T, ctx: *const Ctx) void {
+                for (0..self.list.items.len) |i| {
+                    const existing = self.list.items[i];
                     if (ctx.eql(new, existing)) {
                         return;
                     }
@@ -383,12 +388,12 @@ pub const Store = struct {
                     }

                     if (order == .lt) {
-                        this.list.insertAssumeCapacity(i, new);
+                        self.list.insertAssumeCapacity(i, new);
                         return;
                     }
                 }
-                this.list.appendAssumeCapacity(new);
+                self.list.appendAssumeCapacity(new);
             }
         };
     }
@@ -401,7 +406,7 @@ pub const Store = struct {
         parent_id: Id,

         dependencies: std.ArrayListUnmanaged(Ids) = .empty,
-        peers: Peers = .empty,
+        peers: Peers,

         // each node in this list becomes a symlink in the package's node_modules
         nodes: std.ArrayListUnmanaged(Id) = .empty,
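
The OrderedArraySet change above moves the allocator into the container, which is why the insert call sites earlier in this diff drop their lockfile.allocator argument. The container itself is a sorted array with linear-scan insertion and equality-based dedupe; a small TypeScript analog (illustrative, with a comparator object standing in for the Zig Ctx):

// Illustrative analog of OrderedArraySet: items stay ordered and unique.
class OrderedArraySet<T> {
  private list: T[] = [];

  constructor(
    private ctx: {
      eql: (a: T, b: T) => boolean;
      order: (a: T, b: T) => number; // negative when a sorts before b
    },
  ) {}

  insert(item: T): void {
    for (let i = 0; i < this.list.length; i++) {
      const existing = this.list[i];
      if (this.ctx.eql(item, existing)) return; // duplicate: leave the set unchanged
      if (this.ctx.order(item, existing) < 0) {
        this.list.splice(i, 0, item); // insert before the first larger element
        return;
      }
    }
    this.list.push(item); // largest so far: append at the end
  }

  slice(): readonly T[] {
    return this.list;
  }
}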

View File

@@ -1154,68 +1154,6 @@ pub fn Package(comptime SemverIntType: type) type {
             }
             dependency_version.value.workspace = path;
         } else {
-            const workspace = dependency_version.value.workspace.slice(buf);
-            const path = string_builder.append(String, if (strings.eqlComptime(workspace, "*")) "*" else brk: {
-                var buf2: bun.PathBuffer = undefined;
-                const rel = Path.relativePlatform(
-                    FileSystem.instance.top_level_dir,
-                    Path.joinAbsStringBuf(
-                        FileSystem.instance.top_level_dir,
-                        &buf2,
-                        &[_]string{
-                            source.path.name.dir,
-                            workspace,
-                        },
-                        .auto,
-                    ),
-                    .auto,
-                    false,
-                );
-                if (comptime Environment.isWindows) {
-                    bun.path.dangerouslyConvertPathToPosixInPlace(u8, Path.relative_to_common_path_buf[0..rel.len]);
-                }
-                break :brk rel;
-            });
-
-            if (comptime Environment.allow_assert) {
-                assert(path.len() > 0);
-                assert(!std.fs.path.isAbsolute(path.slice(buf)));
-            }
-            dependency_version.value.workspace = path;
-
-            const workspace_entry = try lockfile.workspace_paths.getOrPut(allocator, name_hash);
-            const found_matching_workspace = workspace_entry.found_existing;
-
-            if (workspace_version) |ver| {
-                try lockfile.workspace_versions.put(allocator, name_hash, ver);
-                for (package_dependencies[0..dependencies_count]) |*package_dep| {
-                    if (switch (package_dep.version.tag) {
-                        // `dependencies` & `workspaces` defined within the same `package.json`
-                        .npm => String.Builder.stringHash(package_dep.realname().slice(buf)) == name_hash and
-                            package_dep.version.value.npm.version.satisfies(ver, buf, buf),
-                        // `workspace:*`
-                        .workspace => found_matching_workspace and
-                            String.Builder.stringHash(package_dep.realname().slice(buf)) == name_hash,
-                        else => false,
-                    }) {
-                        package_dep.version = dependency_version;
-                        workspace_entry.value_ptr.* = path;
-                        return null;
-                    }
-                }
-            } else if (workspace_entry.found_existing) {
-                for (package_dependencies[0..dependencies_count]) |*package_dep| {
-                    if (package_dep.version.tag == .workspace and
-                        String.Builder.stringHash(package_dep.realname().slice(buf)) == name_hash)
-                    {
-                        package_dep.version = dependency_version;
-                        return null;
-                    }
-                }
-                return error.InstallFailed;
-            }
-            workspace_entry.value_ptr.* = path;
         }
     },
     else => {},

View File

@@ -485,6 +485,22 @@ pub const String = extern struct {
         }
     }

+    pub fn inlineSlice(this: *const String) string {
+        bun.debugAssert(this.isInline());
+        return switch (this.bytes[0]) {
+            0 => "",
+            else => {
+                comptime var i: usize = 0;
+                inline while (i < this.bytes.len) : (i += 1) {
+                    if (this.bytes[i] == 0) return this.bytes[0..i];
+                }
+                return &this.bytes;
+            },
+        };
+    }
+
     pub const Builder = struct {
         len: usize = 0,
         cap: usize = 0,
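
inlineSlice recovers the logical length of a small string stored inline in String's fixed-size byte array: the string occupies the buffer up to the first zero byte, or the whole buffer when no zero byte is present. The same scan in TypeScript (illustrative; the Zig version above unrolls the loop at compile time):

// Illustrative version of the inline-slice scan over a fixed-size buffer.
function inlineSlice(bytes: Uint8Array): Uint8Array {
  if (bytes[0] === 0) return bytes.subarray(0, 0); // empty string
  const terminator = bytes.indexOf(0);
  // no zero byte: the string fills the entire inline buffer
  return terminator === -1 ? bytes : bytes.subarray(0, terminator);
}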

View File

@@ -248,6 +248,95 @@ test("successfully installs workspace when path already exists in node_modules",
     });
 });

+describe("root dependencies in workspace", () => {
+  const basicTests = ["workspace:*", "workspace:1.0.0", "workspace:../../"];
+  for (const basicTest of basicTests) {
+    test(`works with "${basicTest}"`, async () => {
+      const { packageDir } = await verdaccio.createTestDir({
+        bunfigOpts: { isolated: true },
+        files: {
+          "package.json": JSON.stringify({
+            name: "root",
+            version: "1.0.0",
+            workspaces: ["packages/*"],
+          }),
+          "packages/pkg1/package.json": JSON.stringify({
+            name: "pkg1",
+            dependencies: {
+              root: basicTest,
+            },
+          }),
+        },
+      });
+
+      await runBunInstall(env, packageDir);
+
+      expect(await file(join(packageDir, "packages", "pkg1", "node_modules", "root", "package.json")).json()).toEqual({
+        name: "root",
+        version: "1.0.0",
+        workspaces: ["packages/*"],
+      });
+
+      // another install succeeds
+      await rm(join(packageDir, "packages", "pkg1", "node_modules", "root"));
+      await runBunInstall(env, packageDir, { savesLockfile: false });
+
+      expect(await file(join(packageDir, "packages", "pkg1", "node_modules", "root", "package.json")).json()).toEqual({
+        name: "root",
+        version: "1.0.0",
+        workspaces: ["packages/*"],
+      });
+    });
+  }
+
+  test("multiple dependencies on root package", async () => {
+    const { packageDir } = await verdaccio.createTestDir({
+      bunfigOpts: { isolated: true },
+      files: {
+        "package.json": JSON.stringify({
+          name: "root",
+          version: "1.0.0",
+          workspaces: ["packages/*"],
+          dependencies: {
+            root: "workspace:1.0.0",
+          },
+        }),
+        "packages/pkg1/package.json": JSON.stringify({
+          name: "pkg1",
+          dependencies: {
+            root: "workspace:*",
+          },
+        }),
+        "packages/pkg2/package.json": JSON.stringify({
+          name: "pkg2",
+          dependencies: {
+            root: "workspace:../../",
+          },
+        }),
+      },
+    });
+
+    await runBunInstall(env, packageDir);
+
+    (
+      await Promise.all([
+        file(join(packageDir, "packages/pkg1/node_modules/root/package.json")).json(),
+        file(join(packageDir, "packages/pkg2/node_modules/root/package.json")).json(),
+        file(join(packageDir, "node_modules/root/package.json")).json(),
+      ])
+    ).map(rootJson => {
+      expect(rootJson).toEqual({
+        name: "root",
+        version: "1.0.0",
+        workspaces: ["packages/*"],
+        dependencies: {
+          root: "workspace:1.0.0",
+        },
+      });
+    });
+  });
+});
+
 test("adding workspace in workspace edits package.json with correct version (workspace:*)", async () => {
   await Promise.all([
     write(