Mirror of https://github.com/oven-sh/bun, synced 2026-02-03 15:38:46 +00:00

Compare commits: claude/nod ... dylan/dev-
4 Commits
Commits:
- aa43500461
- 25ef5324ca
- e1a33fd2f9
- 37bd79682e
@@ -1112,7 +1112,7 @@ pub const Package = extern struct {
} else {
    // It doesn't satisfy, but a workspace shares the same name. Override the workspace with the other dependency
    for (package_dependencies[0..dependencies_count]) |*dep| {
        if (dep.name_hash == name_hash and dep.version.tag == .workspace) {
        if (dep.name_hash == name_hash and dep.behavior.isWorkspaceOnly()) {
            dep.* = .{
                .behavior = if (in_workspace) group.behavior.add(.workspace) else group.behavior,
                .name = external_alias.value,
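This hunk changes which existing entry may be overridden when another dependency group declares the same name: instead of matching any entry whose version tag is `workspace`, it only overrides entries whose behavior is workspace-only, i.e. entries that exist solely because a workspace member shares the name. A rough TypeScript sketch of that rule; the types and the `overrideWorkspacePlaceholder` helper are illustrative, not Bun's internals:

```ts
// Illustrative model: an entry is replaceable only when it exists purely
// because a workspace member shares the name, not when the user explicitly
// depended on the workspace version (e.g. via "workspace:*").
type Entry = { nameHash: number; workspaceOnly: boolean };

function overrideWorkspacePlaceholder(deps: Entry[], nameHash: number, replacement: Entry): boolean {
  for (let i = 0; i < deps.length; i++) {
    if (deps[i].nameHash === nameHash && deps[i].workspaceOnly) {
      deps[i] = replacement; // the explicit dependency wins over the placeholder
      return true;
    }
  }
  return false;
}
```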
@@ -2141,49 +2141,58 @@ pub const Package = extern struct {
};
};

const TrustedDependenciesSet = Lockfile.TrustedDependenciesSet;
const Aligner = install.Aligner;
const string = []const u8;

// @sortImports

const std = @import("std");
const Allocator = std.mem.Allocator;

const bun = @import("bun");
const ArrayIdentityContext = bun.ArrayIdentityContext;
const Behavior = Dependency.Behavior;
const Bin = install.Bin;
const Cloner = Lockfile.Cloner;
const Dependency = bun.install.Dependency;
const DependencySlice = Lockfile.DependencySlice;
const Environment = bun.Environment;
const Global = bun.Global;
const JSON = bun.JSON;
const Output = bun.Output;
const PackageJSON = bun.PackageJSON;
const Path = bun.path;
const assert = bun.assert;
const logger = bun.logger;
const strings = bun.strings;
const FileSystem = bun.fs.FileSystem;

const JSAst = bun.JSAst;
const Expr = bun.JSAst.Expr;

const Semver = bun.Semver;
const ExternalString = Semver.ExternalString;
const String = Semver.String;

const install = bun.install;
const Aligner = install.Aligner;
const Bin = install.Bin;
const ExternalStringList = install.ExternalStringList;
const ExternalStringMap = install.ExternalStringMap;
const Features = install.Features;
const FileSystem = bun.fs.FileSystem;
const Global = bun.Global;
const JSAst = bun.JSAst;
const JSON = bun.JSON;
const Lockfile = install.Lockfile;
const Npm = install.Npm;
const Output = bun.Output;
const PackageID = bun.install.PackageID;
const PackageIDSlice = Lockfile.PackageIDSlice;
const PackageJSON = bun.PackageJSON;
const PackageManager = install.PackageManager;
const PackageNameHash = install.PackageNameHash;
const Path = bun.path;
const Repository = install.Repository;
const Resolution = bun.install.Resolution;
const Semver = bun.Semver;
const Stream = Lockfile.Stream;
const String = Semver.String;
const StringBuilder = Lockfile.StringBuilder;
const TruncatedPackageNameHash = install.TruncatedPackageNameHash;
const assert = bun.assert;
const assertNoUninitializedPadding = Lockfile.assertNoUninitializedPadding;
const bun = @import("bun");
const default_trusted_dependencies = Lockfile.default_trusted_dependencies;
const initializeStore = install.initializeStore;
const install = bun.install;
const invalid_package_id = install.invalid_package_id;
const logger = bun.logger;
const std = @import("std");
const string = []const u8;
const strings = bun.strings;

const Dependency = bun.install.Dependency;
const Behavior = Dependency.Behavior;

const Lockfile = install.Lockfile;
const Cloner = Lockfile.Cloner;
const DependencySlice = Lockfile.DependencySlice;
const PackageIDSlice = Lockfile.PackageIDSlice;
const Stream = Lockfile.Stream;
const StringBuilder = Lockfile.StringBuilder;
const TrustedDependenciesSet = Lockfile.TrustedDependenciesSet;
const assertNoUninitializedPadding = Lockfile.assertNoUninitializedPadding;
const default_trusted_dependencies = Lockfile.default_trusted_dependencies;

@@ -1119,6 +1119,70 @@ fn PkgMap(comptime T: type) type {
    };
}

pub const TextLockfileDepSorter = struct {
    fn cmpBehavior(l: Dependency.Behavior, r: Dependency.Behavior) std.math.Order {
        if (l.eq(r)) {
            return .eq;
        }

        if (l.isWorkspaceOnly() != r.isWorkspaceOnly()) {
            // ensure isWorkspaceOnly deps are placed at the beginning
            return if (l.isWorkspaceOnly())
                .lt
            else
                .gt;
        }

        if (l.isPeer() != r.isPeer()) {
            return if (l.isPeer())
                .gt
            else
                .lt;
        }

        if (l.isProd() != r.isProd()) {
            return if (l.isProd())
                .gt
            else
                .lt;
        }

        if (l.isOptional() != r.isOptional()) {
            return if (l.isOptional())
                .gt
            else
                .lt;
        }

        if (l.isDev() != r.isDev()) {
            return if (l.isDev())
                .gt
            else
                .lt;
        }

        if (l.isWorkspace() != r.isWorkspace()) {
            return if (l.isWorkspace())
                .gt
            else
                .lt;
        }

        return .eq;
    }

    pub fn isLessThan(string_buf: []const u8, l: Dependency, r: Dependency) bool {
        switch (cmpBehavior(l.behavior, r.behavior)) {
            .eq => {},
            else => |order| return order == .lt,
        }

        const l_name = l.name.slice(string_buf);
        const r_name = r.name.slice(string_buf);
        return strings.cmpStringsAsc({}, l_name, r_name);
    }
};

// const PkgMap = struct {};

pub fn parseIntoBinaryLockfile(
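TextLockfileDepSorter above appears intended to give each package's dependency list a deterministic order when it is written back: workspace-only entries sort first, then entries are grouped by their behavior flags (peer, prod, optional, dev, workspace are compared in that order, with a set flag sorting after an unset one), and ties are broken by name. A minimal TypeScript sketch of that ordering; the flag names below are illustrative and not Bun's packed Behavior struct:

```ts
// Illustrative model of the comparator's ordering.
type Behavior = {
  workspaceOnly: boolean;
  peer: boolean;
  prod: boolean;
  optional: boolean;
  dev: boolean;
  workspace: boolean;
};

type Dep = { name: string; behavior: Behavior };

const flagOrder = ["peer", "prod", "optional", "dev", "workspace"] as const;

function cmpBehavior(l: Behavior, r: Behavior): number {
  // workspace-only entries are always placed first
  if (l.workspaceOnly !== r.workspaceOnly) return l.workspaceOnly ? -1 : 1;
  // for the remaining flags, a set flag sorts after an unset one
  for (const flag of flagOrder) {
    if (l[flag] !== r[flag]) return l[flag] ? 1 : -1;
  }
  return 0;
}

function isLessThan(l: Dep, r: Dep): boolean {
  const order = cmpBehavior(l.behavior, r.behavior);
  if (order !== 0) return order < 0;
  return l.name < r.name; // ties broken by name, ascending
}

// e.g. deps.sort((a, b) => (isLessThan(a, b) ? -1 : isLessThan(b, a) ? 1 : 0));
```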
@@ -1904,6 +1968,9 @@ pub fn parseIntoBinaryLockfile(
    lockfile.buffers.resolutions.expandToCapacity();
    @memset(lockfile.buffers.resolutions.items, invalid_package_id);

    var seen_deps: bun.StringHashMap(void) = .init(allocator);
    defer seen_deps.deinit();

    const pkgs = lockfile.packages.slice();
    const pkg_deps = pkgs.items(.dependencies);
    const pkg_names = pkgs.items(.name);
@@ -1919,6 +1986,8 @@ pub fn parseIntoBinaryLockfile(
        const dep_id: DependencyID = @intCast(_dep_id);
        const dep = &lockfile.buffers.dependencies.items[dep_id];

        const is_duplicate_dep = (try seen_deps.getOrPut(dep.name.slice(lockfile.buffers.string_bytes.items))).found_existing;

        const res_id = pkg_map.get(dep.name.slice(lockfile.buffers.string_bytes.items)) orelse {
            if (dep.behavior.optional) {
                continue;
@@ -1927,7 +1996,9 @@ pub fn parseIntoBinaryLockfile(
            return error.InvalidPackageInfo;
        };

        mapDepToPkg(dep, dep_id, res_id, lockfile, pkg_resolutions);
        if (!is_duplicate_dep) {
            mapDepToPkg(dep, dep_id, res_id, lockfile, pkg_resolutions, is_duplicate_dep);
        }
    }
}

@@ -1939,12 +2010,16 @@ pub fn parseIntoBinaryLockfile(
        const pkg_id: PackageID = @intCast(_pkg_id);
        const workspace_name = pkg_names[pkg_id].slice(lockfile.buffers.string_bytes.items);

        seen_deps.clearRetainingCapacity();

        const deps = pkg_deps[pkg_id];
        for (deps.begin()..deps.end()) |_dep_id| {
            const dep_id: DependencyID = @intCast(_dep_id);
            const dep = &lockfile.buffers.dependencies.items[dep_id];
            const dep_name = dep.name.slice(lockfile.buffers.string_bytes.items);

            const is_duplicate_dep = (try seen_deps.getOrPut(dep_name)).found_existing;

            const workspace_node_modules = std.fmt.bufPrint(&path_buf, "{s}/{s}", .{ workspace_name, dep_name }) catch {
                try log.addErrorFmt(source, root_pkg_exr.loc, allocator, "Workspace and dependency name too long: '{s}/{s}'", .{ workspace_name, dep_name });
                return error.InvalidPackageInfo;
@@ -1958,7 +2033,9 @@ pub fn parseIntoBinaryLockfile(
                return error.InvalidPackageInfo;
            };

            mapDepToPkg(dep, dep_id, res_id, lockfile, pkg_resolutions);
            if (!is_duplicate_dep) {
                mapDepToPkg(dep, dep_id, res_id, lockfile, pkg_resolutions, is_duplicate_dep);
            }
        }
    }
}
@@ -1973,12 +2050,16 @@ pub fn parseIntoBinaryLockfile(
            return error.InvalidPackagesObject;
        };

        seen_deps.clearRetainingCapacity();

        // find resolutions. iterate up to root through the pkg path.
        const deps = pkg_deps[pkg_id];
        deps: for (deps.begin()..deps.end()) |_dep_id| {
            const dep_id: DependencyID = @intCast(_dep_id);
            const dep = &lockfile.buffers.dependencies.items[dep_id];

            const is_duplicate_dep = (try seen_deps.getOrPut(dep.name.slice(lockfile.buffers.string_bytes.items))).found_existing;

            const res_id = pkg_map.findResolution(pkg_path, dep, lockfile.buffers.string_bytes.items, &path_buf) catch |err| switch (err) {
                error.InvalidPackageKey => {
                    try log.addError(source, key.loc, "Invalid package path");
@@ -1993,7 +2074,53 @@ pub fn parseIntoBinaryLockfile(
                },
            };

            mapDepToPkg(dep, dep_id, res_id, lockfile, pkg_resolutions);
            if (!is_duplicate_dep) {
                mapDepToPkg(dep, dep_id, res_id, lockfile, pkg_resolutions, is_duplicate_dep);
            }
        }
    }

    {
        const SortCtx = struct {
            dependencies: []Dependency,
            resolutions: []PackageID,
            string_buf: []const u8,

            deps_slice: DependencySlice = .{},
            resolutions_slice: PackageIDSlice = .{},

            pub fn lessThan(ctx: @This(), l: usize, r: usize) bool {
                const dependencies = ctx.dependencies;

                const l_dep = dependencies[l];
                const r_dep = dependencies[r];

                return Dependency.isLessThan(ctx.string_buf, l_dep, r_dep);
            }

            pub fn swap(ctx: @This(), a: usize, b: usize) void {
                std.mem.swap(Dependency, &ctx.dependencies[a], &ctx.dependencies[b]);
                std.mem.swap(PackageID, &ctx.resolutions[a], &ctx.resolutions[b]);
            }
        };

        var sort_ctx: SortCtx = .{
            .dependencies = lockfile.buffers.dependencies.items,
            .resolutions = lockfile.buffers.resolutions.items,
            .string_buf = lockfile.buffers.string_bytes.items,
        };

        for (0..lockfile.packages.len) |_pkg_id| {
            const pkg_id: PackageID = @intCast(_pkg_id);

            sort_ctx.deps_slice = pkgs.items(.dependencies)[pkg_id];
            sort_ctx.resolutions_slice = pkgs.items(.resolutions)[pkg_id];

            std.sort.pdqContext(
                0,
                sort_ctx.dependencies.len,
                &sort_ctx,
            );
        }
    }

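The SortCtx block above sorts the dependency buffer with Dependency.isLessThan while swapping the parallel resolutions buffer in lockstep, so dependency i keeps pointing at resolution i. A small TypeScript sketch of the same parallel-array sort; the sortParallel helper is hypothetical, and unlike Bun's in-place swap context it builds an index permutation, which is simpler but allocates:

```ts
// Sort two parallel arrays with one comparator while keeping rows aligned.
function sortParallel<A, B>(primary: A[], secondary: B[], isLessThan: (l: A, r: A) => boolean): void {
  // build a permutation of indices ordered by the primary array
  const order = primary
    .map((_, i) => i)
    .sort((l, r) => (isLessThan(primary[l], primary[r]) ? -1 : isLessThan(primary[r], primary[l]) ? 1 : 0));
  const sortedPrimary = order.map(i => primary[i]);
  const sortedSecondary = order.map(i => secondary[i]);
  // write both arrays back in the new order
  for (let i = 0; i < order.length; i++) {
    primary[i] = sortedPrimary[i];
    secondary[i] = sortedSecondary[i];
  }
}

// usage: sortParallel(deps, resolutions, isLessThan) keeps deps[i] paired with resolutions[i]
```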
@@ -2008,12 +2135,15 @@ pub fn parseIntoBinaryLockfile(
    }
}

fn mapDepToPkg(dep: *Dependency, dep_id: DependencyID, pkg_id: PackageID, lockfile: *BinaryLockfile, pkg_resolutions: []const Resolution) void {
fn mapDepToPkg(dep: *Dependency, dep_id: DependencyID, pkg_id: PackageID, lockfile: *BinaryLockfile, pkg_resolutions: []const Resolution, is_duplicate_dep: bool) void {
    lockfile.buffers.resolutions.items[dep_id] = pkg_id;

    if (lockfile.text_lockfile_version != .v0) {
        const res = &pkg_resolutions[pkg_id];
        if (res.tag == .workspace) {

        // when a package has duplicate dependencies and one is a workspace
        // we don't want to override the other because it might not be a workspace.
        if (res.tag == .workspace and !is_duplicate_dep) {
            dep.version.tag = .workspace;
            dep.version.value = .{ .workspace = res.value.workspace };

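mapDepToPkg now takes is_duplicate_dep so that, when a manifest lists the same name under more than one dependency group (for example devDependencies and peerDependencies), only the first occurrence is rewritten to the workspace resolution; later duplicates keep their declared version. A hedged TypeScript sketch of that guard, using illustrative types rather than Bun's internal ones:

```ts
// Illustrative sketch: only the first occurrence of a dependency name may be
// rewritten to point at a workspace resolution; a later duplicate keeps its
// original version so a peer range is not silently turned into "workspace:*".
type Dep = { name: string; version: string };
type Resolution = { tag: "workspace" | "npm"; value: string };

function mapDepToPkg(dep: Dep, res: Resolution, isDuplicate: boolean): void {
  if (res.tag === "workspace" && !isDuplicate) {
    dep.version = `workspace:${res.value}`; // first occurrence wins
  }
}
```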
@@ -2153,7 +2283,7 @@ fn parseAppendDependencies(
        Dependency,
        lockfile.buffers.dependencies.items[off..],
        buf.bytes.items,
        Dependency.isLessThan,
        TextLockfileDepSorter.isLessThan,
    );

    return .{ @intCast(off), @intCast(end - off) };
@@ -2190,3 +2320,4 @@ const Negatable = Npm.Negatable;
const DependencyID = Install.DependencyID;
const Bin = Install.Bin;
const ExternalString = Semver.ExternalString;
const PackageIDSlice = BinaryLockfile.PackageIDSlice;

test/cli/install/test-dev-peer-dependency-priority.test.ts (new file, 323 lines)
@@ -0,0 +1,323 @@
import { expect, test } from "bun:test";
import { bunEnv, bunExe, tempDirWithFiles } from "harness";
import { join } from "path";

test("workspace devDependencies should take priority over peerDependencies for resolution", async () => {
  const dir = tempDirWithFiles("dev-peer-priority", {
    "package.json": JSON.stringify({
      name: "test-monorepo",
      version: "1.0.0",
      workspaces: {
        packages: ["packages/*"],
        nodeLinker: "isolated",
      },
    }),
    "packages/lib/package.json": JSON.stringify({
      name: "lib",
      version: "1.0.0",
      dependencies: {},
      devDependencies: {
        "jquery": "workspace:*", // Use workspace protocol for dev
      },
      peerDependencies: {
        "jquery": "3.7.0", // Range wants 3.7.0
      },
    }),
    "packages/lib/test.js": `const dep = require("jquery"); console.log(dep.version);`,
    // Only provide workspace package with version 2.0.0
    "packages/my-dep/package.json": JSON.stringify({
      name: "jquery",
      version: "2.0.0",
      main: "index.js",
    }),
    "packages/my-dep/index.js": `module.exports = { version: "2.0.0" };`,
  });

  // Run initial install
  let { stdout, stderr, exitCode } = await new Promise<{ stdout: string; stderr: string; exitCode: number }>(
    resolve => {
      const proc = Bun.spawn({
        cmd: [bunExe(), "install", "--no-progress", "--no-summary"],
        cwd: dir,
        env: bunEnv,
        stdout: "pipe",
        stderr: "pipe",
      });

      proc.exited.then(exitCode => {
        Promise.all([new Response(proc.stdout).text(), new Response(proc.stderr).text()]).then(([stdout, stderr]) => {
          resolve({ stdout, stderr, exitCode });
        });
      });
    },
  );

  if (exitCode !== 0) {
    console.error("Install failed with exit code:", exitCode);
    console.error("stdout:", stdout);
    console.error("stderr:", stderr);
  }
  expect(exitCode).toBe(0);

  // Now run bun install with a dead registry to ensure no network requests
  ({ stdout, stderr, exitCode } = await new Promise<{ stdout: string; stderr: string; exitCode: number }>(resolve => {
    const proc = Bun.spawn({
      cmd: [bunExe(), "install", "--no-progress", "--no-summary"],
      cwd: dir,
      env: {
        ...bunEnv,
        NPM_CONFIG_REGISTRY: "http://localhost:9999/", // Dead URL - will fail if used
      },
      stdout: "pipe",
      stderr: "pipe",
    });

    proc.exited.then(exitCode => {
      Promise.all([new Response(proc.stdout).text(), new Response(proc.stderr).text()]).then(([stdout, stderr]) => {
        resolve({ stdout, stderr, exitCode });
      });
    });
  }));

  if (exitCode !== 0) {
    console.error("Install failed with exit code:", exitCode);
    console.error("stdout:", stdout);
    console.error("stderr:", stderr);
  }
  expect(exitCode).toBe(0);

  // Check that no network requests were made for packages that should be resolved locally
  expect(stderr).not.toContain("GET");
  expect(stderr).not.toContain("http");

  // Check that the lockfile was created correctly
  const lockfilePath = join(dir, "bun.lock");
  expect(await Bun.file(lockfilePath).exists()).toBe(true);

  // Verify that version 2.0.0 (devDependency) was linked
  // If peerDependency range ^1.0.0 was used, it would try to fetch from npm and fail
  const testResult = await new Promise<string>(resolve => {
    const proc = Bun.spawn({
      cmd: [bunExe(), "packages/lib/test.js"],
      cwd: dir,
      env: bunEnv,
      stdout: "pipe",
    });

    new Response(proc.stdout).text().then(resolve);
  });

  expect(testResult.trim()).toBe("2.0.0");
});

test("devDependencies and peerDependencies with different versions should coexist", async () => {
|
||||
const dir = tempDirWithFiles("dev-peer-different-versions", {
|
||||
"package.json": JSON.stringify({
|
||||
name: "test-monorepo",
|
||||
version: "1.0.0",
|
||||
workspaces: {
|
||||
packages: ["packages/*"],
|
||||
nodeLinker: "isolated",
|
||||
},
|
||||
}),
|
||||
"packages/lib/package.json": JSON.stringify({
|
||||
name: "lib",
|
||||
version: "1.0.0",
|
||||
dependencies: {},
|
||||
devDependencies: {
|
||||
"utils": "1.0.0",
|
||||
},
|
||||
peerDependencies: {
|
||||
"utils": "^1.0.0",
|
||||
},
|
||||
}),
|
||||
"packages/lib/index.js": `console.log("lib");`,
|
||||
"packages/utils/package.json": JSON.stringify({
|
||||
name: "utils",
|
||||
version: "1.0.0",
|
||||
main: "index.js",
|
||||
}),
|
||||
"packages/utils/index.js": `console.log("utils");`,
|
||||
});
|
||||
|
||||
// Run bun install in the monorepo
|
||||
const { stdout, stderr, exitCode } = await new Promise<{ stdout: string; stderr: string; exitCode: number }>(
|
||||
resolve => {
|
||||
const proc = Bun.spawn({
|
||||
cmd: [bunExe(), "install", "--no-progress", "--no-summary"],
|
||||
cwd: dir,
|
||||
env: bunEnv,
|
||||
stdout: "pipe",
|
||||
stderr: "pipe",
|
||||
});
|
||||
|
||||
proc.exited.then(exitCode => {
|
||||
Promise.all([new Response(proc.stdout).text(), new Response(proc.stderr).text()]).then(([stdout, stderr]) => {
|
||||
resolve({ stdout, stderr, exitCode });
|
||||
});
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
if (exitCode !== 0) {
|
||||
console.error("Install failed with exit code:", exitCode);
|
||||
console.error("stdout:", stdout);
|
||||
console.error("stderr:", stderr);
|
||||
}
|
||||
expect(exitCode).toBe(0);
|
||||
|
||||
// Check that the lockfile was created correctly
|
||||
const lockfilePath = join(dir, "bun.lock");
|
||||
expect(await Bun.file(lockfilePath).exists()).toBe(true);
|
||||
});
|
||||
|
||||
test("dependency behavior comparison prioritizes devDependencies", async () => {
|
||||
const dir = tempDirWithFiles("behavior-comparison", {
|
||||
"package.json": JSON.stringify({
|
||||
name: "test-app",
|
||||
version: "1.0.0",
|
||||
dependencies: {},
|
||||
devDependencies: {
|
||||
"typescript": "^5.0.0",
|
||||
},
|
||||
peerDependencies: {
|
||||
"typescript": "^4.0.0 || ^5.0.0",
|
||||
},
|
||||
}),
|
||||
"index.js": `console.log("app");`,
|
||||
});
|
||||
|
||||
// Run bun install
|
||||
const { stdout, stderr, exitCode } = await new Promise<{ stdout: string; stderr: string; exitCode: number }>(
|
||||
resolve => {
|
||||
const proc = Bun.spawn({
|
||||
cmd: [bunExe(), "install", "--no-progress", "--no-summary"],
|
||||
cwd: dir,
|
||||
env: bunEnv,
|
||||
stdout: "pipe",
|
||||
stderr: "pipe",
|
||||
});
|
||||
|
||||
proc.exited.then(exitCode => {
|
||||
Promise.all([new Response(proc.stdout).text(), new Response(proc.stderr).text()]).then(([stdout, stderr]) => {
|
||||
resolve({ stdout, stderr, exitCode });
|
||||
});
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
if (exitCode !== 0) {
|
||||
console.error("Install failed with exit code:", exitCode);
|
||||
console.error("stdout:", stdout);
|
||||
console.error("stderr:", stderr);
|
||||
}
|
||||
expect(exitCode).toBe(0);
|
||||
|
||||
// Check that the lockfile was created correctly
|
||||
const lockfilePath = join(dir, "bun.lock");
|
||||
expect(await Bun.file(lockfilePath).exists()).toBe(true);
|
||||
});
|
||||
|
||||
test("Next.js monorepo scenario should not make unnecessary network requests", async () => {
|
||||
const dir = tempDirWithFiles("nextjs-monorepo", {
|
||||
"package.json": JSON.stringify({
|
||||
name: "nextjs-monorepo",
|
||||
version: "1.0.0",
|
||||
workspaces: {
|
||||
packages: ["packages/*"],
|
||||
nodeLinker: "isolated",
|
||||
},
|
||||
}),
|
||||
"packages/web/package.json": JSON.stringify({
|
||||
name: "web",
|
||||
version: "1.0.0",
|
||||
dependencies: {},
|
||||
devDependencies: {
|
||||
"next": "15.0.0-canary.119", // Specific canary version for dev
|
||||
},
|
||||
peerDependencies: {
|
||||
"next": "^14.0.0 || ^15.0.0", // Range that would accept 14.x or 15.x stable
|
||||
},
|
||||
}),
|
||||
"packages/web/test.js": `const next = require("next/package.json"); console.log(next.version);`,
|
||||
// Only provide the canary version that matches devDependencies
|
||||
"packages/next/package.json": JSON.stringify({
|
||||
name: "next",
|
||||
version: "15.0.0-canary.119",
|
||||
main: "index.js",
|
||||
}),
|
||||
"packages/next/index.js": `console.log("next workspace");`,
|
||||
});
|
||||
|
||||
// Run initial install
|
||||
let { stdout, stderr, exitCode } = await new Promise<{ stdout: string; stderr: string; exitCode: number }>(
|
||||
resolve => {
|
||||
const proc = Bun.spawn({
|
||||
cmd: [bunExe(), "install", "--no-progress", "--no-summary"],
|
||||
cwd: dir,
|
||||
env: bunEnv,
|
||||
stdout: "pipe",
|
||||
stderr: "pipe",
|
||||
});
|
||||
proc.exited.then(exitCode => {
|
||||
Promise.all([new Response(proc.stdout).text(), new Response(proc.stderr).text()]).then(([stdout, stderr]) => {
|
||||
resolve({ stdout, stderr, exitCode });
|
||||
});
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
if (exitCode !== 0) {
|
||||
console.error("Install failed with exit code:", exitCode);
|
||||
console.error("stdout:", stdout);
|
||||
console.error("stderr:", stderr);
|
||||
}
|
||||
expect(exitCode).toBe(0);
|
||||
|
||||
// Run bun install with dead registry
|
||||
({ stdout, stderr, exitCode } = await new Promise<{ stdout: string; stderr: string; exitCode: number }>(resolve => {
|
||||
const proc = Bun.spawn({
|
||||
cmd: [bunExe(), "install", "--no-progress", "--no-summary"],
|
||||
cwd: dir,
|
||||
env: {
|
||||
...bunEnv,
|
||||
NPM_CONFIG_REGISTRY: "http://localhost:9999/", // Dead URL
|
||||
},
|
||||
stdout: "pipe",
|
||||
stderr: "pipe",
|
||||
});
|
||||
|
||||
proc.exited.then(exitCode => {
|
||||
Promise.all([new Response(proc.stdout).text(), new Response(proc.stderr).text()]).then(([stdout, stderr]) => {
|
||||
resolve({ stdout, stderr, exitCode });
|
||||
});
|
||||
});
|
||||
}));
|
||||
|
||||
expect(exitCode).toBe(0);
|
||||
|
||||
// The key test: should not make network requests for packages that exist in workspace
|
||||
// When devDependencies are prioritized over peerDependencies, the workspace version should be used
|
||||
expect(stderr).not.toContain("GET");
|
||||
expect(stderr).not.toContain("404");
|
||||
expect(stderr).not.toContain("http");
|
||||
|
||||
// Check that the lockfile was created correctly
|
||||
const lockfilePath = join(dir, "bun.lock");
|
||||
expect(await Bun.file(lockfilePath).exists()).toBe(true);
|
||||
|
||||
// Verify that version 15.0.0-canary.119 (devDependency) was used
|
||||
// If peer range was used, it would try to fetch a stable version from npm and fail
|
||||
const testResult = await new Promise<string>(resolve => {
|
||||
const proc = Bun.spawn({
|
||||
cmd: [bunExe(), "packages/web/test.js"],
|
||||
cwd: dir,
|
||||
env: bunEnv,
|
||||
stdout: "pipe",
|
||||
});
|
||||
|
||||
new Response(proc.stdout).text().then(resolve);
|
||||
});
|
||||
|
||||
expect(testResult.trim()).toBe("15.0.0-canary.119");
|
||||
});
|
||||