mirror of https://github.com/oven-sh/bun
synced 2026-02-05 08:28:55 +00:00

Compare commits: dylan/dev- ... jarred/dev

4 commits:

| Author | SHA1 | Date |
|---|---|---|
| | 8ac093817d | |
| | 8b03f2434a | |
| | 438e27e99c | |
| | 1042043d9b | |
@@ -951,22 +951,14 @@ endif()

 if(APPLE)
   target_link_options(${bun} PUBLIC
+    -dead_strip
+    -dead_strip_dylibs
     -Wl,-ld_new
     -Wl,-no_compact_unwind
     -Wl,-stack_size,0x1200000
     -fno-keep-static-consts
     -Wl,-map,${bun}.linker-map
   )
-
-  # don't strip in debug, this seems to be needed so that the Zig std library
-  # `*dbHelper` DWARF symbols (used by LLDB for pretty printing) are in the
-  # output executable
-  if(NOT DEBUG)
-    target_link_options(${bun} PUBLIC
-      -dead_strip
-      -dead_strip_dylibs
-    )
-  endif()
 endif()

 if(LINUX)
@@ -1003,6 +995,7 @@ if(LINUX)
     -Wl,-no-pie
     -Wl,-icf=safe
     -Wl,--as-needed
+    -Wl,--gc-sections
     -Wl,-z,stack-size=12800000
     -Wl,--compress-debug-sections=zlib
     -Wl,-z,lazy
@@ -1018,15 +1011,6 @@ if(LINUX)
     -Wl,--build-id=sha1 # Better for debugging than default
     -Wl,-Map=${bun}.linker-map
   )
-
-  # don't strip in debug, this seems to be needed so that the Zig std library
-  # `*dbHelper` DWARF symbols (used by LLDB for pretty printing) are in the
-  # output executable
-  if(NOT DEBUG)
-    target_link_options(${bun} PUBLIC
-      -Wl,--gc-sections
-    )
-  endif()
 endif()

 # --- Symbols list ---
@@ -124,7 +124,7 @@ const argv0 = argv0_stdout.toString().trim();

 console.log(`Testing ${argv0} v${revision}`);

-const ntStatusPath = "C:\\Program Files (x86)\\Windows Kits\\10\\Include\\10.0.26100.0\\shared\\ntstatus.h";
+const ntStatusPath = "C:\\Program Files (x86)\\Windows Kits\\10\\Include\\10.0.22621.0\\shared\\ntstatus.h";
 let ntstatus_header_cache = null;
 function lookupWindowsError(code) {
   if (ntstatus_header_cache === null) {
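
The helper above resolves NTSTATUS crash codes against the Windows SDK's ntstatus.h. As a rough sketch of what such a lookup involves (the regex, function shape, and lack of caching here are illustrative assumptions, not the runner's actual implementation):

```ts
import { readFileSync } from "node:fs";

// Illustrative only: scan `#define STATUS_FOO ((NTSTATUS)0xC0000005L)` lines
// and map a crash code back to its symbolic name.
function lookupNtStatusName(code: number, headerPath: string): string | undefined {
  const header = readFileSync(headerPath, "utf8");
  for (const m of header.matchAll(/#define\s+(STATUS_\w+)\s+\(\(NTSTATUS\)(0x[0-9A-Fa-f]+)L?\)/g)) {
    // `>>> 0` normalizes a negative 32-bit exit code to its unsigned form
    if (Number(m[2]) === code >>> 0) return m[1];
  }
  return undefined;
}
```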
@@ -1,5 +1,5 @@
 #!/bin/sh
-# Version: 14
+# Version: 13

 # A script that installs the dependencies needed to build and test Bun.
 # This should work on macOS and Linux with a POSIX shell.
@@ -195,17 +195,6 @@ download_file() {
   print "$file_tmp_path"
 }

-# path=$(download_and_verify_file URL sha256)
-download_and_verify_file() {
-  file_url="$1"
-  hash="$2"
-
-  path=$(download_file "$file_url")
-  execute sh -c 'echo "'"$hash $path"'" | sha256sum -c' >/dev/null 2>&1
-
-  print "$path"
-}
-
 append_to_profile() {
   content="$1"
   profiles=".profile .zprofile .bash_profile .bashrc .zshrc"
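
The removed `download_and_verify_file` pins each download to a SHA-256 checksum before it is used. A minimal TypeScript sketch of the same flow using Bun's built-in hasher (the temp-path handling is an assumption standing in for `download_file`):

```ts
// Sketch: fetch a file and refuse it unless its SHA-256 matches the pinned hash.
async function downloadAndVerifyFile(url: string, expectedSha256: string): Promise<string> {
  const bytes = new Uint8Array(await (await fetch(url)).arrayBuffer());
  const actual = new Bun.CryptoHasher("sha256").update(bytes).digest("hex");
  if (actual !== expectedSha256) {
    throw new Error(`sha256 mismatch for ${url}: expected ${expectedSha256}, got ${actual}`);
  }
  const path = `/tmp/download-${Date.now()}`; // stand-in for download_file()'s temp path
  await Bun.write(path, bytes);
  return path;
}
```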
@@ -411,7 +400,7 @@ check_package_manager() {
     pm="brew"
     ;;
   linux)
-    if [ -f "$(which apt-get)" ]; then
+    if [ -f "$(which apt)" ]; then
       pm="apt"
     elif [ -f "$(which dnf)" ]; then
       pm="dnf"
@@ -481,8 +470,10 @@ check_ulimit() {

   print "Checking ulimits..."
   systemd_conf="/etc/systemd/system.conf"
-  limits_conf="/etc/security/limits.d/99-unlimited.conf"
-  create_file "$limits_conf"
+  if [ -f "$systemd_conf" ]; then
+    limits_conf="/etc/security/limits.d/99-unlimited.conf"
+    create_file "$limits_conf"
+  fi

   limits="core data fsize memlock nofile rss stack cpu nproc as locks sigpending msgqueue"
   for limit in $limits; do
@@ -504,10 +495,6 @@ check_ulimit() {
     fi

     if [ -f "$systemd_conf" ]; then
-      # in systemd's configuration you need to say "infinity" when you mean "unlimited"
-      if [ "$limit_value" = "unlimited" ]; then
-        limit_value="infinity"
-      fi
       append_file "$systemd_conf" "DefaultLimit$limit_upper=$limit_value"
     fi
   done
@@ -562,7 +549,7 @@ check_ulimit() {
 package_manager() {
   case "$pm" in
   apt)
-    execute_sudo apt-get "$@"
+    execute_sudo apt "$@"
     ;;
   dnf)
     case "$distro" in
@@ -611,7 +598,6 @@ install_packages() {
     package_manager install \
       --yes \
      --no-install-recommends \
-      --fix-missing \
      "$@"
    ;;
  dnf)
@@ -687,7 +673,7 @@ install_common_software() {
   esac

   case "$distro" in
-  amzn | alpine)
+  amzn)
     install_packages \
       tar
     ;;
@@ -1376,58 +1362,6 @@ install_chromium() {
   esac
 }

-install_age() {
-  # we only use this to encrypt core dumps, which we only have on Linux
-  case "$os" in
-  linux)
-    age_tarball=""
-    case "$arch" in
-    x64)
-      age_tarball="$(download_and_verify_file https://github.com/FiloSottile/age/releases/download/v1.2.1/age-v1.2.1-linux-amd64.tar.gz 7df45a6cc87d4da11cc03a539a7470c15b1041ab2b396af088fe9990f7c79d50)"
-      ;;
-    aarch64)
-      age_tarball="$(download_and_verify_file https://github.com/FiloSottile/age/releases/download/v1.2.1/age-v1.2.1-linux-arm64.tar.gz 57fd79a7ece5fe501f351b9dd51a82fbee1ea8db65a8839db17f5c080245e99f)"
-      ;;
-    esac
-
-    age_extract_dir="$(create_tmp_directory)"
-    execute tar -C "$age_extract_dir" -zxf "$age_tarball" age/age
-    move_to_bin "$age_extract_dir/age/age"
-    ;;
-  esac
-}
-
-configure_core_dumps() {
-  # we only have core dumps on Linux
-  case "$os" in
-  linux)
-    # set up a directory that the test runner will look in after running tests
-    cores_dir="/var/bun-cores-$distro-$release-$arch"
-    sysctl_file="/etc/sysctl.d/local.conf"
-    create_directory "$cores_dir"
-    # ensure core_pattern will point there
-    # %e = executable filename
-    # %p = pid
-    append_file "$sysctl_file" "kernel.core_pattern = $cores_dir/%e-%p.core"
-
-    # disable apport.service if it exists since it will override the core_pattern
-    if which systemctl >/dev/null; then
-      if systemctl list-unit-files apport.service >/dev/null; then
-        execute_sudo "$systemctl" disable --now apport.service
-      fi
-    fi
-
-    # load the new configuration
-    execute_sudo sysctl -p "$sysctl_file"
-
-    # ensure that a regular user will be able to run sysctl
-    if [ -d /sbin ]; then
-      append_to_path /sbin
-    fi
-    ;;
-  esac
-}
-
 clean_system() {
   if ! [ "$ci" = "1" ]; then
     return
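
The removed `configure_core_dumps` points `kernel.core_pattern` at a per-machine directory using the `%e` (executable name) and `%p` (pid) template specifiers. A small sketch of how a core file path falls out of that pattern (the helper name is illustrative):

```ts
import { basename, dirname, join } from "node:path";

// "/var/bun-cores-debian-12-x64/%e-%p.core" + ("bun", 12345)
//   -> "/var/bun-cores-debian-12-x64/bun-12345.core"
function coreFileFor(corePattern: string, executable: string, pid: number): string {
  const name = basename(corePattern).replace("%e", executable).replace("%p", String(pid));
  return join(dirname(corePattern), name);
}
```

This is the same naming that the deleted debug-coredump.ts script further down relies on when it looks for `bun-${pid}.core`.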
@@ -1453,8 +1387,6 @@ main() {
   install_build_essentials
   install_chromium
   install_fuse_python
-  install_age
-  configure_core_dumps
   clean_system
 }
@@ -1,63 +0,0 @@
-import fs from "node:fs";
-import { tmpdir } from "node:os";
-import { basename, join } from "node:path";
-import { parseArgs } from "node:util";
-
-// usage: bun debug-coredump.ts
-// -p <PID of the test that crashed> (buildkite should show this)
-// -b <URL to the bun-profile.zip artifact for the appropriate platform>
-// -c <URL to the bun-cores.tar.gz.age artifact for the appropriate platform>
-// -d <debugger> (default: lldb)
-const {
-  values: { pid: stringPid, ["build-url"]: buildUrl, ["cores-url"]: coresUrl, debugger: debuggerPath },
-} = parseArgs({
-  options: {
-    pid: { type: "string", short: "p" },
-    ["build-url"]: { type: "string", short: "b" },
-    ["cores-url"]: { type: "string", short: "c" },
-    debugger: { type: "string", short: "d", default: "lldb" },
-  },
-});
-
-if (stringPid === undefined) throw new Error("no PID given");
-const pid = parseInt(stringPid);
-if (buildUrl === undefined) throw new Error("no build-url given");
-if (coresUrl === undefined) throw new Error("no cores-url given");
-if (!process.env.AGE_CORES_IDENTITY?.startsWith("AGE-SECRET-KEY-"))
-  throw new Error("no identity given in $AGE_CORES_IDENTITY");
-
-const id = Bun.hash(buildUrl + coresUrl).toString(36);
-const dir = join(tmpdir(), `debug-coredump-${id}.tmp`);
-fs.mkdirSync(dir, { recursive: true });
-
-if (!fs.existsSync(join(dir, "bun-profile")) || !fs.existsSync(join(dir, `bun-${pid}.core`))) {
-  console.log("downloading bun-profile.zip");
-  const zip = await (await fetch(buildUrl)).arrayBuffer();
-  await Bun.write(join(dir, "bun-profile.zip"), zip);
-  // -j: junk paths (don't create directories when extracting)
-  // -o: overwrite without prompting
-  // -d: extract to this directory instead of cwd
-  await Bun.$`unzip -j -o ${join(dir, "bun-profile.zip")} -d ${dir}`;
-
-  console.log("downloading cores");
-  const cores = await (await fetch(coresUrl)).arrayBuffer();
-  await Bun.$`bash -c ${`age -d -i <(echo "$AGE_CORES_IDENTITY")`} < ${cores} | tar -zxvC ${dir}`;
-
-  console.log("moving cores out of nested directory");
-  for await (const file of new Bun.Glob("bun-cores-*/bun-*.core").scan(dir)) {
-    fs.renameSync(join(dir, file), join(dir, basename(file)));
-  }
-} else {
-  console.log(`already downloaded in ${dir}`);
-}
-
-console.log("launching debugger:");
-console.log(`${debuggerPath} --core ${join(dir, `bun-${pid}.core`)} ${join(dir, "bun-profile")}`);
-
-const proc = await Bun.spawn([debuggerPath, "--core", join(dir, `bun-${pid}.core`), join(dir, "bun-profile")], {
-  stdin: "inherit",
-  stdout: "inherit",
-  stderr: "inherit",
-});
-await proc.exited;
-process.exit(proc.exitCode);
@@ -51,7 +51,6 @@ import {
   isBuildkite,
   isCI,
   isGithubAction,
-  isLinux,
   isMacOS,
   isWindows,
   isX64,
@@ -60,7 +59,6 @@ import {
   startGroup,
   tmpdir,
   unzip,
-  uploadArtifact,
 } from "./utils.mjs";
 let isQuiet = false;
 const cwd = import.meta.dirname ? dirname(import.meta.dirname) : process.cwd();
@@ -148,10 +146,6 @@ const { values: options, positionals: filters } = parseArgs({
       type: "boolean",
       default: isBuildkite,
     },
-    ["coredump-upload"]: {
-      type: "boolean",
-      default: isBuildkite && isLinux,
-    },
   },
 });
@@ -611,78 +605,6 @@ async function runTests() {
     }
   }

-  if (options["coredump-upload"]) {
-    try {
-      // this sysctl is set in bootstrap.sh to /var/bun-cores-$distro-$release-$arch
-      const sysctl = await spawnSafe({ command: "sysctl", args: ["-n", "kernel.core_pattern"] });
-      let coresDir = sysctl.stdout;
-      if (sysctl.ok) {
-        if (coresDir.startsWith("|")) {
-          throw new Error("cores are being piped not saved");
-        }
-        // change /foo/bar/%e-%p.core to /foo/bar
-        coresDir = dirname(sysctl.stdout);
-      } else {
-        throw new Error(`Failed to check core_pattern: ${sysctl.error}`);
-      }
-
-      const coresDirBase = dirname(coresDir);
-      const coresDirName = basename(coresDir);
-      const coreFileNames = readdirSync(coresDir);
-
-      if (coreFileNames.length > 0) {
-        console.log(`found ${coreFileNames.length} cores in ${coresDir}`);
-        let totalBytes = 0;
-        let totalBlocks = 0;
-        for (const f of coreFileNames) {
-          const stat = statSync(join(coresDir, f));
-          totalBytes += stat.size;
-          totalBlocks += stat.blocks;
-        }
-        console.log(`total apparent size = ${totalBytes} bytes`);
-        console.log(`total size on disk = ${512 * totalBlocks} bytes`);
-        const outdir = mkdtempSync(join(tmpdir(), "cores-upload"));
-        const outfileName = `${coresDirName}.tar.gz.age`;
-        const outfileAbs = join(outdir, outfileName);
-
-        // This matches an age identity known by Bun employees. Core dumps from CI have to be kept
-        // secret since they will contain API keys.
-        const ageRecipient = "age1eunsrgxwjjpzr48hm0y98cw2vn5zefjagt4r0qj4503jg2nxedqqkmz6fu"; // reject external PRs changing this, see above
-
-        // Run tar in the parent directory of coresDir so that it creates archive entries with
-        // coresDirName in them. This way when you extract the tarball you get a folder named
-        // bun-cores-XYZ containing core files, instead of a bunch of core files strewn in your
-        // current directory
-        const before = Date.now();
-        const zipAndEncrypt = await spawnSafe({
-          command: "bash",
-          args: [
-            "-c",
-            // tar -S: handle sparse files efficiently
-            `set -euo pipefail && tar -Sc "$0" | gzip -1 | age -e -r ${ageRecipient} -o "$1"`,
-            // $0
-            coresDirName,
-            // $1
-            outfileAbs,
-          ],
-          cwd: coresDirBase,
-          stdout: () => {},
-          timeout: 60_000,
-        });
-        const elapsed = Date.now() - before;
-        if (!zipAndEncrypt.ok) {
-          throw new Error(zipAndEncrypt.error);
-        }
-        console.log(`saved core dumps to ${outfileAbs} (${statSync(outfileAbs).size} bytes) in ${elapsed} ms`);
-        await uploadArtifact(outfileAbs);
-      } else {
-        console.log(`no cores found in ${coresDir}`);
-      }
-    } catch (err) {
-      console.error("Error collecting and uploading core dumps:", err);
-    }
-  }
-
   if (!isCI && !isQuiet) {
     console.table({
       "Total Tests": okResults.length + failedResults.length + flakyResults.length,
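
The removed upload step streams a sparse-aware tar through fast gzip into `age` public-key encryption; the deleted debug-coredump.ts script above is the matching decrypt side. A hedged round-trip sketch in Bun shell (the recipient, identity, and paths are placeholders, not the real CI key):

```ts
import { $ } from "bun";

const recipient = "age1example..."; // placeholder public key, not the CI recipient
// Encrypt, as the runner did: tar -S keeps sparse core files small on the wire.
await $`bash -c ${`set -euo pipefail && tar -Sc "$0" | gzip -1 | age -e -r ${recipient} -o "$1"`} bun-cores-demo out.tar.gz.age`;
// Decrypt, as debug-coredump.ts did, with the matching secret key:
await $`bash -c ${`age -d -i <(echo "$AGE_CORES_IDENTITY")`} < out.tar.gz.age | tar -zxv`;
```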
@@ -858,7 +780,6 @@ async function spawnSafe(options) {
       const [, message] = error || [];
       error = message ? message.split("\n")[0].toLowerCase() : "crash";
       error = error.indexOf("\\n") !== -1 ? error.substring(0, error.indexOf("\\n")) : error;
-      error = `pid ${subprocess.pid} ${error}`;
     } else if (signalCode) {
       if (signalCode === "SIGTERM" && duration >= timeout) {
         error = "timeout";
@@ -950,7 +871,7 @@ async function spawnBun(execPath, { args, cwd, timeout, env, stdout, stderr }) {
   };

   if (basename(execPath).includes("asan")) {
-    bunEnv.ASAN_OPTIONS = "allow_user_segv_handler=1:disable_coredump=0";
+    bunEnv.ASAN_OPTIONS = "allow_user_segv_handler=1";
   }

   if (isWindows && bunEnv.Path) {
@@ -1102,7 +1023,7 @@ function getTestTimeout(testPath) {
   if (/integration|3rd_party|docker|bun-install-registry|v8/i.test(testPath)) {
     return integrationTimeout;
   }
-  if (/napi/i.test(testPath) || /v8/i.test(testPath)) {
+  if (/napi/i.test(testPath)) {
     return napiTimeout;
   }
   return testTimeout;
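
`ASAN_OPTIONS` is a colon-separated `key=value` list; `disable_coredump=0` opts back into kernel core dumps, which AddressSanitizer suppresses by default because its shadow mappings make cores enormous. A tiny illustrative parse:

```ts
// Illustrative: split an ASAN_OPTIONS string into a lookup table.
const asanOptions = Object.fromEntries(
  "allow_user_segv_handler=1:disable_coredump=0".split(":").map(kv => kv.split("=", 2) as [string, string]),
);
console.log(asanOptions.disable_coredump); // "0" -> core dumps permitted
```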
@@ -16,7 +16,7 @@ import {
 } from "node:fs";
 import { connect } from "node:net";
 import { hostname, homedir as nodeHomedir, tmpdir as nodeTmpdir, release, userInfo } from "node:os";
-import { basename, dirname, join, relative, resolve } from "node:path";
+import { dirname, join, relative, resolve } from "node:path";
 import { normalize as normalizeWindows } from "node:path/win32";

 export const isWindows = process.platform === "win32";
@@ -1370,16 +1370,13 @@ export async function getLastSuccessfulBuild() {
 }

 /**
- * @param {string} filename Absolute path to file to upload
+ * @param {string} filename
+ * @param {string} [cwd]
  */
-export async function uploadArtifact(filename) {
+export async function uploadArtifact(filename, cwd) {
   if (isBuildkite) {
-    await spawnSafe(["buildkite-agent", "artifact", "upload", basename(filename)], {
-      cwd: dirname(filename),
-      stdio: "inherit",
-    });
-  } else {
-    console.warn(`not in buildkite. artifact ${filename} not uploaded.`);
+    const relativePath = relative(cwd ?? process.cwd(), filename);
+    await spawnSafe(["buildkite-agent", "artifact", "upload", relativePath], { cwd, stdio: "inherit" });
   }
 }
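
With the two-argument signature, the caller controls which directory the artifact path is made relative to, which in turn determines the name `buildkite-agent` stores it under. A hypothetical call (paths are illustrative):

```ts
// Uploads "bun-cores-demo.tar.gz.age" (the path relative to cwd), roughly:
//   cd /tmp/cores-upload && buildkite-agent artifact upload bun-cores-demo.tar.gz.age
await uploadArtifact("/tmp/cores-upload/bun-cores-demo.tar.gz.age", "/tmp/cores-upload");
```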
@@ -56,6 +56,8 @@ behavior: Behavior = .{},
 /// 2. name ASC
 /// "name" must be ASC so that later, when we rebuild the lockfile
 /// we insert it back in reverse order without an extra sorting pass
+/// Note: For workspace packages with both dev and peer dependencies,
+/// dev dependencies are prioritized to prevent unnecessary network requests
 pub fn isLessThan(string_buf: []const u8, lhs: Dependency, rhs: Dependency) bool {
     const behavior = lhs.behavior.cmp(rhs.behavior);
     if (behavior != .eq) {
@@ -1420,6 +1422,19 @@ pub const Behavior = packed struct(u8) {
             .lt;
     }

+    // Special handling for workspace packages with both dev and peer dependencies
+    // If both behaviors have workspace flag, prioritize dev over peer
+    if (lhs.workspace and rhs.workspace) {
+        if (lhs.dev and lhs.peer and rhs.peer and !rhs.dev) {
+            // lhs is workspace + dev + peer, rhs is workspace + peer only
+            return .gt;
+        }
+        if (rhs.dev and rhs.peer and lhs.peer and !lhs.dev) {
+            // rhs is workspace + dev + peer, lhs is workspace + peer only
+            return .lt;
+        }
+    }
+
     if (lhs.isDev() != rhs.isDev()) {
         return if (lhs.isDev())
             .gt
@@ -1427,15 +1442,15 @@ pub const Behavior = packed struct(u8) {
             .lt;
     }

-    if (lhs.isOptional() != rhs.isOptional()) {
-        return if (lhs.isOptional())
+    if (lhs.isPeer() != rhs.isPeer()) {
+        return if (lhs.isPeer())
             .gt
         else
             .lt;
     }

-    if (lhs.isPeer() != rhs.isPeer()) {
-        return if (lhs.isPeer())
+    if (lhs.isOptional() != rhs.isOptional()) {
+        return if (lhs.isOptional())
             .gt
         else
             .lt;
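
Restated in TypeScript for clarity, the added branch only fires when both sides carry the workspace flag, and it ranks dev+peer above peer-only before the generic dev/peer/optional checks run (field names mirror the Zig struct; the function shape is illustrative):

```ts
type B = { workspace: boolean; dev: boolean; peer: boolean };

// Returns the forced ordering, or null to fall through to the later checks.
function workspaceDevPeerRule(lhs: B, rhs: B): "gt" | "lt" | null {
  if (lhs.workspace && rhs.workspace) {
    if (lhs.dev && lhs.peer && rhs.peer && !rhs.dev) return "gt"; // lhs: dev+peer, rhs: peer only
    if (rhs.dev && rhs.peer && lhs.peer && !lhs.dev) return "lt"; // rhs: dev+peer, lhs: peer only
  }
  return null;
}
```

The new test file at the bottom of this compare exercises exactly this rule through a `MockBehavior` mirror of the Zig comparison.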
@@ -167,7 +167,6 @@ pub const Features = struct {
 };

 pub const workspace = Features{
-    .check_for_duplicate_dependencies = true,
     .dev_dependencies = true,
     .optional_dependencies = true,
     .trusted_dependencies = true,
@@ -1064,11 +1064,11 @@ pub const Package = extern struct {
         else => external_alias.hash,
     };

-    var has_workspace_path: ?String = null;
-    var has_workspace_version = workspace_ver;
+    var workspace_path: ?String = null;
+    var workspace_version = workspace_ver;
     if (comptime tag == null) {
-        has_workspace_path = lockfile.workspace_paths.get(name_hash);
-        has_workspace_version = lockfile.workspace_versions.get(name_hash);
+        workspace_path = lockfile.workspace_paths.get(name_hash);
+        workspace_version = lockfile.workspace_versions.get(name_hash);
     }

     if (comptime tag != null) {
@@ -1093,9 +1093,9 @@ pub const Package = extern struct {
     },
     .npm => {
         const npm = dependency_version.value.npm;
-        if (has_workspace_version) |workspace_version| {
-            if (pm.options.link_workspace_packages and npm.version.satisfies(workspace_version, buf, buf)) {
-                const path = has_workspace_path.?.sliced(buf);
+        if (workspace_version != null) {
+            if (pm.options.link_workspace_packages and npm.version.satisfies(workspace_version.?, buf, buf)) {
+                const path = workspace_path.?.sliced(buf);
                 if (Dependency.parseWithTag(
                     allocator,
                     external_alias.value,
@@ -1112,7 +1112,7 @@ pub const Package = extern struct {
             } else {
                 // It doesn't satisfy, but a workspace shares the same name. Override the workspace with the other dependency
                 for (package_dependencies[0..dependencies_count]) |*dep| {
-                    if (dep.name_hash == name_hash and dep.behavior.isWorkspaceOnly()) {
+                    if (dep.name_hash == name_hash and dep.version.tag == .workspace) {
                         dep.* = .{
                             .behavior = if (in_workspace) group.behavior.add(.workspace) else group.behavior,
                             .name = external_alias.value,
@@ -1126,11 +1126,11 @@ pub const Package = extern struct {
         }
     },
     .workspace => workspace: {
-        if (has_workspace_path) |workspace_path| {
+        if (workspace_path) |path| {
             if (workspace_range) |range| {
-                if (has_workspace_version) |workspace_version| {
-                    if (range.satisfies(workspace_version, buf, buf)) {
-                        dependency_version.value.workspace = workspace_path;
+                if (workspace_version) |ver| {
+                    if (range.satisfies(ver, buf, buf)) {
+                        dependency_version.value.workspace = path;
                         break :workspace;
                     }
                 }
@@ -1138,7 +1138,7 @@ pub const Package = extern struct {
                 // important to trim before len == 0 check. `workspace:foo@ ` should install successfully
                 const version_literal = strings.trim(range.input, &strings.whitespace_chars);
                 if (version_literal.len == 0 or range.@"is *"() or Semver.Version.isTaggedVersionOnly(version_literal)) {
-                    dependency_version.value.workspace = workspace_path;
+                    dependency_version.value.workspace = path;
                     break :workspace;
                 }
@@ -1157,7 +1157,7 @@ pub const Package = extern struct {
                 return error.InstallFailed;
             }

-            dependency_version.value.workspace = workspace_path;
+            dependency_version.value.workspace = path;
         } else {
             const workspace = dependency_version.value.workspace.slice(buf);
             const path = string_builder.append(String, if (strings.eqlComptime(workspace, "*")) "*" else brk: {
@@ -1190,13 +1190,13 @@ pub const Package = extern struct {
     const workspace_entry = try lockfile.workspace_paths.getOrPut(allocator, name_hash);
     const found_matching_workspace = workspace_entry.found_existing;

-    if (has_workspace_version) |workspace_version| {
-        try lockfile.workspace_versions.put(allocator, name_hash, workspace_version);
+    if (workspace_version) |ver| {
+        try lockfile.workspace_versions.put(allocator, name_hash, ver);
         for (package_dependencies[0..dependencies_count]) |*package_dep| {
             if (switch (package_dep.version.tag) {
                 // `dependencies` & `workspaces` defined within the same `package.json`
                 .npm => String.Builder.stringHash(package_dep.realname().slice(buf)) == name_hash and
-                    package_dep.version.value.npm.version.satisfies(workspace_version, buf, buf),
+                    package_dep.version.value.npm.version.satisfies(ver, buf, buf),
                 // `workspace:*`
                 .workspace => found_matching_workspace and
                     String.Builder.stringHash(package_dep.realname().slice(buf)) == name_hash,
@@ -1234,25 +1234,19 @@ pub const Package = extern struct {

         // `peerDependencies` may be specified on existing dependencies. Packages in `workspaces` are deduplicated when
         // the array is processed
-        if (comptime features.check_for_duplicate_dependencies) {
-            if (!this_dep.behavior.isWorkspaceOnly()) {
-                const entry = lockfile.scratch.duplicate_checker_map.getOrPutAssumeCapacity(external_alias.hash);
-                if (entry.found_existing) {
-                    // duplicate dependencies are allowed in optionalDependencies and devDependencies. choose dev over others
+        if (comptime features.check_for_duplicate_dependencies and !group.behavior.isPeer() and !group.behavior.isWorkspace()) {
+            const entry = lockfile.scratch.duplicate_checker_map.getOrPutAssumeCapacity(external_alias.hash);
+            if (entry.found_existing) {
+                // duplicate dependencies are allowed in optionalDependencies
+                if (comptime group.behavior.isOptional()) {
                     for (package_dependencies[0..dependencies_count]) |*package_dep| {
                         if (package_dep.name_hash == this_dep.name_hash) {
-                            if (comptime group.behavior.isOptional() or group.behavior.isDev()) {
-                                package_dep.* = this_dep;
-                                return null;
-                            }
-
-                            if (package_dep.behavior.isDev()) {
-                                // choose the existing one.
-                                return null;
-                            }
+                            package_dep.* = this_dep;
                             break;
                         }
                     }

                     return null;
                 } else {
                     var notes = try allocator.alloc(logger.Data, 1);
@@ -1269,9 +1263,9 @@ pub const Package = extern struct {
                     .{external_alias.slice(buf)},
                 );
             }

-                entry.value_ptr.* = value_loc;
-            }
+            entry.value_ptr.* = value_loc;
+        }

     return this_dep;
@@ -65,30 +65,12 @@ function validateLinkHeaderValue(hints) {
   );
 }

-function validateString(value, name) {
-  if (typeof value !== "string") throw $ERR_INVALID_ARG_TYPE(name, "string", value);
-}
-
-function validateFunction(value, name) {
-  if (typeof value !== "function") throw $ERR_INVALID_ARG_TYPE(name, "function", value);
-}
-
-function validateBoolean(value, name) {
-  if (typeof value !== "boolean") throw $ERR_INVALID_ARG_TYPE(name, "boolean", value);
-}
-
-function validateUndefined(value, name) {
-  if (value !== undefined) throw $ERR_INVALID_ARG_TYPE(name, "undefined", value);
-}
-
 function validateInternalField(object, fieldKey, className) {
   if (typeof object !== "object" || object === null || !ObjectPrototypeHasOwnProperty.$call(object, fieldKey)) {
     throw $ERR_INVALID_ARG_TYPE("this", className, object);
   }
 }

 hideFromStack(validateLinkHeaderValue, validateInternalField);
-hideFromStack(validateString, validateFunction, validateBoolean, validateUndefined);

 export default {
   /** (value, name) */
@@ -100,15 +82,15 @@ export default {
   /** `(value, name, min, max)` */
   validateNumber: $newCppFunction("NodeValidator.cpp", "jsFunction_validateNumber", 0),
   /** `(value, name)` */
-  validateString,
+  validateString: $newCppFunction("NodeValidator.cpp", "jsFunction_validateString", 0),
   /** `(number, name)` */
   validateFiniteNumber: $newCppFunction("NodeValidator.cpp", "jsFunction_validateFiniteNumber", 0),
   /** `(number, name, lower, upper, def)` */
   checkRangesOrGetDefault: $newCppFunction("NodeValidator.cpp", "jsFunction_checkRangesOrGetDefault", 0),
   /** `(value, name)` */
-  validateFunction,
+  validateFunction: $newCppFunction("NodeValidator.cpp", "jsFunction_validateFunction", 0),
   /** `(value, name)` */
-  validateBoolean,
+  validateBoolean: $newCppFunction("NodeValidator.cpp", "jsFunction_validateBoolean", 0),
   /** `(port, name = 'Port', allowZero = true)` */
   validatePort: $newCppFunction("NodeValidator.cpp", "jsFunction_validatePort", 0),
   /** `(signal, name)` */
@@ -126,7 +108,7 @@ export default {
   /** `(value, name)` */
   validatePlainFunction: $newCppFunction("NodeValidator.cpp", "jsFunction_validatePlainFunction", 0),
   /** `(value, name)` */
-  validateUndefined,
+  validateUndefined: $newCppFunction("NodeValidator.cpp", "jsFunction_validateUndefined", 0),
   /** `(buffer, name = 'buffer')` */
   validateBuffer: $newCppFunction("NodeValidator.cpp", "jsFunction_validateBuffer", 0),
   /** `(value, name, oneOf)` */
@@ -64,7 +64,7 @@ export const bunEnv: NodeJS.Dict<string> = {
 const ciEnv = { ...bunEnv };

 if (isASAN) {
-  bunEnv.ASAN_OPTIONS ??= "allow_user_segv_handler=1:disable_coredump=0";
+  bunEnv.ASAN_OPTIONS ??= "allow_user_segv_handler=1";
 }

 if (isWindows) {
test/js/bun/install/dependency-behavior-priority.test.ts (new file, 185 lines)
@@ -0,0 +1,185 @@
+import { test, expect } from "bun:test";
+
+// Mock the Behavior struct for testing
+class MockBehavior {
+  prod: boolean = false;
+  dev: boolean = false;
+  peer: boolean = false;
+  optional: boolean = false;
+  workspace: boolean = false;
+  bundled: boolean = false;
+
+  constructor(options: Partial<MockBehavior> = {}) {
+    Object.assign(this, options);
+  }
+
+  isProd() { return this.prod; }
+  isDev() { return this.dev; }
+  isPeer() { return this.peer; }
+  isOptional() { return this.optional && !this.peer; }
+  isWorkspace() { return this.workspace; }
+  isBundled() { return this.bundled; }
+  isWorkspaceOnly() { return this.workspace && !this.dev && !this.prod && !this.optional && !this.peer; }
+
+  eq(other: MockBehavior) {
+    return this.prod === other.prod &&
+      this.dev === other.dev &&
+      this.peer === other.peer &&
+      this.optional === other.optional &&
+      this.workspace === other.workspace &&
+      this.bundled === other.bundled;
+  }
+
+  // Mirror the comparison logic from Zig
+  cmp(other: MockBehavior): "lt" | "eq" | "gt" {
+    if (this.eq(other)) {
+      return "eq";
+    }
+
+    if (this.isWorkspaceOnly() !== other.isWorkspaceOnly()) {
+      return this.isWorkspaceOnly() ? "lt" : "gt";
+    }
+
+    if (this.isProd() !== other.isProd()) {
+      return this.isProd() ? "gt" : "lt";
+    }
+
+    // Special handling for workspace packages with both dev and peer dependencies
+    // If both behaviors have workspace flag, prioritize dev over peer
+    if (this.workspace && other.workspace) {
+      if (this.dev && this.peer && other.peer && !other.dev) {
+        // this is workspace + dev + peer, other is workspace + peer only
+        return "gt";
+      }
+      if (other.dev && other.peer && this.peer && !this.dev) {
+        // other is workspace + dev + peer, this is workspace + peer only
+        return "lt";
+      }
+    }
+
+    if (this.isDev() !== other.isDev()) {
+      return this.isDev() ? "gt" : "lt";
+    }
+
+    if (this.isPeer() !== other.isPeer()) {
+      return this.isPeer() ? "gt" : "lt";
+    }
+
+    if (this.isOptional() !== other.isOptional()) {
+      return this.isOptional() ? "gt" : "lt";
+    }
+
+    if (this.isWorkspace() !== other.isWorkspace()) {
+      return this.isWorkspace() ? "gt" : "lt";
+    }
+
+    return "eq";
+  }
+}
+
+test("dependency behavior comparison for workspace packages prioritizes dev+peer over peer-only", () => {
+  const workspaceDevPeer = new MockBehavior({ workspace: true, dev: true, peer: true });
+  const workspacePeerOnly = new MockBehavior({ workspace: true, peer: true });
+
+  // workspace + dev + peer should have higher priority than workspace + peer only
+  expect(workspaceDevPeer.cmp(workspacePeerOnly)).toBe("gt");
+  expect(workspacePeerOnly.cmp(workspaceDevPeer)).toBe("lt");
+});
+
+test("regular dev vs peer dependencies follow standard priority", () => {
+  const devBehavior = new MockBehavior({ dev: true });
+  const peerBehavior = new MockBehavior({ peer: true });
+
+  // Without workspace flag, dev and peer follow standard ordering
+  expect(devBehavior.cmp(peerBehavior)).toBe("gt");
+  expect(peerBehavior.cmp(devBehavior)).toBe("lt");
+});
+
+test("dependency behavior comparison handles production dependencies", () => {
+  const prodBehavior = new MockBehavior({ prod: true });
+  const devBehavior = new MockBehavior({ dev: true });
+  const peerBehavior = new MockBehavior({ peer: true });
+
+  // Production dependencies should have highest priority
+  expect(prodBehavior.cmp(devBehavior)).toBe("gt");
+  expect(prodBehavior.cmp(peerBehavior)).toBe("gt");
+  expect(devBehavior.cmp(prodBehavior)).toBe("lt");
+  expect(peerBehavior.cmp(prodBehavior)).toBe("lt");
+});
+
+test("dependency behavior comparison handles workspace dependencies", () => {
+  const workspaceOnlyBehavior = new MockBehavior({ workspace: true });
+  const devBehavior = new MockBehavior({ dev: true });
+  const peerBehavior = new MockBehavior({ peer: true });
+
+  // Workspace-only dependencies should have highest priority
+  expect(workspaceOnlyBehavior.cmp(devBehavior)).toBe("lt");
+  expect(workspaceOnlyBehavior.cmp(peerBehavior)).toBe("lt");
+  expect(devBehavior.cmp(workspaceOnlyBehavior)).toBe("gt");
+  expect(peerBehavior.cmp(workspaceOnlyBehavior)).toBe("gt");
+});
+
+test("dependency behavior comparison handles optional dependencies", () => {
+  const optionalBehavior = new MockBehavior({ optional: true });
+  const devBehavior = new MockBehavior({ dev: true });
+  const peerBehavior = new MockBehavior({ peer: true });
+
+  // Optional dependencies should have lower priority than dev/peer dependencies
+  expect(devBehavior.cmp(optionalBehavior)).toBe("gt");
+  expect(peerBehavior.cmp(optionalBehavior)).toBe("gt");
+  expect(optionalBehavior.cmp(devBehavior)).toBe("lt");
+  expect(optionalBehavior.cmp(peerBehavior)).toBe("lt");
+});
+
+test("workspace-specific behavior for dev+peer vs peer dependencies", () => {
+  // Test the specific Next.js monorepo scenario
+  const workspaceDevPeer = new MockBehavior({ workspace: true, dev: true, peer: true });
+  const workspacePeer = new MockBehavior({ workspace: true, peer: true });
+  const workspaceDev = new MockBehavior({ workspace: true, dev: true });
+
+  // Workspace dev+peer should be prioritized over workspace peer-only
+  expect(workspaceDevPeer.cmp(workspacePeer)).toBe("gt");
+  expect(workspacePeer.cmp(workspaceDevPeer)).toBe("lt");
+
+  // Workspace dev+peer vs workspace dev-only follows standard rules
+  expect(workspaceDevPeer.cmp(workspaceDev)).toBe("gt"); // peer flag adds to priority
+});
+
+test("non-workspace behavior remains unchanged", () => {
+  const devPeerBehavior = new MockBehavior({ dev: true, peer: true });
+  const peerOnlyBehavior = new MockBehavior({ peer: true });
+  const devOnlyBehavior = new MockBehavior({ dev: true });
+
+  // Without workspace flag, behavior follows standard priority rules
+  expect(devPeerBehavior.cmp(devPeerBehavior)).toBe("eq");
+  expect(devPeerBehavior.cmp(peerOnlyBehavior)).toBe("gt");
+  expect(devPeerBehavior.cmp(devOnlyBehavior)).toBe("gt"); // dev+peer has higher priority than dev-only
+});
+
+test("dependency sorting order matches intended priority", () => {
+  const behaviors = [
+    new MockBehavior({ workspace: true }), // workspace-only (highest priority)
+    new MockBehavior({ prod: true }), // production
+    new MockBehavior({ dev: true }), // dev
+    new MockBehavior({ peer: true }), // peer
+    new MockBehavior({ optional: true }), // optional (lowest priority)
+  ];
+
+  // Test that each behavior has higher priority than the ones that come after it
+  for (let i = 0; i < behaviors.length - 1; i++) {
+    for (let j = i + 1; j < behaviors.length; j++) {
+      const result = behaviors[i].cmp(behaviors[j]);
+      const reverseResult = behaviors[j].cmp(behaviors[i]);
+
+      // Workspace-only should be "lt" (higher priority = lower in sort order)
+      // Others should be "gt" (higher priority = greater in comparison)
+      if (i === 0) {
+        expect(result).toBe("lt");
+        expect(reverseResult).toBe("gt");
+      } else {
+        expect(result).toBe("gt");
+        expect(reverseResult).toBe("lt");
+      }
+    }
+  }
+});
@@ -1,6 +1,7 @@
-import { expect, test } from "bun:test";
+import { test, expect } from "bun:test";
 import { bunEnv, bunExe, tempDirWithFiles } from "harness";
 import { join } from "path";
+import { mkdirSync, rmSync } from "fs";

 test("workspace devDependencies should take priority over peerDependencies for resolution", async () => {
   const dir = tempDirWithFiles("dev-peer-priority", {
@@ -9,7 +10,7 @@ test("workspace devDependencies should take priority over peerDependencies for r
       version: "1.0.0",
       workspaces: {
         packages: ["packages/*"],
-        nodeLinker: "isolated",
+        nodeLinker: "isolated"
       },
     }),
     "packages/lib/package.json": JSON.stringify({
@@ -17,10 +18,10 @@ test("workspace devDependencies should take priority over peerDependencies for r
       version: "1.0.0",
       dependencies: {},
       devDependencies: {
-        "my-dep": "workspace:*", // Use workspace protocol for dev
+        "my-dep": "workspace:*" // Use workspace protocol for dev
       },
       peerDependencies: {
-        "my-dep": "^1.0.0", // Range that wants 1.x
+        "my-dep": "^1.0.0" // Range that wants 1.x
       },
     }),
     "packages/lib/test.js": `const dep = require("my-dep"); console.log(dep.version);`,
@@ -34,26 +35,27 @@ test("workspace devDependencies should take priority over peerDependencies for r
   });

   // Run bun install with a dead registry to ensure no network requests
-  const { stdout, stderr, exitCode } = await new Promise<{ stdout: string; stderr: string; exitCode: number }>(
-    resolve => {
-      const proc = Bun.spawn({
-        cmd: [bunExe(), "install", "--no-progress", "--no-summary"],
-        cwd: dir,
-        env: {
-          ...bunEnv,
-          NPM_CONFIG_REGISTRY: "http://localhost:9999/", // Dead URL - will fail if used
-        },
-        stdout: "pipe",
-        stderr: "pipe",
-      });
+  const { stdout, stderr, exitCode } = await new Promise<{ stdout: string; stderr: string; exitCode: number }>((resolve) => {
+    const proc = Bun.spawn({
+      cmd: [bunExe(), "install", "--no-progress", "--no-summary"],
+      cwd: dir,
+      env: {
+        ...bunEnv,
+        NPM_CONFIG_REGISTRY: "http://localhost:9999/", // Dead URL - will fail if used
+      },
+      stdout: "pipe",
+      stderr: "pipe",
+    });

-      proc.exited.then(exitCode => {
-        Promise.all([new Response(proc.stdout).text(), new Response(proc.stderr).text()]).then(([stdout, stderr]) => {
-          resolve({ stdout, stderr, exitCode });
-        });
-      });
-    },
-  );
+    proc.exited.then((exitCode) => {
+      Promise.all([
+        new Response(proc.stdout).text(),
+        new Response(proc.stderr).text(),
+      ]).then(([stdout, stderr]) => {
+        resolve({ stdout, stderr, exitCode });
+      });
+    });
+  });
@@ -61,28 +63,28 @@ test("workspace devDependencies should take priority over peerDependencies for r
     console.error("stderr:", stderr);
   }
   expect(exitCode).toBe(0);

   // Check that no network requests were made for packages that should be resolved locally
   expect(stderr).not.toContain("GET");
   expect(stderr).not.toContain("http");

   // Check that the lockfile was created correctly
   const lockfilePath = join(dir, "bun.lock");
   expect(await Bun.file(lockfilePath).exists()).toBe(true);

   // Verify that version 2.0.0 (devDependency) was linked
   // If peerDependency range ^1.0.0 was used, it would try to fetch from npm and fail
-  const testResult = await new Promise<string>(resolve => {
+  const testResult = await new Promise<string>((resolve) => {
     const proc = Bun.spawn({
       cmd: [bunExe(), "packages/lib/test.js"],
       cwd: dir,
       env: bunEnv,
       stdout: "pipe",
     });

     new Response(proc.stdout).text().then(resolve);
   });

   expect(testResult.trim()).toBe("2.0.0");
 });
@@ -93,7 +95,7 @@ test("devDependencies and peerDependencies with different versions should coexis
       version: "1.0.0",
       workspaces: {
         packages: ["packages/*"],
-        nodeLinker: "isolated",
+        nodeLinker: "isolated"
       },
     }),
     "packages/lib/package.json": JSON.stringify({
@@ -101,10 +103,10 @@ test("devDependencies and peerDependencies with different versions should coexis
       version: "1.0.0",
       dependencies: {},
       devDependencies: {
-        "utils": "1.0.0",
+        "utils": "1.0.0"
       },
       peerDependencies: {
-        "utils": "^1.0.0",
+        "utils": "^1.0.0"
       },
     }),
     "packages/lib/index.js": `console.log("lib");`,
@@ -117,23 +119,24 @@ test("devDependencies and peerDependencies with different versions should coexis
   });

   // Run bun install in the monorepo
-  const { stdout, stderr, exitCode } = await new Promise<{ stdout: string; stderr: string; exitCode: number }>(
-    resolve => {
-      const proc = Bun.spawn({
-        cmd: [bunExe(), "install", "--no-progress", "--no-summary"],
-        cwd: dir,
-        env: bunEnv,
-        stdout: "pipe",
-        stderr: "pipe",
-      });
+  const { stdout, stderr, exitCode } = await new Promise<{ stdout: string; stderr: string; exitCode: number }>((resolve) => {
+    const proc = Bun.spawn({
+      cmd: [bunExe(), "install", "--no-progress", "--no-summary"],
+      cwd: dir,
+      env: bunEnv,
+      stdout: "pipe",
+      stderr: "pipe",
+    });

-      proc.exited.then(exitCode => {
-        Promise.all([new Response(proc.stdout).text(), new Response(proc.stderr).text()]).then(([stdout, stderr]) => {
-          resolve({ stdout, stderr, exitCode });
-        });
-      });
-    },
-  );
+    proc.exited.then((exitCode) => {
+      Promise.all([
+        new Response(proc.stdout).text(),
+        new Response(proc.stderr).text(),
+      ]).then(([stdout, stderr]) => {
+        resolve({ stdout, stderr, exitCode });
+      });
+    });
+  });
@@ -141,7 +144,7 @@ test("devDependencies and peerDependencies with different versions should coexis
     console.error("stderr:", stderr);
   }
   expect(exitCode).toBe(0);

   // Check that the lockfile was created correctly
   const lockfilePath = join(dir, "bun.lock");
   expect(await Bun.file(lockfilePath).exists()).toBe(true);
@@ -154,33 +157,34 @@ test("dependency behavior comparison prioritizes devDependencies", async () => {
       version: "1.0.0",
       dependencies: {},
       devDependencies: {
-        "typescript": "^5.0.0",
+        "typescript": "^5.0.0"
       },
       peerDependencies: {
-        "typescript": "^4.0.0 || ^5.0.0",
+        "typescript": "^4.0.0 || ^5.0.0"
       },
     }),
     "index.js": `console.log("app");`,
   });

   // Run bun install
-  const { stdout, stderr, exitCode } = await new Promise<{ stdout: string; stderr: string; exitCode: number }>(
-    resolve => {
-      const proc = Bun.spawn({
-        cmd: [bunExe(), "install", "--no-progress", "--no-summary"],
-        cwd: dir,
-        env: bunEnv,
-        stdout: "pipe",
-        stderr: "pipe",
-      });
+  const { stdout, stderr, exitCode } = await new Promise<{ stdout: string; stderr: string; exitCode: number }>((resolve) => {
+    const proc = Bun.spawn({
+      cmd: [bunExe(), "install", "--no-progress", "--no-summary"],
+      cwd: dir,
+      env: bunEnv,
+      stdout: "pipe",
+      stderr: "pipe",
+    });

-      proc.exited.then(exitCode => {
-        Promise.all([new Response(proc.stdout).text(), new Response(proc.stderr).text()]).then(([stdout, stderr]) => {
-          resolve({ stdout, stderr, exitCode });
-        });
-      });
-    },
-  );
+    proc.exited.then((exitCode) => {
+      Promise.all([
+        new Response(proc.stdout).text(),
+        new Response(proc.stderr).text(),
+      ]).then(([stdout, stderr]) => {
+        resolve({ stdout, stderr, exitCode });
+      });
+    });
+  });
@@ -188,7 +192,7 @@ test("dependency behavior comparison prioritizes devDependencies", async () => {
     console.error("stderr:", stderr);
   }
   expect(exitCode).toBe(0);

   // Check that the lockfile was created correctly
   const lockfilePath = join(dir, "bun.lock");
   expect(await Bun.file(lockfilePath).exists()).toBe(true);
@@ -201,7 +205,7 @@ test("Next.js monorepo scenario should not make unnecessary network requests", a
       version: "1.0.0",
       workspaces: {
         packages: ["packages/*"],
-        nodeLinker: "isolated",
+        nodeLinker: "isolated"
       },
     }),
     "packages/web/package.json": JSON.stringify({
@@ -209,10 +213,10 @@ test("Next.js monorepo scenario should not make unnecessary network requests", a
       version: "1.0.0",
       dependencies: {},
       devDependencies: {
-        "next": "15.0.0-canary.119", // Specific canary version for dev
+        "next": "15.0.0-canary.119" // Specific canary version for dev
      },
      peerDependencies: {
-        "next": "^14.0.0 || ^15.0.0", // Range that would accept 14.x or 15.x stable
+        "next": "^14.0.0 || ^15.0.0" // Range that would accept 14.x or 15.x stable
      },
    }),
    "packages/web/test.js": `const next = require("next/package.json"); console.log(next.version);`,
@@ -226,51 +230,52 @@ test("Next.js monorepo scenario should not make unnecessary network requests", a
   });

   // Run bun install with dead registry
-  const { stdout, stderr, exitCode } = await new Promise<{ stdout: string; stderr: string; exitCode: number }>(
-    resolve => {
-      const proc = Bun.spawn({
-        cmd: [bunExe(), "install", "--no-progress", "--no-summary"],
-        cwd: dir,
-        env: {
-          ...bunEnv,
-          NPM_CONFIG_REGISTRY: "http://localhost:9999/", // Dead URL
-        },
-        stdout: "pipe",
-        stderr: "pipe",
-      });
+  const { stdout, stderr, exitCode } = await new Promise<{ stdout: string; stderr: string; exitCode: number }>((resolve) => {
+    const proc = Bun.spawn({
+      cmd: [bunExe(), "install", "--no-progress", "--no-summary"],
+      cwd: dir,
+      env: {
+        ...bunEnv,
+        NPM_CONFIG_REGISTRY: "http://localhost:9999/", // Dead URL
+      },
+      stdout: "pipe",
+      stderr: "pipe",
+    });

-      proc.exited.then(exitCode => {
-        Promise.all([new Response(proc.stdout).text(), new Response(proc.stderr).text()]).then(([stdout, stderr]) => {
-          resolve({ stdout, stderr, exitCode });
-        });
-      });
-    },
-  );
+    proc.exited.then((exitCode) => {
+      Promise.all([
+        new Response(proc.stdout).text(),
+        new Response(proc.stderr).text(),
+      ]).then(([stdout, stderr]) => {
+        resolve({ stdout, stderr, exitCode });
+      });
+    });
+  });

   expect(exitCode).toBe(0);

   // The key test: should not make network requests for packages that exist in workspace
   // When devDependencies are prioritized over peerDependencies, the workspace version should be used
   expect(stderr).not.toContain("GET");
   expect(stderr).not.toContain("404");
   expect(stderr).not.toContain("http");

   // Check that the lockfile was created correctly
   const lockfilePath = join(dir, "bun.lock");
   expect(await Bun.file(lockfilePath).exists()).toBe(true);

   // Verify that version 15.0.0-canary.119 (devDependency) was used
   // If peer range was used, it would try to fetch a stable version from npm and fail
-  const testResult = await new Promise<string>(resolve => {
+  const testResult = await new Promise<string>((resolve) => {
     const proc = Bun.spawn({
       cmd: [bunExe(), "packages/web/test.js"],
       cwd: dir,
       env: bunEnv,
       stdout: "pipe",
     });

     new Response(proc.stdout).text().then(resolve);
   });

   expect(testResult.trim()).toBe("15.0.0-canary.119");
 });