Merge branch 'main' into dylan/return-error-from-lexer-expect

This commit is contained in:
Dylan Conway
2025-10-20 14:16:39 -07:00
672 changed files with 42167 additions and 14417 deletions

View File

@@ -0,0 +1,11 @@
// Bun Snapshot v1, https://bun.sh/docs/test/snapshots
// Auto-generated by `bun test`: records the seeded (seed = 2444615283)
// execution order pinned via toMatchSnapshot() in the bunfig test-options
// suite. Regenerate with `bun test --update-snapshots`; do not hand-edit.
exports[`bunfig.toml test options randomize with seed produces consistent order 1`] = `
[
"echo",
"alpha",
"bravo",
"charlie",
"delta",
]
`;

View File

@@ -0,0 +1,199 @@
import { describe, expect, test } from "bun:test";
import { bunEnv, bunExe, tempDirWithFiles } from "harness";
describe("bunfig.toml test options", () => {
  /**
   * Spawn `bun test` inside `dir` and collect its combined stdout+stderr
   * along with the exit code. Shared by every test in this suite.
   */
  const runBunTest = async (dir: string): Promise<{ exitCode: number; output: string }> => {
    await using proc = Bun.spawn({
      cmd: [bunExe(), "test"],
      env: bunEnv,
      cwd: dir,
      stderr: "pipe",
      stdout: "pipe",
    });
    const [stdout, stderr, exitCode] = await Promise.all([
      new Response(proc.stdout).text(),
      new Response(proc.stderr).text(),
      proc.exited,
    ]);
    return { exitCode, output: stdout + stderr };
  };

  test("randomize with seed produces consistent order", async () => {
    const dir = tempDirWithFiles("bunfig-test-randomize-seed", {
      "test.test.ts": `
import { test, expect } from "bun:test";
test("alpha", () => {
console.log("RUNNING: alpha");
expect(1).toBe(1);
});
test("bravo", () => {
console.log("RUNNING: bravo");
expect(2).toBe(2);
});
test("charlie", () => {
console.log("RUNNING: charlie");
expect(3).toBe(3);
});
test("delta", () => {
console.log("RUNNING: delta");
expect(4).toBe(4);
});
test("echo", () => {
console.log("RUNNING: echo");
expect(5).toBe(5);
});
`,
      "bunfig.toml": `[test]\nrandomize = true\nseed = 2444615283`,
    });

    // Run the suite twice; a fixed seed must yield the same order each run.
    const outputs: string[] = [];
    for (let run = 0; run < 2; run++) {
      const { exitCode, output } = await runBunTest(dir);
      expect(exitCode).toBe(0);
      outputs.push(output);
    }

    // Recover the execution order from the "RUNNING: <name>" log lines.
    const extractOrder = (output: string): string[] => {
      const names: string[] = [];
      for (const match of output.matchAll(/RUNNING: (\w+)/g)) {
        names.push(match[1]);
      }
      return names;
    };
    const order1 = extractOrder(outputs[0]);
    const order2 = extractOrder(outputs[1]);

    // All five tests must have run, in an order reproducible across runs.
    expect(order1.length).toBe(5);
    expect(order2.length).toBe(5);
    expect(order1).toEqual(order2);

    // Declaration order would mean randomization never took effect.
    const alphabetical = ["alpha", "bravo", "charlie", "delta", "echo"];
    expect(order1).not.toEqual(alphabetical);

    // Pin the concrete seeded order so PRNG regressions are caught.
    expect(order1).toMatchSnapshot();
  });

  test("seed without randomize errors", async () => {
    const dir = tempDirWithFiles("bunfig-test-seed-no-randomize", {
      "test.test.ts": `
import { test, expect } from "bun:test";
test("test 1", () => expect(1).toBe(1));
`,
      "bunfig.toml": `[test]\nseed = 2444615283`,
    });

    // A seed with no randomize flag is a configuration error: bun must
    // refuse to run and the message must name both offending options.
    const { exitCode, output } = await runBunTest(dir);
    expect(exitCode).toBe(1);
    expect(output).toContain("seed");
    expect(output).toContain("randomize");
  });

  test("seed with randomize=false errors", async () => {
    const dir = tempDirWithFiles("bunfig-test-seed-randomize-false", {
      "test.test.ts": `
import { test, expect } from "bun:test";
test("test 1", () => expect(1).toBe(1));
`,
      "bunfig.toml": `[test]\nrandomize = false\nseed = 2444615283`,
    });

    // Explicitly disabling randomize while supplying a seed is likewise
    // rejected with a message naming both options.
    const { exitCode, output } = await runBunTest(dir);
    expect(exitCode).toBe(1);
    expect(output).toContain("seed");
    expect(output).toContain("randomize");
  });

  test("rerunEach option works", async () => {
    const dir = tempDirWithFiles("bunfig-test-rerun-each", {
      "test.test.ts": `
import { test, expect } from "bun:test";
let counter = 0;
test("test 1", () => {
counter++;
expect(counter).toBeGreaterThan(0);
});
`,
      "bunfig.toml": `[test]\nrerunEach = 3`,
    });

    const { exitCode, output } = await runBunTest(dir);
    expect(exitCode).toBe(0);
    // rerunEach = 3 repeats the single test three times, so the summary
    // line reports "3 pass" (1 test * 3 runs).
    expect(output).toContain("3 pass");
  });

  test("all test options together", async () => {
    const dir = tempDirWithFiles("bunfig-test-all-options", {
      "test.test.ts": `
import { test, expect } from "bun:test";
test("test 1", () => expect(1).toBe(1));
test("test 2", () => expect(2).toBe(2));
`,
      "bunfig.toml": `[test]\nrandomize = true\nseed = 12345\nrerunEach = 2`,
    });

    const { exitCode, output } = await runBunTest(dir);
    expect(exitCode).toBe(0);
    // 2 tests * rerunEach = 2 gives 4 passing runs in total.
    expect(output).toContain("4 pass");
  });
});

View File

@@ -243,7 +243,7 @@ import path from "path";
expect(pkg).toHaveProperty("devDependencies.@types/react-dom");
expect(fs.existsSync(path.join(temp, "src"))).toBe(true);
expect(fs.existsSync(path.join(temp, "src/index.tsx"))).toBe(true);
expect(fs.existsSync(path.join(temp, "src/index.ts"))).toBe(true);
expect(fs.existsSync(path.join(temp, "tsconfig.json"))).toBe(true);
}, 30_000);
@@ -267,7 +267,7 @@ import path from "path";
expect(pkg).toHaveProperty("dependencies.bun-plugin-tailwind");
expect(fs.existsSync(path.join(temp, "src"))).toBe(true);
expect(fs.existsSync(path.join(temp, "src/index.tsx"))).toBe(true);
expect(fs.existsSync(path.join(temp, "src/index.ts"))).toBe(true);
}, 30_000);
test("bun init --react=shadcn works", async () => {
@@ -291,7 +291,7 @@ import path from "path";
expect(pkg).toHaveProperty("dependencies.bun-plugin-tailwind");
expect(fs.existsSync(path.join(temp, "src"))).toBe(true);
expect(fs.existsSync(path.join(temp, "src/index.tsx"))).toBe(true);
expect(fs.existsSync(path.join(temp, "src/index.ts"))).toBe(true);
expect(fs.existsSync(path.join(temp, "src/components"))).toBe(true);
expect(fs.existsSync(path.join(temp, "src/components/ui"))).toBe(true);
}, 30_000);

View File

@@ -468,11 +468,15 @@ describe.if(isPosix)("BunFrontendDevServer inspector protocol", () => {
test("should notify on clientNavigated events", async () => {
await fetch(serverUrl.href).then(r => r.blob());
// IMPORTANT: Set up event listener BEFORE creating WebSocket to avoid race condition
// The clientConnected event is sent immediately in onOpen, so we must listen first
const connectedEventPromise = session.waitForEvent("BunFrontendDevServer.clientConnected");
// Connect a client to trigger connection events
const ws = await createHMRClient();
// Wait for clientConnected event to get the connectionId
const connectedEvent = await session.waitForEvent("BunFrontendDevServer.clientConnected");
const connectedEvent = await connectedEventPromise;
const connectionId = connectedEvent.connectionId;
// Listen for clientNavigated event
@@ -500,11 +504,15 @@ describe.if(isPosix)("BunFrontendDevServer inspector protocol", () => {
test("should notify on consoleLog events", async () => {
await fetch(serverUrl.href).then(r => r.blob());
// IMPORTANT: Set up event listener BEFORE creating WebSocket to avoid race condition
// The clientConnected event is sent immediately in onOpen, so we must listen first
const connectedEventPromise = session.waitForEvent("BunFrontendDevServer.clientConnected");
// Connect a client to trigger connection events
const ws = await createHMRClient();
// Wait for clientConnected event to get the connectionId
const connectedEvent = await session.waitForEvent("BunFrontendDevServer.clientConnected");
const connectedEvent = await connectedEventPromise;
// Listen for consoleLog event
const consoleLogPromise = session.waitForEvent("BunFrontendDevServer.consoleLog");

View File

@@ -34,22 +34,3 @@ error: "workspaces.packages" expects an array of strings, e.g.
`;
exports[`should handle modified git resolutions in bun.lock 1`] = `"{"lockfileVersion":0,"workspaces":{"":{"dependencies":{"jquery":"3.7.1"}}},"packages":{"jquery":["jquery@git+ssh://git@github.com/dylan-conway/install-test-8.git#3a1288830817d13da39e9231302261896f8721ea",{},"3a1288830817d13da39e9231302261896f8721ea"]}}"`;
exports[`should read install.saveTextLockfile from bunfig.toml 1`] = `
"{
"lockfileVersion": 1,
"workspaces": {
"": {
"name": "foo",
},
"packages/pkg1": {
"name": "pkg-one",
"version": "1.0.0",
},
},
"packages": {
"pkg-one": ["pkg-one@workspace:packages/pkg1"],
}
}
"
`;

View File

@@ -1,6 +1,6 @@
import { spawnSync } from "bun";
import { beforeAll, beforeEach, expect, setDefaultTimeout, test } from "bun:test";
import { writeFileSync } from "fs";
import { mkdirSync, writeFileSync } from "fs";
import { bunEnv, bunExe, tmpdirSync } from "harness";
let cwd: string;
@@ -38,3 +38,89 @@ test("bad workspace path", () => {
expect(exitCode).toBe(1);
});
test("workspace with ./ should not crash", () => {
  // Root package lists itself ("./") among its own workspaces — a shape
  // that previously crashed the installer.
  const rootManifest = {
    name: "my-app",
    version: "1.0.0",
    workspaces: ["./", "some-workspace"],
    devDependencies: {
      "@eslint/js": "^9.28.0",
    },
  };
  writeFileSync(`${cwd}/package.json`, JSON.stringify(rootManifest, null, 2));

  // One real workspace package alongside the self-reference.
  mkdirSync(`${cwd}/some-workspace`);
  const workspaceManifest = {
    name: "some-workspace",
    version: "1.0.0",
  };
  writeFileSync(`${cwd}/some-workspace/package.json`, JSON.stringify(workspaceManifest, null, 2));

  const result = spawnSync({
    cmd: [bunExe(), "install"],
    cwd,
    env: bunEnv,
    stderr: "pipe",
    stdout: "pipe",
  });

  // Install must succeed cleanly rather than panic.
  const errText = result.stderr!.toString();
  expect(result.exitCode).toBe(0);
  expect(errText).not.toContain("panic");
  expect(errText).not.toContain("Internal assertion failure");
});
test("workspace with .\\ should not crash", () => {
  // Same self-referencing-workspace scenario, but with the Windows-style
  // ".\\" spelling of the current directory.
  const rootManifest = {
    name: "my-app",
    version: "1.0.0",
    workspaces: [".\\", "some-workspace"],
    devDependencies: {
      "@eslint/js": "^9.28.0",
    },
  };
  writeFileSync(`${cwd}/package.json`, JSON.stringify(rootManifest, null, 2));

  // One real workspace package alongside the self-reference.
  mkdirSync(`${cwd}/some-workspace`);
  const workspaceManifest = {
    name: "some-workspace",
    version: "1.0.0",
  };
  writeFileSync(`${cwd}/some-workspace/package.json`, JSON.stringify(workspaceManifest, null, 2));

  const result = spawnSync({
    cmd: [bunExe(), "install"],
    cwd,
    env: bunEnv,
    stderr: "pipe",
    stdout: "pipe",
  });

  // Install must succeed cleanly rather than panic.
  const errText = result.stderr!.toString();
  expect(result.exitCode).toBe(0);
  expect(errText).not.toContain("panic");
  expect(errText).not.toContain("Internal assertion failure");
});

View File

@@ -1064,6 +1064,9 @@ it("should add dependency alongside workspaces", async () => {
name: "foo",
version: "0.0.1",
workspaces: ["packages/*"],
"dependencies": {
"bar": "workspace:*",
},
}),
);
await mkdir(join(package_dir, "packages", "bar"), { recursive: true });
@@ -1097,7 +1100,14 @@ it("should add dependency alongside workspaces", async () => {
expect(await exited).toBe(0);
expect(urls.sort()).toEqual([`${root_url}/baz`, `${root_url}/baz-0.0.3.tgz`]);
expect(requested).toBe(2);
expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([".bin", ".cache", "bar", "baz"]);
expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([
".bin",
".bun",
".cache",
expect.stringContaining(".old_modules-"),
"bar",
"baz",
]);
expect(await readdirSorted(join(package_dir, "node_modules", ".bin"))).toHaveBins(["baz-run"]);
expect(join(package_dir, "node_modules", ".bin", "baz-run")).toBeValidBin(join("..", "baz", "index.js"));
expect(await readlink(join(package_dir, "node_modules", "bar"))).toBeWorkspaceLink(join("..", "packages", "bar"));
@@ -1117,6 +1127,7 @@ it("should add dependency alongside workspaces", async () => {
version: "0.0.1",
workspaces: ["packages/*"],
dependencies: {
bar: "workspace:*",
baz: "^0.0.3",
},
},
@@ -2124,9 +2135,7 @@ it("should add dependencies to workspaces directly", async () => {
expect(await readdirSorted(join(package_dir, "moo"))).toEqual(["bunfig.toml", "node_modules", "package.json"]);
expect(await readdirSorted(join(package_dir, "moo", "node_modules", "foo"))).toEqual(["package.json"]);
if (process.platform === "win32") {
expect(await file(await readlink(join(package_dir, "moo", "node_modules", "foo", "package.json"))).json()).toEqual(
fooPackage,
);
expect(await file(join(package_dir, "moo", "node_modules", "foo", "package.json")).json()).toEqual(fooPackage);
} else {
expect(await file(join(package_dir, "moo", "node_modules", "foo", "package.json")).json()).toEqual(fooPackage);
}
@@ -2137,7 +2146,11 @@ it("should add dependencies to workspaces directly", async () => {
foo: `file:${add_path.replace(/\\/g, "/")}`,
},
});
expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([".cache", "moo"]);
expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([
".bun",
".cache",
expect.stringContaining(".old_modules-"),
]);
});
it("should redirect 'install --save X' to 'add'", async () => {

View File

@@ -19,7 +19,9 @@ expect.extend({
beforeAll(dummyBeforeAll);
afterAll(dummyAfterAll);
beforeEach(dummyBeforeEach);
beforeEach(async () => {
await dummyBeforeEach();
});
afterEach(dummyAfterEach);
describe("bun install --cpu and --os flags", () => {

View File

@@ -1,5 +1,6 @@
import { $ } from "bun";
import { beforeAll, describe, expect, it, setDefaultTimeout, test } from "bun:test";
import { rmSync } from "fs";
import { bunEnv, bunExe, normalizeBunSnapshot as normalizeBunSnapshot_, tempDirWithFiles } from "harness";
import { join } from "path";
@@ -114,14 +115,7 @@ index c8950c17b265104bcf27f8c345df1a1b13a78950..7ce57ab96400ab0ff4fac7e06f6e02c2
}
: (x: string) => x;
const versions: [version: string, patchVersion?: string][] = [
["1.0.0"],
["github:i-voted-for-trump/is-even", "github:i-voted-for-trump/is-even#585f800"],
[
"git@github.com:i-voted-for-trump/is-even.git",
"git+ssh://git@github.com:i-voted-for-trump/is-even.git#585f8002bb16f7bec723a47349b67df451f1b25d",
],
];
const versions: [version: string, patchVersion?: string][] = [["1.0.0"]];
describe("should patch a dependency when its dependencies are not hoisted", async () => {
// is-even depends on is-odd ^0.1.2 and we add is-odd 3.0.1, which should be hoisted
@@ -559,7 +553,7 @@ index 832d92223a9ec491364ee10dcbe3ad495446ab80..7e079a817825de4b8c3d01898490dc7e
await Bun.write(join(filedir, "package.json"), JSON.stringify(pkgjsonWithPatch));
await using proc = Bun.spawn({
cmd: [bunExe(), "install"],
cmd: [bunExe(), "install", "--linker=hoisted"],
env: bunEnv,
cwd: filedir,
stdout: "pipe",
@@ -577,4 +571,374 @@ index 832d92223a9ec491364ee10dcbe3ad495446ab80..7e079a817825de4b8c3d01898490dc7e
expect(normalizeBunSnapshot(stdout)).toMatchInlineSnapshot(`"bun install <version> (<revision>)"`);
}
});
describe("bun patch with --linker=isolated", () => {
test("should create patch for package and commit it", async () => {
const filedir = tempDirWithFiles("patch-isolated", {
"package.json": JSON.stringify({
"name": "bun-patch-isolated-test",
"module": "index.ts",
"type": "module",
"dependencies": {
"is-even": "1.0.0",
},
}),
"index.ts": /* ts */ `import isEven from 'is-even'; console.log(isEven(2));`,
});
// Install with isolated linker
await $`${bunExe()} install --linker=isolated`.env(bunEnv).cwd(filedir);
// Run bun patch command
const { stdout: patchStdout } = await $`${bunExe()} patch is-even`.env(bunEnv).cwd(filedir);
const patchOutput = patchStdout.toString();
const relativePatchPath =
patchOutput.match(/To patch .+, edit the following folder:\s*\n\s*(.+)/)?.[1]?.trim() ||
patchOutput.match(/edit the following folder:\s*\n\s*(.+)/)?.[1]?.trim();
expect(relativePatchPath).toBeTruthy();
const patchPath = join(filedir, relativePatchPath!);
// Edit the patched package
const indexPath = join(patchPath, "index.js");
const originalContent = await Bun.file(indexPath).text();
const modifiedContent = originalContent.replace(
"module.exports = function isEven(i) {",
'module.exports = function isEven(i) {\n console.log("PATCHED with isolated linker!");',
);
await Bun.write(indexPath, modifiedContent);
// Commit the patch
const { stderr: commitStderr } = await $`${bunExe()} patch --commit '${relativePatchPath}'`
.env(bunEnv)
.cwd(filedir);
// With isolated linker, there may be some stderr output during patch commit
// but it should not contain actual errors
const commitStderrText = commitStderr.toString();
expect(commitStderrText).not.toContain("error:");
expect(commitStderrText).not.toContain("panic:");
// Verify patch file was created
const patchFile = join(filedir, "patches", "is-even@1.0.0.patch");
expect(await Bun.file(patchFile).exists()).toBe(true);
// Verify package.json was updated
const pkgJson = await Bun.file(join(filedir, "package.json")).json();
expect(pkgJson.patchedDependencies).toEqual({
"is-even@1.0.0": "patches/is-even@1.0.0.patch",
});
// Run the code to verify patch was applied
const { stdout, stderr } = await $`${bunExe()} run index.ts`.env(bunEnv).cwd(filedir);
expect(stderr.toString()).toBe("");
expect(stdout.toString()).toContain("PATCHED with isolated linker!");
});
test("should patch transitive dependency with isolated linker", async () => {
const filedir = tempDirWithFiles("patch-isolated-transitive", {
"package.json": JSON.stringify({
"name": "bun-patch-isolated-transitive-test",
"module": "index.ts",
"type": "module",
"dependencies": {
"is-even": "1.0.0",
},
}),
"index.ts": /* ts */ `import isEven from 'is-even'; console.log(isEven(3));`,
});
// Install with isolated linker
await $`${bunExe()} install --linker=isolated`.env(bunEnv).cwd(filedir);
await $`${bunExe()} patch is-odd`.env(bunEnv).cwd(filedir);
// Patch transitive dependency (is-odd)
const { stdout: patchStdout } = await $`${bunExe()} patch is-odd@0.1.2`.env(bunEnv).cwd(filedir);
const patchOutput = patchStdout.toString();
const relativePatchPath =
patchOutput.match(/To patch .+, edit the following folder:\s*\n\s*(.+)/)?.[1]?.trim() ||
patchOutput.match(/edit the following folder:\s*\n\s*(.+)/)?.[1]?.trim();
expect(relativePatchPath).toBeTruthy();
const patchPath = join(filedir, relativePatchPath!);
// Edit the patched package
const indexPath = join(patchPath, "index.js");
const originalContent = await Bun.file(indexPath).text();
const modifiedContent = originalContent.replace(
"module.exports = function isOdd(i) {",
'module.exports = function isOdd(i) {\n console.log("Transitive patch with isolated!");',
);
await Bun.write(indexPath, modifiedContent);
// Commit the patch
const { stderr: commitStderr } = await $`${bunExe()} patch --commit '${relativePatchPath}'`
.env(bunEnv)
.cwd(filedir);
await $`${bunExe()} i --linker isolated`.env(bunEnv).cwd(filedir);
// With isolated linker, there may be some stderr output during patch commit
// but it should not contain actual errors
const commitStderrText = commitStderr.toString();
expect(commitStderrText).not.toContain("error:");
expect(commitStderrText).not.toContain("panic:");
// Verify patch was applied
const { stdout, stderr } = await $`${bunExe()} run index.ts`.env(bunEnv).cwd(filedir);
expect(stderr.toString()).toBe("");
expect(stdout.toString()).toContain("Transitive patch with isolated!");
});
test("should handle scoped packages with isolated linker", async () => {
const filedir = tempDirWithFiles("patch-isolated-scoped", {
"package.json": JSON.stringify({
"name": "bun-patch-isolated-scoped-test",
"module": "index.ts",
"type": "module",
"dependencies": {
"@zackradisic/hls-dl": "0.0.1",
},
}),
"index.ts": /* ts */ `import hlsDl from '@zackradisic/hls-dl'; console.log("Testing scoped package");`,
});
// Install with isolated linker
await $`${bunExe()} install --linker=isolated`.env(bunEnv).cwd(filedir);
// Patch scoped package
const { stdout: patchStdout } = await $`${bunExe()} patch @zackradisic/hls-dl`.env(bunEnv).cwd(filedir);
const patchOutput = patchStdout.toString();
const relativePatchPath =
patchOutput.match(/To patch .+, edit the following folder:\s*\n\s*(.+)/)?.[1]?.trim() ||
patchOutput.match(/edit the following folder:\s*\n\s*(.+)/)?.[1]?.trim();
expect(relativePatchPath).toBeTruthy();
const patchPath = join(filedir, relativePatchPath!);
// Create a new index.js in the patched package
const indexPath = join(patchPath, "index.js");
await Bun.write(indexPath, `module.exports = () => 'SCOPED PACKAGE PATCHED with isolated!';`);
// Update package.json to point to the new index.js
const pkgJsonPath = join(patchPath, "package.json");
const pkgJson = await Bun.file(pkgJsonPath).json();
pkgJson.main = "./index.js";
await Bun.write(pkgJsonPath, JSON.stringify(pkgJson, null, 2));
// Commit the patch
const { stderr: commitStderr } = await $`${bunExe()} patch --commit '${relativePatchPath}'`
.env(bunEnv)
.cwd(filedir);
// With isolated linker, there may be some stderr output during patch commit
// but it should not contain actual errors
const commitStderrText = commitStderr.toString();
expect(commitStderrText).not.toContain("error:");
expect(commitStderrText).not.toContain("panic:");
// Update index.ts to actually use the patched module
await Bun.write(
join(filedir, "index.ts"),
/* ts */ `import hlsDl from '@zackradisic/hls-dl'; console.log(hlsDl());`,
);
// Verify patch was applied
const { stdout, stderr } = await $`${bunExe()} run index.ts`.env(bunEnv).cwd(filedir);
expect(stderr.toString()).toBe("");
expect(stdout.toString()).toContain("SCOPED PACKAGE PATCHED with isolated!");
});
test("should work with workspaces and isolated linker", async () => {
const filedir = tempDirWithFiles("patch-isolated-workspace", {
"package.json": JSON.stringify({
"name": "workspace-root",
"workspaces": ["packages/*"],
}),
packages: {
app: {
"package.json": JSON.stringify({
"name": "app",
"dependencies": {
"is-even": "1.0.0",
},
}),
"index.ts": /* ts */ `import isEven from 'is-even'; console.log(isEven(4));`,
},
},
});
// Install with isolated linker
await $`${bunExe()} install --linker=isolated`.env(bunEnv).cwd(filedir);
// Patch from workspace root
const { stdout: patchStdout } = await $`${bunExe()} patch is-even`.env(bunEnv).cwd(filedir);
const patchOutput = patchStdout.toString();
const relativePatchPath =
patchOutput.match(/To patch .+, edit the following folder:\s*\n\s*(.+)/)?.[1]?.trim() ||
patchOutput.match(/edit the following folder:\s*\n\s*(.+)/)?.[1]?.trim();
expect(relativePatchPath).toBeTruthy();
const patchPath = join(filedir, relativePatchPath!);
// Edit the patched package
const indexPath = join(patchPath, "index.js");
const originalContent = await Bun.file(indexPath).text();
const modifiedContent = originalContent.replace(
"module.exports = function isEven(i) {",
'module.exports = function isEven(i) {\n console.log("WORKSPACE PATCH with isolated!");',
);
await Bun.write(indexPath, modifiedContent);
// Commit the patch
const { stderr: commitStderr } = await $`${bunExe()} patch --commit '${relativePatchPath}'`
.env(bunEnv)
.cwd(filedir);
// With isolated linker, there may be some stderr output during patch commit
// but it should not contain actual errors
const commitStderrText = commitStderr.toString();
expect(commitStderrText).not.toContain("error:");
expect(commitStderrText).not.toContain("panic:");
// Verify root package.json was updated
const rootPkgJson = await Bun.file(join(filedir, "package.json")).json();
expect(rootPkgJson.patchedDependencies).toEqual({
"is-even@1.0.0": "patches/is-even@1.0.0.patch",
});
// Run from workspace package to verify patch was applied
const { stdout, stderr } = await $`${bunExe()} run index.ts`.env(bunEnv).cwd(join(filedir, "packages", "app"));
expect(stderr.toString()).toBe("");
expect(stdout.toString()).toContain("WORKSPACE PATCH with isolated!");
});
test("should preserve patch after reinstall with isolated linker", async () => {
const filedir = tempDirWithFiles("patch-isolated-reinstall", {
"package.json": JSON.stringify({
"name": "bun-patch-isolated-reinstall-test",
"module": "index.ts",
"type": "module",
"dependencies": {
"is-even": "1.0.0",
},
}),
"index.ts": /* ts */ `import isEven from 'is-even'; console.log(isEven(6));`,
});
// Install with isolated linker
await $`${bunExe()} install --linker=isolated`.env(bunEnv).cwd(filedir);
// Create and commit a patch
const { stdout: patchStdout } = await $`${bunExe()} patch is-even`.env(bunEnv).cwd(filedir);
const patchOutput = patchStdout.toString();
const relativePatchPath =
patchOutput.match(/To patch .+, edit the following folder:\s*\n\s*(.+)/)?.[1]?.trim() ||
patchOutput.match(/edit the following folder:\s*\n\s*(.+)/)?.[1]?.trim();
expect(relativePatchPath).toBeTruthy();
const patchPath = join(filedir, relativePatchPath!);
const indexPath = join(patchPath, "index.js");
const originalContent = await Bun.file(indexPath).text();
const modifiedContent = originalContent.replace(
"module.exports = function isEven(i) {",
'module.exports = function isEven(i) {\n console.log("REINSTALL TEST with isolated!");',
);
await Bun.write(indexPath, modifiedContent);
await $`${bunExe()} patch --commit '${relativePatchPath}'`.env(bunEnv).cwd(filedir);
// Delete node_modules and reinstall with isolated linker
rmSync(join(filedir, "node_modules"), { force: true, recursive: true });
await $`${bunExe()} install --linker=isolated`.env(bunEnv).cwd(filedir);
// Verify patch is still applied
const { stdout, stderr } = await $`${bunExe()} run index.ts`.env(bunEnv).cwd(filedir);
expect(stderr.toString()).toBe("");
expect(stdout.toString()).toContain("REINSTALL TEST with isolated!");
});
test("should handle multiple patches with isolated linker", async () => {
const filedir = tempDirWithFiles("patch-isolated-multiple", {
"package.json": JSON.stringify({
"name": "bun-patch-isolated-multiple-test",
"module": "index.ts",
"type": "module",
"dependencies": {
"is-even": "1.0.0",
"is-odd": "3.0.1",
},
}),
"index.ts": /* ts */ `
import isEven from 'is-even';
import isOdd from 'is-odd';
console.log(isEven(8));
console.log(isOdd(9));
`,
});
// Install with isolated linker
await $`${bunExe()} install --linker=isolated`.env(bunEnv).cwd(filedir);
// Patch first package (is-even)
const { stdout: patchStdout1 } = await $`${bunExe()} patch is-even`.env(bunEnv).cwd(filedir);
const patchOutput1 = patchStdout1.toString();
const relativePatchPath1 =
patchOutput1.match(/To patch .+, edit the following folder:\s*\n\s*(.+)/)?.[1]?.trim() ||
patchOutput1.match(/edit the following folder:\s*\n\s*(.+)/)?.[1]?.trim();
expect(relativePatchPath1).toBeTruthy();
const patchPath1 = join(filedir, relativePatchPath1!);
const indexPath1 = join(patchPath1, "index.js");
const originalContent1 = await Bun.file(indexPath1).text();
const modifiedContent1 = originalContent1.replace(
"module.exports = function isEven(i) {",
'module.exports = function isEven(i) {\n console.log("is-even PATCHED with isolated!");',
);
await Bun.write(indexPath1, modifiedContent1);
const { stderr: commitStderr1 } = await $`${bunExe()} patch --commit '${relativePatchPath1}'`
.env(bunEnv)
.cwd(filedir);
// Check for errors
const commitStderrText1 = commitStderr1.toString();
expect(commitStderrText1).not.toContain("error:");
expect(commitStderrText1).not.toContain("panic:");
// Patch second package (is-odd hoisted version)
const { stdout: patchStdout2 } = await $`${bunExe()} patch is-odd@3.0.1`.env(bunEnv).cwd(filedir);
const patchOutput2 = patchStdout2.toString();
const relativePatchPath2 =
patchOutput2.match(/To patch .+, edit the following folder:\s*\n\s*(.+)/)?.[1]?.trim() ||
patchOutput2.match(/edit the following folder:\s*\n\s*(.+)/)?.[1]?.trim();
expect(relativePatchPath2).toBeTruthy();
const patchPath2 = join(filedir, relativePatchPath2!);
const indexPath2 = join(patchPath2, "index.js");
const originalContent2 = await Bun.file(indexPath2).text();
const modifiedContent2 = originalContent2.replace(
"module.exports = function isOdd(value) {",
'module.exports = function isOdd(value) {\n console.log("is-odd PATCHED with isolated!");',
);
await Bun.write(indexPath2, modifiedContent2);
const { stderr: commitStderr2 } = await $`${bunExe()} patch --commit '${relativePatchPath2}'`
.env(bunEnv)
.cwd(filedir);
// Check for errors
const commitStderrText2 = commitStderr2.toString();
expect(commitStderrText2).not.toContain("error:");
expect(commitStderrText2).not.toContain("panic:");
// Verify both patches were applied
const { stdout, stderr } = await $`${bunExe()} run index.ts`.env(bunEnv).cwd(filedir);
expect(stderr.toString()).toBe("");
expect(stdout.toString()).toContain("is-even PATCHED with isolated!");
expect(stdout.toString()).toContain("is-odd PATCHED with isolated!");
// Verify package.json has both patches
const pkgJson = await Bun.file(join(filedir, "package.json")).json();
expect(pkgJson.patchedDependencies).toEqual({
"is-even@1.0.0": "patches/is-even@1.0.0.patch",
"is-odd@3.0.1": "patches/is-odd@3.0.1.patch",
});
});
});
});

View File

@@ -14,7 +14,9 @@ import {
beforeAll(dummyBeforeAll);
afterAll(dummyAfterAll);
beforeEach(dummyBeforeEach);
beforeEach(async () => {
await dummyBeforeEach();
});
afterEach(dummyAfterEach);
function test(

View File

@@ -67,7 +67,9 @@ beforeAll(() => {
});
afterAll(dummyAfterAll);
beforeEach(dummyBeforeEach);
beforeEach(async () => {
await dummyBeforeEach({ linker: "hoisted" });
});
afterEach(dummyAfterEach);
for (let input of ["abcdef", "65537", "-1"]) {
@@ -3528,12 +3530,12 @@ it("should handle bitbucket git dependencies", async () => {
"",
`+ public-install-test@git+ssh://${dep}#79265e2d9754c60b60f97cc8d859fb6da073b5d2`,
"",
"1 package installed",
expect.stringContaining("installed"),
]);
expect(await exited).toBe(0);
await access(join(package_dir, "bun.lockb"));
dummyAfterEach();
dummyBeforeEach();
await dummyAfterEach();
await dummyBeforeEach({ linker: "isolated" });
}
for (const dep of deps) {
@@ -3564,12 +3566,12 @@ it("should handle bitbucket git dependencies", async () => {
"",
`installed publicinstalltest@git+ssh://${dep}#79265e2d9754c60b60f97cc8d859fb6da073b5d2`,
"",
"1 package installed",
expect.stringContaining("installed"),
]);
expect(await exited).toBe(0);
await access(join(package_dir, "bun.lockb"));
dummyAfterEach();
dummyBeforeEach();
await dummyAfterEach();
await dummyBeforeEach({ linker: "isolated" });
}
});
@@ -3605,12 +3607,12 @@ it("should handle gitlab git dependencies", async () => {
"",
`+ public-install-test@git+ssh://${dep}#93f3aa4ec9ca8a0bacc010776db48bfcd915c44c`,
"",
"1 package installed",
expect.stringContaining("installed"),
]);
expect(await exited).toBe(0);
await access(join(package_dir, "bun.lockb"));
dummyAfterEach();
dummyBeforeEach();
await dummyAfterEach();
await dummyBeforeEach({ linker: "isolated" });
}
for (const dep of deps) {
@@ -3641,12 +3643,12 @@ it("should handle gitlab git dependencies", async () => {
"",
`installed public-install-test@git+ssh://${dep}#93f3aa4ec9ca8a0bacc010776db48bfcd915c44c`,
"",
"1 package installed",
expect.stringContaining("installed"),
]);
expect(await exited).toBe(0);
await access(join(package_dir, "bun.lockb"));
dummyAfterEach();
dummyBeforeEach();
await dummyAfterEach();
await dummyBeforeEach({ linker: "isolated" });
}
});
@@ -6977,7 +6979,7 @@ it("should handle installing workspaces with more complicated globs", async () =
.toString()
.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "")
.split(/\r?\n/),
).toEqual([expect.stringContaining("bun install v1."), "", "4 packages installed"]);
).toEqual([expect.stringContaining("bun install v1."), "", "Checked 7 installs across 5 packages (no changes)"]);
});
it("should handle installing workspaces with multiple glob patterns", async () => {
@@ -7040,7 +7042,7 @@ it("should handle installing workspaces with multiple glob patterns", async () =
.toString()
.replace(/\s*\[[0-9\.]+m?s\]\s*$/, "")
.split(/\r?\n/),
).toEqual([expect.stringContaining("bun install v1."), "", "4 packages installed"]);
).toEqual([expect.stringContaining("bun install v1."), "", "Checked 7 installs across 5 packages (no changes)"]);
});
it.todo("should handle installing workspaces with absolute glob patterns", async () => {
@@ -8433,6 +8435,9 @@ saveTextLockfile = true
JSON.stringify({
name: "foo",
workspaces: ["packages/*"],
dependencies: {
"pkg-one": "workspace:*",
},
}),
),
write(
@@ -8456,7 +8461,7 @@ saveTextLockfile = true
expect(err).not.toContain("error:");
expect(err).toContain("Saved lockfile");
const out = await stdout.text();
expect(out).toContain("1 package installed");
expect(out).toContain("Checked 3 installs across 2 packages (no changes)");
expect(await exited).toBe(0);
expect(await Bun.file(join(package_dir, "node_modules", "pkg-one", "package.json")).json()).toEqual({
@@ -8464,7 +8469,27 @@ saveTextLockfile = true
version: "1.0.0",
});
expect(await exists(join(package_dir, "bun.lockb"))).toBeFalse();
expect(await file(join(package_dir, "bun.lock")).text()).toMatchSnapshot();
expect(await file(join(package_dir, "bun.lock")).text()).toMatchInlineSnapshot(`
"{
"lockfileVersion": 1,
"workspaces": {
"": {
"name": "foo",
"dependencies": {
"pkg-one": "workspace:*",
},
},
"packages/pkg1": {
"name": "pkg-one",
"version": "1.0.0",
},
},
"packages": {
"pkg-one": ["pkg-one@workspace:packages/pkg1"],
}
}
"
`);
});
test("providing invalid url in lockfile does not crash", async () => {

View File

@@ -4,6 +4,7 @@ import { access, mkdir, writeFile } from "fs/promises";
import {
bunExe,
bunEnv as env,
isWindows,
readdirSorted,
runBunInstall,
stderrForInstall,
@@ -62,7 +63,7 @@ it("should link and unlink workspace package", async () => {
expect(out.replace(/\s*\[[0-9\.]+ms\]\s*$/, "").split(/\r?\n/)).toEqual([
expect.stringContaining("bun install v1."),
"",
"2 packages installed",
"Done! Checked 3 packages (no changes)",
]);
let { stdout, stderr, exited } = spawn({
@@ -366,11 +367,12 @@ it("should link dependency without crashing", async () => {
name: link_name,
version: "0.0.1",
bin: {
[link_name]: `${link_name}.js`,
[link_name]: `${link_name}.py`,
},
}),
);
await writeFile(join(link_dir, `${link_name}.js`), "console.log(42);");
// Use a Python script with \r\n shebang to test normalization
await writeFile(join(link_dir, `${link_name}.py`), "#!/usr/bin/env python\r\nprint('hello from python')");
await writeFile(
join(package_dir, "package.json"),
JSON.stringify({
@@ -413,10 +415,18 @@ it("should link dependency without crashing", async () => {
expect(await exited2).toBe(0);
expect(await readdirSorted(join(package_dir, "node_modules"))).toEqual([".bin", ".cache", link_name].sort());
expect(await readdirSorted(join(package_dir, "node_modules", ".bin"))).toHaveBins([link_name]);
expect(join(package_dir, "node_modules", ".bin", link_name)).toBeValidBin(join("..", link_name, `${link_name}.js`));
expect(join(package_dir, "node_modules", ".bin", link_name)).toBeValidBin(join("..", link_name, `${link_name}.py`));
expect(await readdirSorted(join(package_dir, "node_modules", link_name))).toEqual(
["package.json", `${link_name}.js`].sort(),
["package.json", `${link_name}.py`].sort(),
);
// Verify that the shebang was normalized from \r\n to \n (only on non-Windows)
const binContent = await file(join(package_dir, "node_modules", link_name, `${link_name}.py`)).text();
if (isWindows) {
expect(binContent).toStartWith("#!/usr/bin/env python\r\nprint");
} else {
expect(binContent).toStartWith("#!/usr/bin/env python\nprint");
expect(binContent).not.toContain("\r\n");
}
await access(join(package_dir, "bun.lockb"));
const {

View File

@@ -1280,6 +1280,112 @@ describe("bins", () => {
expect(tarball.entries[1].perm & (0o644 | 0o111)).toBe(0o644 | 0o111);
expect(tarball.entries[2].perm & (0o644 | 0o111)).toBe(0o644 | 0o111);
});
test('are included even if not included in "files"', async () => {
await Promise.all([
write(
join(packageDir, "package.json"),
JSON.stringify({
name: "pack-bins-and-files-1",
version: "2.2.2",
files: ["dist"],
bin: "bin.js",
}),
),
write(join(packageDir, "dist", "hi.js"), "console.log('hi!')"),
write(join(packageDir, "bin.js"), "console.log('hello')"),
]);
await pack(packageDir, bunEnv);
const tarball = readTarball(join(packageDir, "pack-bins-and-files-1-2.2.2.tgz"));
expect(tarball.entries).toMatchObject([
{
pathname: "package/package.json",
},
{
pathname: "package/bin.js",
},
{
pathname: "package/dist/hi.js",
},
]);
});
test('"directories" works with "files"', async () => {
await Promise.all([
write(
join(packageDir, "package.json"),
JSON.stringify({
name: "pack-bins-and-files-2",
version: "1.2.3",
files: ["dist"],
directories: {
bin: "bins",
},
}),
),
write(join(packageDir, "dist", "hi.js"), "console.log('hi!')"),
write(join(packageDir, "bins", "bin.js"), "console.log('hello')"),
write(join(packageDir, "bins", "what", "what.js"), "console.log('hello')"),
]);
await pack(packageDir, bunEnv);
const tarball = readTarball(join(packageDir, "pack-bins-and-files-2-1.2.3.tgz"));
expect(tarball.entries).toMatchObject([
{
pathname: "package/package.json",
},
{
pathname: "package/bins/bin.js",
},
{
pathname: "package/bins/what/what.js",
},
{
pathname: "package/dist/hi.js",
},
]);
});
test('deduplicate with "files"', async () => {
await Promise.all([
write(
join(packageDir, "package.json"),
JSON.stringify({
name: "pack-bins-and-files-2",
version: "1.2.3",
files: ["dist", "bins/bin.js"],
directories: {
bin: "bins",
},
}),
),
write(join(packageDir, "dist", "hi.js"), "console.log('hi!')"),
write(join(packageDir, "bins", "bin.js"), "console.log('hello')"),
write(join(packageDir, "bins", "what", "what.js"), "console.log('hello')"),
]);
await pack(packageDir, bunEnv);
const tarball = readTarball(join(packageDir, "pack-bins-and-files-2-1.2.3.tgz"));
expect(tarball.entries).toMatchObject([
{
pathname: "package/package.json",
},
{
pathname: "package/bins/bin.js",
},
{
pathname: "package/bins/what/what.js",
},
{
pathname: "package/dist/hi.js",
},
]);
});
});
test("unicode", async () => {

View File

@@ -19,7 +19,7 @@ describe("bun patch <pkg>", async () => {
*/
describe("inside workspace with hoisting", async () => {
const args = [
["node_modules/@types/ws", "node_modules/@types/ws"],
["packages/eslint-config/node_modules/@types/ws", "packages/eslint-config/node_modules/@types/ws"],
["@types/ws@8.5.4", "node_modules/@types/ws"],
];
for (const [arg, path] of args) {

View File

@@ -18,7 +18,9 @@ import {
beforeAll(dummyBeforeAll);
afterAll(dummyAfterAll);
beforeEach(dummyBeforeEach);
beforeEach(async () => {
await dummyBeforeEach();
});
afterEach(dummyAfterEach);
it("should list top-level dependency", async () => {

View File

@@ -1,6 +1,7 @@
import { describe, expect, test } from "bun:test";
import { realpathSync } from "fs";
import { bunEnv, bunExe, isWindows, tempDirWithFiles, toTOMLString } from "harness";
import { join as pathJoin } from "node:path";
describe.each(["bun run", "bun"])(`%s`, cmd => {
const runCmd = cmd === "bun" ? ["-c=bunfig.toml", "run"] : ["-c=bunfig.toml"];
@@ -131,4 +132,119 @@ describe.each(["bun run", "bun"])(`%s`, cmd => {
expect(result.success).toBeFalse();
});
});
test("autoload local bunfig.toml (same cwd)", async () => {
const runCmd = cmd === "bun" ? ["run"] : [];
const bunfig = toTOMLString({
run: {
bun: true,
},
});
const cwd = tempDirWithFiles("run.where.node", {
"bunfig.toml": bunfig,
"package.json": JSON.stringify(
{
scripts: {
"where-node": `which node`,
},
},
null,
2,
),
});
const result = Bun.spawnSync({
cmd: [bunExe(), "--silent", ...runCmd, "where-node"],
env: bunEnv,
stderr: "inherit",
stdout: "pipe",
stdin: "ignore",
cwd,
});
const nodeBin = result.stdout.toString().trim();
if (isWindows) {
expect(realpathSync(nodeBin)).toContain("\\bun-node-");
} else {
expect(realpathSync(nodeBin)).toBe(realpathSync(execPath));
}
});
test("NOT autoload local bunfig.toml (sub cwd)", async () => {
const runCmd = cmd === "bun" ? ["run"] : [];
const bunfig = toTOMLString({
run: {
bun: true,
},
});
const cwd = tempDirWithFiles("run.where.node", {
"bunfig.toml": bunfig,
"package.json": JSON.stringify(
{
scripts: {
"where-node": `which node`,
},
},
null,
2,
),
"subdir/a.txt": "a",
});
const result = Bun.spawnSync({
cmd: [bunExe(), "--silent", ...runCmd, "where-node"],
env: bunEnv,
stderr: "inherit",
stdout: "pipe",
stdin: "ignore",
cwd: pathJoin(cwd, "./subdir"),
});
const nodeBin = result.stdout.toString().trim();
expect(realpathSync(nodeBin)).toBe(realpathSync(node));
expect(result.success).toBeTrue();
});
test("NOT autoload home bunfig.toml", async () => {
const runCmd = cmd === "bun" ? ["run"] : [];
const bunfig = toTOMLString({
run: {
bun: true,
},
});
const cwd = tempDirWithFiles("run.where.node", {
"my-home/.bunfig.toml": bunfig,
"package.json": JSON.stringify(
{
scripts: {
"where-node": `which node`,
},
},
null,
2,
),
});
const result = Bun.spawnSync({
cmd: [bunExe(), "--silent", ...runCmd, "where-node"],
env: {
...bunEnv,
HOME: pathJoin(cwd, "./my-home"),
},
stderr: "inherit",
stdout: "pipe",
stdin: "ignore",
cwd,
});
const nodeBin = result.stdout.toString().trim();
expect(realpathSync(nodeBin)).toBe(realpathSync(node));
expect(result.success).toBeTrue();
});
});

View File

@@ -13,7 +13,9 @@ import {
beforeAll(dummyBeforeAll);
afterAll(dummyAfterAll);
beforeEach(dummyBeforeEach);
beforeEach(async () => {
await dummyBeforeEach();
});
afterEach(dummyAfterEach);
test("security scanner blocks bun update on fatal advisory", async () => {

View File

@@ -17,7 +17,9 @@ import {
beforeAll(dummyBeforeAll);
afterAll(dummyAfterAll);
beforeEach(dummyBeforeEach);
beforeEach(async () => {
await dummyBeforeEach();
});
afterEach(dummyAfterEach);
expect.extend({
@@ -25,7 +27,7 @@ expect.extend({
toHaveBins,
});
for (const { input } of [{ input: { baz: "~0.0.3", moo: "~0.1.0" } }, { input: { baz: "^0.0.3", moo: "^0.1.0" } }]) {
for (const { input } of [{ input: { baz: "~0.0.3", moo: "~0.1.0" } }]) {
it(`should update to latest version of dependency (${input.baz[0]})`, async () => {
const urls: string[] = [];
const tilde = input.baz[0] === "~";
@@ -42,6 +44,7 @@ for (const { input } of [{ input: { baz: "~0.0.3", moo: "~0.1.0" } }, { input: {
},
latest: "0.0.3",
};
console.log({ package_dir });
setHandler(dummyRegistry(urls, registry));
await writeFile(
join(package_dir, "package.json"),
@@ -64,6 +67,7 @@ for (const { input } of [{ input: { baz: "~0.0.3", moo: "~0.1.0" } }, { input: {
stderr: "pipe",
env,
});
const err1 = await new Response(stderr1).text();
expect(err1).not.toContain("error:");
expect(err1).toContain("Saved lockfile");

View File

@@ -136,6 +136,44 @@ test("dependency on workspace without version in package.json", async () => {
}
}, 20_000);
test("allowing negative workspace patterns", async () => {
await Promise.all([
write(
join(packageDir, "package.json"),
JSON.stringify({
name: "root",
workspaces: ["packages/*", "!packages/pkg2"],
}),
),
write(
join(packageDir, "packages", "pkg1", "package.json"),
JSON.stringify({
name: "pkg1",
dependencies: {
"no-deps": "1.0.0",
},
}),
),
write(
join(packageDir, "packages", "pkg2", "package.json"),
JSON.stringify({
name: "pkg2",
dependencies: {
"doesnt-exist-oops": "1.2.3",
},
}),
),
]);
const { exited } = await runBunInstall(env, packageDir);
expect(await exited).toBe(0);
expect(await file(join(packageDir, "node_modules", "no-deps", "package.json")).json()).toEqual({
name: "no-deps",
version: "1.0.0",
});
});
test("dependency on same name as workspace and dist-tag", async () => {
await Promise.all([
write(

View File

@@ -126,7 +126,7 @@ export function getPort() {
let packageDirGetter: () => string = () => {
return tmpdirSync();
};
export async function dummyBeforeEach() {
export async function dummyBeforeEach(opts?: { linker: "hoisted" | "isolated" }) {
resetHandler();
requested = 0;
package_dir = packageDirGetter();
@@ -137,6 +137,7 @@ export async function dummyBeforeEach() {
cache = false
registry = "http://localhost:${server.port}/"
saveTextLockfile = false
${opts ? `linker = "${opts.linker}"` : ""}
`,
);
}

View File

@@ -226,6 +226,41 @@ test("handles cyclic dependencies", async () => {
});
});
test("package with dependency on previous self works", async () => {
const { packageJson, packageDir } = await registry.createTestDir({ bunfigOpts: { isolated: true } });
await write(
packageJson,
JSON.stringify({
name: "test-transitive-self-dep",
dependencies: {
"self-dep": "1.0.2",
},
}),
);
await runBunInstall(bunEnv, packageDir);
expect(
await Promise.all([
file(join(packageDir, "node_modules", "self-dep", "package.json")).json(),
file(join(packageDir, "node_modules", "self-dep", "node_modules", "self-dep", "package.json")).json(),
]),
).toEqual([
{
name: "self-dep",
version: "1.0.2",
dependencies: {
"self-dep": "1.0.1",
},
},
{
name: "self-dep",
version: "1.0.1",
},
]);
});
test("can install folder dependencies", async () => {
const { packageJson, packageDir } = await registry.createTestDir({ bunfigOpts: { isolated: true } });
@@ -268,6 +303,46 @@ test("can install folder dependencies", async () => {
).toBe("module.exports = 'hello from pkg-1';");
});
test("can install folder dependencies on root package", async () => {
const { packageDir, packageJson } = await registry.createTestDir({ bunfigOpts: { isolated: true } });
await Promise.all([
write(
packageJson,
JSON.stringify({
name: "root-file-dep",
workspaces: ["packages/*"],
dependencies: {
self: "file:.",
},
}),
),
write(
join(packageDir, "packages", "pkg1", "package.json"),
JSON.stringify({
name: "pkg1",
dependencies: {
root: "file:../..",
},
}),
),
]);
await runBunInstall(bunEnv, packageDir);
expect(
await Promise.all([
readlink(join(packageDir, "node_modules", "self")),
readlink(join(packageDir, "packages", "pkg1", "node_modules", "root")),
file(join(packageDir, "node_modules", "self", "package.json")).json(),
]),
).toEqual([
join(".bun", "root-file-dep@root", "node_modules", "root-file-dep"),
join("..", "..", "..", "node_modules", ".bun", "root-file-dep@root", "node_modules", "root-file-dep"),
await file(packageJson).json(),
]);
});
describe("isolated workspaces", () => {
test("basic", async () => {
const { packageJson, packageDir } = await registry.createTestDir({ bunfigOpts: { isolated: true } });
@@ -455,6 +530,140 @@ for (const backend of ["clonefile", "hardlink", "copyfile"]) {
});
}
describe("existing node_modules, missing node_modules/.bun", () => {
test("root and workspace node_modules are reset", async () => {
const { packageDir } = await registry.createTestDir({
bunfigOpts: { isolated: true },
files: {
"package.json": JSON.stringify({
name: "delete-node-modules",
workspaces: ["packages/*"],
dependencies: {
"no-deps": "1.0.0",
"a-dep": "1.0.1",
},
}),
"packages/pkg1/package.json": JSON.stringify({
name: "pkg1",
dependencies: {
"no-deps": "1.0.1",
},
}),
"packages/pkg2/package.json": JSON.stringify({
name: "pkg2",
dependencies: {
"no-deps": "2.0.0",
},
}),
"node_modules/oops": "delete me!",
"packages/pkg1/node_modules/oops1": "delete me!",
"packages/pkg2/node_modules/oops2": "delete me!",
},
});
let { exited } = spawn({
cmd: [bunExe(), "install"],
cwd: packageDir,
env: bunEnv,
stdout: "ignore",
stderr: "ignore",
});
expect(await exited).toBe(0);
expect(
await Promise.all([
readdirSorted(join(packageDir, "node_modules")),
readdirSorted(join(packageDir, "packages", "pkg1", "node_modules")),
readdirSorted(join(packageDir, "packages", "pkg2", "node_modules")),
]),
).toEqual([[".bun", expect.stringContaining(".old_modules-"), "a-dep", "no-deps"], ["no-deps"], ["no-deps"]]);
});
test("some workspaces don't have node_modules", async () => {
const { packageDir } = await registry.createTestDir({
bunfigOpts: { isolated: true },
files: {
"package.json": JSON.stringify({
name: "missing-workspace-node_modules",
workspaces: ["packages/*"],
dependencies: {
"no-deps": "1.0.0",
},
}),
"node_modules/hi": "BUN",
"packages/pkg1/package.json": JSON.stringify({
name: "pkg-one",
dependencies: {
"no-deps": "2.0.0",
},
}),
"packages/pkg1/node_modules/foo": "HI",
"packages/pkg2/package.json": JSON.stringify({
name: "pkg-two",
dependencies: {
"a-dep": "1.0.1",
},
}),
},
});
let { exited } = spawn({
cmd: [bunExe(), "install"],
cwd: packageDir,
env: bunEnv,
stdout: "ignore",
stderr: "ignore",
});
expect(await exited).toBe(0);
expect(
await Promise.all([
readdirSorted(join(packageDir, "node_modules")),
readdirSorted(join(packageDir, "packages", "pkg1", "node_modules")),
readdirSorted(join(packageDir, "packages", "pkg2", "node_modules")),
]),
).toEqual([[".bun", expect.stringContaining(".old_modules-"), "no-deps"], ["no-deps"], ["a-dep"]]);
// another install will not reset the node_modules
const entries = await readdirSorted(join(packageDir, "node_modules"));
for (const entry of entries) {
if (entry.startsWith(".old_modules-")) {
await rm(join(packageDir, "node_modules", entry), { recursive: true, force: true });
}
}
expect(await readdirSorted(join(packageDir, "node_modules"))).toEqual([".bun", "no-deps"]);
// add things to workspace node_modules. these will go undetected
await Promise.all([
write(join(packageDir, "packages", "pkg1", "node_modules", "oops1"), "HI1"),
write(join(packageDir, "packages", "pkg2", "node_modules", "oops2"), "HI2"),
]);
({ exited } = spawn({
cmd: [bunExe(), "install"],
cwd: packageDir,
env: bunEnv,
stdout: "ignore",
stderr: "ignore",
}));
expect(await exited).toBe(0);
expect(
await Promise.all([
readdirSorted(join(packageDir, "node_modules")),
readdirSorted(join(packageDir, "packages", "pkg1", "node_modules")),
readdirSorted(join(packageDir, "packages", "pkg2", "node_modules")),
]),
).toEqual([
[".bun", "no-deps"],
["no-deps", "oops1"],
["a-dep", "oops2"],
]);
});
});
describe("--linker flag", () => {
test("can override linker from bunfig", async () => {
const { packageJson, packageDir } = await registry.createTestDir({ bunfigOpts: { isolated: true } });

View File

@@ -17,7 +17,9 @@ import {
beforeAll(dummyBeforeAll);
afterAll(dummyAfterAll);
beforeEach(dummyBeforeEach);
beforeEach(async () => {
await dummyBeforeEach();
});
afterEach(dummyAfterEach);
it.each(["bun.lockb", "bun.lock"])("should not download tarballs with --lockfile-only using %s", async lockfile => {

View File

@@ -79,7 +79,7 @@ exports[`yarn.lock migration basic complex yarn.lock with multiple dependencies
"fresh": ["fresh@0.5.2", "", {}, "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q=="],
"fsevents": ["fsevents@2.3.3", "", {}, "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw=="],
"fsevents": ["fsevents@2.3.3", "", { "os": "darwin" }, "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw=="],
"get-intrinsic": ["get-intrinsic@1.2.2", "", { "dependencies": { "function-bind": "^1.1.2", "has-proto": "^1.0.1", "has-symbols": "^1.0.3", "hasown": "^2.0.0" } }, "sha512-0gSo4ml/0j98Y3lngkFEot/zhiCeWsbYIlZ+uZOVgzLyLaUw7wxUL+nCTP0XJvJg1AXulJRI3UJi8GsbDuxdGA=="],
@@ -290,7 +290,7 @@ exports[`yarn.lock migration basic migration with realistic complex yarn.lock: c
"eslint": ["eslint@8.35.0", "", { "dependencies": { "@eslint/eslintrc": "^2.0.0", "@eslint/js": "8.35.0", "@humanwhocodes/config-array": "^0.11.8", "@humanwhocodes/module-importer": "^1.0.1", "@nodelib/fs.walk": "^1.2.8", "ajv": "^6.10.0", "chalk": "^4.0.0", "cross-spawn": "^7.0.2", "debug": "^4.3.2", "doctrine": "^3.0.0", "escape-string-regexp": "^4.0.0", "eslint-scope": "^7.1.1", "eslint-utils": "^3.0.0", "eslint-visitor-keys": "^3.3.0", "espree": "^9.4.0", "esquery": "^1.4.2", "esutils": "^2.0.2", "fast-deep-equal": "^3.1.3", "file-entry-cache": "^6.0.1", "find-up": "^5.0.0", "glob-parent": "^6.0.2", "globals": "^13.19.0", "grapheme-splitter": "^1.0.4", "ignore": "^5.2.0", "import-fresh": "^3.0.0", "imurmurhash": "^0.1.4", "is-glob": "^4.0.0", "is-path-inside": "^3.0.3", "js-sdsl": "^4.1.4", "js-yaml": "^4.1.0", "json-stable-stringify-without-jsonify": "^1.0.1", "levn": "^0.4.1", "lodash.merge": "^4.6.2", "minimatch": "^3.1.2", "natural-compare": "^1.4.0", "optionator": "^0.9.1", "regexpp": "^3.2.0", "strip-ansi": "^6.0.1", "strip-json-comments": "^3.1.0", "text-table": "^0.2.0" }, "bin": { "eslint": "bin/eslint.js" } }, "sha512-BxAf1fVL7w+JLRQhWl2pzGeSiGqbWumV4WNvc9Rhp6tiCtm4oHnyPBSEtMGZwrQgudFQ+otqzWoPB7x+hxoWsw=="],
"fsevents": ["fsevents@2.3.2", "", {}, "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA=="],
"fsevents": ["fsevents@2.3.2", "", { "os": "darwin" }, "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA=="],
"js-tokens": ["js-tokens@4.0.0", "", {}, "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ=="],
@@ -1176,7 +1176,7 @@ exports[`bun pm migrate for existing yarn.lock yarn-cli-repo: yarn-cli-repo 1`]
"fs.realpath": ["fs.realpath@1.0.0", "", {}, "sha1-FQStJSMVjKpA20onh8sBQRmU6k8="],
"fsevents": ["fsevents@1.2.4", "", { "dependencies": { "nan": "^2.9.2", "node-pre-gyp": "^0.10.0" } }, "sha512-z8H8/diyk76B7q5wg+Ud0+CqzcAF3mBBI/bA5ne5zrRUUIvNkJY//D3BqyH571KuAC4Nr7Rw7CjWX4r0y9DvNg=="],
"fsevents": ["fsevents@1.2.4", "", { "dependencies": { "nan": "^2.9.2", "node-pre-gyp": "^0.10.0" }, "os": "darwin" }, "sha512-z8H8/diyk76B7q5wg+Ud0+CqzcAF3mBBI/bA5ne5zrRUUIvNkJY//D3BqyH571KuAC4Nr7Rw7CjWX4r0y9DvNg=="],
"function-bind": ["function-bind@1.1.1", "", {}, "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A=="],
@@ -3161,3 +3161,34 @@ exports[`bun pm migrate for existing yarn.lock yarn-stuff/abbrev-link-target: ya
}
"
`;
exports[`bun pm migrate for existing yarn.lock yarn.lock with packages that have os/cpu requirements: os-cpu-yarn-migration 1`] = `
"{
"lockfileVersion": 1,
"workspaces": {
"": {
"name": "os-cpu-test",
"dependencies": {
"fsevents": "^2.3.2",
"esbuild": "^0.17.0",
},
},
},
"packages": {
"@esbuild/android-arm64": ["@esbuild/android-arm64@0.17.19", "", { "os": "android", "cpu": "arm64" }, "sha512-KBMWvEZooR7+kzY0BtbTQn0OAYY7CsiydT63pVEaPtVYF0hXbUaOyZog37DKxK7NF3XacBJOpYT4adIJh+avxA=="],
"@esbuild/darwin-arm64": ["@esbuild/darwin-arm64@0.17.19", "", { "os": "darwin", "cpu": "arm64" }, "sha512-80wEoCfF/hFKM6WE1FyBHc9SfUblloAWx6FJkFWTWiCoht9Mc0ARGEM47e67W9rI09YoUxJL68WHfDRYEAvOhg=="],
"@esbuild/darwin-x64": ["@esbuild/darwin-x64@0.17.19", "", { "os": "darwin", "cpu": "x64" }, "sha512-IJM4JJsLhRYr9xdtLytPLSH9k/oxR3boaUIYiHkAawtwNOXKE8KoU8tMvryogdcT8AU+Bflmh81Xn6Q0vTZbQw=="],
"@esbuild/linux-arm64": ["@esbuild/linux-arm64@0.17.19", "", { "os": "linux", "cpu": "arm64" }, "sha512-ct1Mj/VEUqd5+2h0EBPdMzNdGXnGxbLPg6H5TF8xsHY4X5UAP0FUbDKJhtKu+6iLpIjKjWEvb5XrFyZdVy9OTg=="],
"@esbuild/linux-x64": ["@esbuild/linux-x64@0.17.19", "", { "os": "linux", "cpu": "x64" }, "sha512-68ngA9lg2H6zkZcyp22tsVt38mlhWde8l3eJLWkyLrp4HwMUr3c1s/M2t7+kHIhvMjglIBrFpncX1SzMckomGw=="],
"esbuild": ["esbuild@0.17.19", "", { "optionalDependencies": { "@esbuild/android-arm": "0.17.19", "@esbuild/android-arm64": "0.17.19", "@esbuild/android-x64": "0.17.19", "@esbuild/darwin-arm64": "0.17.19", "@esbuild/darwin-x64": "0.17.19", "@esbuild/freebsd-arm64": "0.17.19", "@esbuild/freebsd-x64": "0.17.19", "@esbuild/linux-arm": "0.17.19", "@esbuild/linux-arm64": "0.17.19", "@esbuild/linux-ia32": "0.17.19", "@esbuild/linux-loong64": "0.17.19", "@esbuild/linux-mips64el": "0.17.19", "@esbuild/linux-ppc64": "0.17.19", "@esbuild/linux-riscv64": "0.17.19", "@esbuild/linux-s390x": "0.17.19", "@esbuild/linux-x64": "0.17.19", "@esbuild/netbsd-x64": "0.17.19", "@esbuild/openbsd-x64": "0.17.19", "@esbuild/sunos-x64": "0.17.19", "@esbuild/win32-arm64": "0.17.19", "@esbuild/win32-ia32": "0.17.19", "@esbuild/win32-x64": "0.17.19" }, "bin": { "esbuild": "bin/esbuild" } }, "sha512-XQ0jAPFkK/u3LcVRcvVHQcTIqD6E2H1fvZMA5dQPSOWb3suUbWbfbRf94pjc0bNzRYLfIrDRQXr7X+LHIm5oHw=="],
"fsevents": ["fsevents@2.3.2", "", { "os": "darwin" }, "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA=="],
}
}
"
`;

View File

@@ -54,6 +54,25 @@ test("basic", async () => {
expect(err).not.toContain("Saved lockfile");
});
test("version is number with dot", async () => {
const { packageDir } = await verdaccio.createTestDir({
files: join(import.meta.dir, "pnpm/version-number-dot"),
});
let proc = spawn({
cmd: [bunExe(), "install"],
cwd: packageDir,
env,
stdout: "pipe",
stderr: "pipe",
});
let [err, exitCode] = await Promise.all([proc.stderr.text(), proc.exited]);
expect(exitCode).toBe(0);
expect(err).toContain("pnpm-lock.yaml version is too old (< v7)");
});
describe.todo("bin", () => {
test("manifests are fetched for bins", async () => {
const { packageDir, packageJson } = await verdaccio.createTestDir({

View File

@@ -0,0 +1 @@
lockfileVersion: 5.4

View File

@@ -1372,4 +1372,111 @@ describe("bun pm migrate for existing yarn.lock", () => {
const bunLockContent = await Bun.file(join(tempDir, "bun.lock")).text();
expect(bunLockContent).toMatchSnapshot(folder);
});
test("yarn.lock with packages that have os/cpu requirements", async () => {
const tempDir = tempDirWithFiles("yarn-migration-os-cpu", {
"package.json": JSON.stringify(
{
name: "os-cpu-test",
version: "1.0.0",
dependencies: {
fsevents: "^2.3.2",
esbuild: "^0.17.0",
},
},
null,
2,
),
"yarn.lock": `# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
# yarn lockfile v1
"@esbuild/android-arm64@0.17.19":
version "0.17.19"
resolved "https://registry.yarnpkg.com/@esbuild/android-arm64/-/android-arm64-0.17.19.tgz#bafb75234a5d3d1b690e7c2956a599345e84a2fd"
integrity sha512-KBMWvEZooR7+kzY0BtbTQn0OAYY7CsiydT63pVEaPtVYF0hXbUaOyZog37DKxK7NF3XacBJOpYT4adIJh+avxA==
"@esbuild/darwin-arm64@0.17.19":
version "0.17.19"
resolved "https://registry.yarnpkg.com/@esbuild/darwin-arm64/-/darwin-arm64-0.17.19.tgz#584c34c5991b95d4d48d333300b1a4e2ff7be276"
integrity sha512-80wEoCfF/hFKM6WE1FyBHc9SfUblloAWx6FJkFWTWiCoht9Mc0ARGEM47e67W9rI09YoUxJL68WHfDRYEAvOhg==
"@esbuild/darwin-x64@0.17.19":
version "0.17.19"
resolved "https://registry.yarnpkg.com/@esbuild/darwin-x64/-/darwin-x64-0.17.19.tgz#7751d236dfe6ce136cce343dce69f52d76b7f6cb"
integrity sha512-IJM4JJsLhRYr9xdtLytPLSH9k/oxR3boaUIYiHkAawtwNOXKE8KoU8tMvryogdcT8AU+Bflmh81Xn6Q0vTZbQw==
"@esbuild/linux-arm64@0.17.19":
version "0.17.19"
resolved "https://registry.yarnpkg.com/@esbuild/linux-arm64/-/linux-arm64-0.17.19.tgz#38e162ecb723862c6be1c27d6389f48960b68edb"
integrity sha512-ct1Mj/VEUqd5+2h0EBPdMzNdGXnGxbLPg6H5TF8xsHY4X5UAP0FUbDKJhtKu+6iLpIjKjWEvb5XrFyZdVy9OTg==
"@esbuild/linux-x64@0.17.19":
version "0.17.19"
resolved "https://registry.yarnpkg.com/@esbuild/linux-x64/-/linux-x64-0.17.19.tgz#8a0e9738b1635f0c53389e515ae83826dec22aa4"
integrity sha512-68ngA9lg2H6zkZcyp22tsVt38mlhWde8l3eJLWkyLrp4HwMUr3c1s/M2t7+kHIhvMjglIBrFpncX1SzMckomGw==
esbuild@^0.17.0:
version "0.17.19"
resolved "https://registry.yarnpkg.com/esbuild/-/esbuild-0.17.19.tgz#087a727e98299f0462a3d0bcdd9cd7ff100bd955"
integrity sha512-XQ0jAPFkK/u3LcVRcvVHQcTIqD6E2H1fvZMA5dQPSOWb3suUbWbfbRf94pjc0bNzRYLfIrDRQXr7X+LHIm5oHw==
optionalDependencies:
"@esbuild/android-arm" "0.17.19"
"@esbuild/android-arm64" "0.17.19"
"@esbuild/android-x64" "0.17.19"
"@esbuild/darwin-arm64" "0.17.19"
"@esbuild/darwin-x64" "0.17.19"
"@esbuild/freebsd-arm64" "0.17.19"
"@esbuild/freebsd-x64" "0.17.19"
"@esbuild/linux-arm" "0.17.19"
"@esbuild/linux-arm64" "0.17.19"
"@esbuild/linux-ia32" "0.17.19"
"@esbuild/linux-loong64" "0.17.19"
"@esbuild/linux-mips64el" "0.17.19"
"@esbuild/linux-ppc64" "0.17.19"
"@esbuild/linux-riscv64" "0.17.19"
"@esbuild/linux-s390x" "0.17.19"
"@esbuild/linux-x64" "0.17.19"
"@esbuild/netbsd-x64" "0.17.19"
"@esbuild/openbsd-x64" "0.17.19"
"@esbuild/sunos-x64" "0.17.19"
"@esbuild/win32-arm64" "0.17.19"
"@esbuild/win32-ia32" "0.17.19"
"@esbuild/win32-x64" "0.17.19"
fsevents@^2.3.2:
version "2.3.2"
resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.2.tgz#8a526f78b8fdf4623b709e0b975c52c24c02fd1a"
integrity sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==
`,
});
// Run bun pm migrate
const migrateResult = await Bun.spawn({
cmd: [bunExe(), "pm", "migrate", "-f"],
cwd: tempDir,
env: bunEnv,
stdout: "pipe",
stderr: "pipe",
stdin: "ignore",
});
const [stdout, stderr, exitCode] = await Promise.all([
new Response(migrateResult.stdout).text(),
new Response(migrateResult.stderr).text(),
migrateResult.exited,
]);
expect(exitCode).toBe(0);
expect(fs.existsSync(join(tempDir, "bun.lock"))).toBe(true);
const bunLockContent = fs.readFileSync(join(tempDir, "bun.lock"), "utf8");
expect(bunLockContent).toMatchSnapshot("os-cpu-yarn-migration");
// Verify that the lockfile contains the expected os/cpu metadata by checking the snapshot
// fsevents should have darwin os constraint, esbuild packages should have arch constraints
expect(bunLockContent).toContain("fsevents");
expect(bunLockContent).toContain("@esbuild/linux-arm64");
expect(bunLockContent).toContain("@esbuild/darwin-arm64");
});
});

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,65 @@
{
"name": "self-dep",
"versions": {
"1.0.1": {
"name": "self-dep",
"version": "1.0.1",
"_id": "self-dep@1.0.1",
"_integrity": "sha512-X9HQMiuvWXqhxJExWwQz5X+z901PvtvYVVAsEli2k5FDVOg2j6opnkQjha5iDTWbqBrG1m95N3rKwmq3OftU/Q==",
"_nodeVersion": "24.3.0",
"_npmVersion": "10.8.3",
"integrity": "sha512-X9HQMiuvWXqhxJExWwQz5X+z901PvtvYVVAsEli2k5FDVOg2j6opnkQjha5iDTWbqBrG1m95N3rKwmq3OftU/Q==",
"shasum": "b95f3e460b2f2ede25a18cc3c25bc226a3edfc71",
"dist": {
"integrity": "sha512-X9HQMiuvWXqhxJExWwQz5X+z901PvtvYVVAsEli2k5FDVOg2j6opnkQjha5iDTWbqBrG1m95N3rKwmq3OftU/Q==",
"shasum": "b95f3e460b2f2ede25a18cc3c25bc226a3edfc71",
"tarball": "http://http://localhost:4873/self-dep/-/self-dep-1.0.1.tgz"
},
"contributors": []
},
"1.0.2": {
"name": "self-dep",
"version": "1.0.2",
"dependencies": {
"self-dep": "1.0.1"
},
"_id": "self-dep@1.0.2",
"_integrity": "sha512-idMxfr8aIs5CwIVOMTykKRK7MBURv62AjBZ+zRH2zPOZMsWbe+sBXha0zPhQNfP7cUWccF3yiSvs0AQwQXGKfA==",
"_nodeVersion": "24.3.0",
"_npmVersion": "10.8.3",
"integrity": "sha512-idMxfr8aIs5CwIVOMTykKRK7MBURv62AjBZ+zRH2zPOZMsWbe+sBXha0zPhQNfP7cUWccF3yiSvs0AQwQXGKfA==",
"shasum": "d1bc984e927fd960511dbef211551408e3bb2f72",
"dist": {
"integrity": "sha512-idMxfr8aIs5CwIVOMTykKRK7MBURv62AjBZ+zRH2zPOZMsWbe+sBXha0zPhQNfP7cUWccF3yiSvs0AQwQXGKfA==",
"shasum": "d1bc984e927fd960511dbef211551408e3bb2f72",
"tarball": "http://http://localhost:4873/self-dep/-/self-dep-1.0.2.tgz"
},
"contributors": []
}
},
"time": {
"modified": "2025-10-03T21:30:17.221Z",
"created": "2025-10-03T21:30:02.446Z",
"1.0.1": "2025-10-03T21:30:02.446Z",
"1.0.2": "2025-10-03T21:30:17.221Z"
},
"users": {},
"dist-tags": {
"latest": "1.0.2"
},
"_uplinks": {},
"_distfiles": {},
"_attachments": {
"self-dep-1.0.1.tgz": {
"shasum": "b95f3e460b2f2ede25a18cc3c25bc226a3edfc71",
"version": "1.0.1"
},
"self-dep-1.0.2.tgz": {
"shasum": "d1bc984e927fd960511dbef211551408e3bb2f72",
"version": "1.0.2"
}
},
"_rev": "",
"_id": "self-dep",
"readme": ""
}

View File

@@ -0,0 +1,69 @@
import { spawn } from "bun";
import { expect, test } from "bun:test";
import { mkdir, readFile, stat, writeFile } from "fs/promises";
import { bunExe, bunEnv as env, isWindows, runBunInstall, tmpdirSync } from "harness";
import { join } from "path";
// Regression test: a linked package's bin script written with Windows (CRLF)
// line endings must have its shebang normalized to LF when linked, otherwise
// the kernel's interpreter lookup sees a trailing "\r" and fails to exec.
// Skipped on Windows, where shebangs are not used for bin execution.
test.skipIf(isWindows)("bin linking normalizes CRLF in shebang", async () => {
const testDir = tmpdirSync();
const pkgDir = join(testDir, "pkg");
const consumerDir = join(testDir, "consumer");
await mkdir(pkgDir, { recursive: true });
await mkdir(consumerDir, { recursive: true });
// Create package with bin that has CRLF shebang
await writeFile(
join(pkgDir, "package.json"),
JSON.stringify({
name: "test-pkg-crlf",
version: "1.0.0",
bin: {
"test-bin": "test-bin.py",
},
}),
);
// Write bin file with CRLF shebang
await writeFile(join(pkgDir, "test-bin.py"), "#!/usr/bin/env python\r\nprint('hello from python')");
// Link the package
const linkResult = spawn({
cmd: [bunExe(), "link"],
cwd: pkgDir,
env,
stdout: "pipe",
stderr: "pipe",
});
// Wait for `bun link` to finish before asserting its exit code.
await linkResult.exited;
expect(linkResult.exitCode).toBe(0);
// Create consumer package
await writeFile(
join(consumerDir, "package.json"),
JSON.stringify({
name: "consumer",
version: "1.0.0",
dependencies: {
// `link:` protocol resolves the previously-linked global package.
"test-pkg-crlf": "link:test-pkg-crlf",
},
}),
);
// Install
await runBunInstall(env, consumerDir);
// Check that the linked bin file has normalized shebang
const binPath = join(consumerDir, "node_modules", "test-pkg-crlf", "test-bin.py");
const binContent = await readFile(binPath, "utf-8");
console.log("Bin content first 50 chars:", JSON.stringify(binContent.slice(0, 50)));
// The shebang line and the rest of the file must be joined by a bare "\n".
expect(binContent).toStartWith("#!/usr/bin/env python\nprint");
expect(binContent).not.toContain("\r\n");
// Verify that the file is executable (bin linking sets this)
const binStat = await stat(binPath);
expect(binStat.mode & 0o111).toBeGreaterThan(0); // At least one execute bit should be set
});

View File

@@ -8,7 +8,7 @@ FNH:0
DA:2,19
DA:3,16
DA:4,1
LF:5
LF:3
LH:3
end_of_record
TN:
@@ -22,7 +22,7 @@ DA:9,0
DA:10,0
DA:11,1
DA:14,9
LF:15
LF:7
LH:5
end_of_record"
`;

View File

@@ -951,8 +951,8 @@ describe("bun test", () => {
`,
});
expect(stderr).toContain('"fs" module > has $method');
expect(stderr).toContain('"path" module > has $method');
expect(stderr).toContain("fs module > has $method");
expect(stderr).toContain("path module > has $method");
expect(stderr).toContain("2 pass");
});
@@ -976,8 +976,8 @@ describe("bun test", () => {
`,
});
expect(stderr).toContain('(pass) user "john_doe" age 30 active true');
expect(stderr).toContain('(pass) user "jane_smith" age 25 active false');
expect(stderr).toContain("(pass) user john_doe age 30 active true");
expect(stderr).toContain("(pass) user jane_smith age 25 active false");
expect(stderr).toContain("2 pass");
});
@@ -1027,8 +1027,8 @@ describe("bun test", () => {
`,
});
expect(stderr).toContain('(pass) "Alice" from "NYC"');
expect(stderr).toContain('(pass) "Bob" from "LA"');
expect(stderr).toContain("(pass) Alice from NYC");
expect(stderr).toContain("(pass) Bob from LA");
expect(stderr).toContain("2 pass");
});
@@ -1056,8 +1056,8 @@ describe("bun test", () => {
`,
});
expect(stderr).toContain('(pass) first user is "Alice"');
expect(stderr).toContain('(pass) first user is "Carol"');
expect(stderr).toContain("(pass) first user is Alice");
expect(stderr).toContain("(pass) first user is Carol");
expect(stderr).toContain("2 pass");
});
@@ -1085,9 +1085,9 @@ describe("bun test", () => {
`,
});
expect(stderr).toContain('"underscore"');
expect(stderr).toContain('"dollar"');
expect(stderr).toContain('"mix"');
expect(stderr).toContain("underscore");
expect(stderr).toContain("dollar");
expect(stderr).toContain("mix");
expect(stderr).toContain("$123invalid");
expect(stderr).toContain("$hasdash");
expect(stderr).toContain("$hasspace");
@@ -1118,7 +1118,7 @@ describe("bun test", () => {
`,
});
expect(stderr).toContain('First user: "Alice" with tag: "admin"');
expect(stderr).toContain("First user: Alice with tag: admin");
});
test("handles missing properties gracefully", () => {
@@ -1460,7 +1460,7 @@ function runTest({
const { stderr, exitCode } = spawnSync({
cwd,
cmd: [bunExe(), "test", ...args],
env: { ...bunEnv, ...env },
env: { ...bunEnv, AGENT: "0", ...env },
stderr: "pipe",
stdout: "ignore",
});

View File

@@ -1,31 +1,38 @@
import { spawnSync } from "bun";
import { expect, test } from "bun:test";
import { beforeAll, expect, test } from "bun:test";
import { bunEnv, bunExe, normalizeBunSnapshot, tempDirWithFiles } from "harness";
let testEnv: NodeJS.Dict<string>;
beforeAll(() => {
testEnv = { ...bunEnv };
delete testEnv.AGENT;
});
test("CLAUDECODE=1 shows quiet test output (only failures)", async () => {
const dir = tempDirWithFiles("claudecode-test-quiet", {
"test2.test.js": `
import { test, expect } from "bun:test";
test("passing test", () => {
expect(1).toBe(1);
});
test("failing test", () => {
expect(1).toBe(2);
});
test.skip("skipped test", () => {
expect(1).toBe(1);
});
test.todo("todo test");
`,
});
await using proc = Bun.spawn({
cmd: [bunExe(), "test", "test2.test.js"],
env: { ...bunEnv, CLAUDECODE: "1" },
env: { ...testEnv, CLAUDECODE: "1" },
cwd: dir,
stderr: "pipe",
stdout: "pipe",
@@ -43,19 +50,19 @@ test("CLAUDECODE=1 vs CLAUDECODE=0 comparison", async () => {
const dir = tempDirWithFiles("claudecode-test-compare", {
"test3.test.js": `
import { test, expect } from "bun:test";
test("passing test", () => {
expect(1).toBe(1);
});
test("another passing test", () => {
expect(2).toBe(2);
});
test.skip("skipped test", () => {
expect(1).toBe(1);
});
test.todo("todo test");
`,
});
@@ -63,7 +70,7 @@ test("CLAUDECODE=1 vs CLAUDECODE=0 comparison", async () => {
// Run with CLAUDECODE=0 (normal output)
const result1 = spawnSync({
cmd: [bunExe(), "test", "test3.test.js"],
env: { ...bunEnv, CLAUDECODE: "0" },
env: { ...testEnv, CLAUDECODE: "0" },
cwd: dir,
stderr: "pipe",
stdout: "pipe",
@@ -72,7 +79,7 @@ test("CLAUDECODE=1 vs CLAUDECODE=0 comparison", async () => {
// Run with CLAUDECODE=1 (quiet output)
const result2 = spawnSync({
cmd: [bunExe(), "test", "test3.test.js"],
env: { ...bunEnv, CLAUDECODE: "1" },
env: { ...testEnv, CLAUDECODE: "1" },
cwd: dir,
stderr: "pipe",
stdout: "pipe",
@@ -116,7 +123,7 @@ test("CLAUDECODE flag handles no test files found", () => {
// Run with CLAUDECODE=0 (normal output) - no test files
const result1 = spawnSync({
cmd: [bunExe(), "test"],
env: { ...bunEnv, CLAUDECODE: "0" },
env: { ...testEnv, CLAUDECODE: "0" },
cwd: dir,
stderr: "pipe",
stdout: "pipe",
@@ -125,7 +132,7 @@ test("CLAUDECODE flag handles no test files found", () => {
// Run with CLAUDECODE=1 (quiet output) - no test files
const result2 = spawnSync({
cmd: [bunExe(), "test"],
env: { ...bunEnv, CLAUDECODE: "1" },
env: { ...testEnv, CLAUDECODE: "1" },
cwd: dir,
stderr: "pipe",
stdout: "pipe",

View File

@@ -387,21 +387,21 @@ FNF:1
FNH:1
DA:2,11
DA:3,17
LF:5
LF:2
LH:2
end_of_record
TN:
SF:test.test.ts
FNF:1
FNH:1
DA:2,60
DA:2,40
DA:3,41
DA:4,39
DA:6,42
DA:7,39
DA:8,36
DA:9,2
LF:10
LF:7
LH:7
end_of_record"
`);

View File

@@ -0,0 +1,99 @@
import { expect, test } from "bun:test";
import { bunEnv, bunExe, tempDir } from "harness";
// `--pass-with-no-tests` downgrades "no test files found" from a failure to
// a successful (exit 0) run, while still printing the diagnostic message.
test("--pass-with-no-tests exits with 0 when no test files found", async () => {
  using dir = tempDir("pass-with-no-tests", {
    "not-a-test.ts": `console.log("hello");`,
  });

  const proc = Bun.spawn({
    cmd: [bunExe(), "test", "--pass-with-no-tests"],
    cwd: String(dir),
    stdin: "ignore",
    stdout: "pipe",
    stderr: "pipe",
    env: bunEnv,
  });

  // Drain stderr while waiting for the process so the pipe cannot block it.
  const [stderrText, code] = await Promise.all([proc.stderr.text(), proc.exited]);

  expect(code).toBe(0);
  expect(stderrText).toContain("No tests found!");
});
// `--pass-with-no-tests` also applies when test files exist but the `-t`
// filter matches nothing: the run should still exit 0.
test("--pass-with-no-tests exits with 0 when filters match no tests", async () => {
  using dir = tempDir("pass-with-no-tests-filter", {
    "some.test.ts": `import { test } from "bun:test"; test("example", () => {});`,
  });

  const { exited } = Bun.spawn({
    cmd: [bunExe(), "test", "--pass-with-no-tests", "-t", "nonexistent"],
    cwd: String(dir),
    stdout: "pipe",
    stderr: "pipe",
    stdin: "ignore",
    env: bunEnv,
  });

  // Only the exit code is asserted here; the previous version read stderr
  // into an unused variable. This now matches the sibling "-t" test below.
  const exitCode = await exited;
  expect(exitCode).toBe(0);
});
// Without the flag, an empty test run is treated as a failure (exit 1).
test("without --pass-with-no-tests, exits with 1 when no test files found", async () => {
  using dir = tempDir("fail-with-no-tests", {
    "not-a-test.ts": `console.log("hello");`,
  });

  const proc = Bun.spawn({
    cmd: [bunExe(), "test"],
    cwd: String(dir),
    stdin: "ignore",
    stdout: "pipe",
    stderr: "pipe",
    env: bunEnv,
  });

  // Drain stderr concurrently with waiting for exit.
  const [stderrText, code] = await Promise.all([proc.stderr.text(), proc.exited]);

  expect(code).toBe(1);
  expect(stderrText).toContain("No tests found!");
});
// Without the flag, a `-t` filter that matches nothing is also a failure.
test("without --pass-with-no-tests, exits with 1 when filters match no tests", async () => {
  using dir = tempDir("fail-with-no-tests-filter", {
    "some.test.ts": `import { test } from "bun:test"; test("example", () => {});`,
  });

  const subprocess = Bun.spawn({
    cmd: [bunExe(), "test", "-t", "nonexistent"],
    cwd: String(dir),
    stdin: "ignore",
    stdout: "pipe",
    stderr: "pipe",
    env: bunEnv,
  });

  expect(await subprocess.exited).toBe(1);
});
// The flag only rescues *empty* runs; a genuine test failure must still
// produce a non-zero exit code.
test("--pass-with-no-tests still fails when tests fail", async () => {
  using dir = tempDir("pass-with-no-tests-but-fail", {
    "test.test.ts": `import { test, expect } from "bun:test"; test("failing", () => { expect(1).toBe(2); });`,
  });

  const subprocess = Bun.spawn({
    cmd: [bunExe(), "test", "--pass-with-no-tests"],
    cwd: String(dir),
    stdin: "ignore",
    stdout: "pipe",
    stderr: "pipe",
    env: bunEnv,
  });

  expect(await subprocess.exited).toBe(1);
});

View File

@@ -0,0 +1,132 @@
import { expect, test } from "bun:test";
import { bunEnv, bunExe, tempDir } from "harness";
// Verifies --rerun-each=N runs each test exactly N times (not N+1, not 1).
// The fixture counts runs via a globalThis counter, which survives the module
// re-evaluation that happens on each rerun.
test("--rerun-each should run tests exactly N times", async () => {
using dir = tempDir("test-rerun-each", {
"counter.test.ts": `
import { test, expect } from "bun:test";
// Use a global counter that persists across module reloads
if (!globalThis.testRunCounter) {
globalThis.testRunCounter = 0;
}
test("should increment counter", () => {
globalThis.testRunCounter++;
console.log(\`Run #\${globalThis.testRunCounter}\`);
expect(true).toBe(true);
});
`,
});
// Test with --rerun-each=3
await using proc = Bun.spawn({
cmd: [bunExe(), "test", "counter.test.ts", "--rerun-each=3"],
env: bunEnv,
cwd: String(dir),
stderr: "pipe",
stdout: "pipe",
});
// Collect both streams and the exit code concurrently.
const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
expect(exitCode).toBe(0);
// Should see "Run #1", "Run #2", "Run #3" in the output
expect(stdout).toContain("Run #1");
expect(stdout).toContain("Run #2");
expect(stdout).toContain("Run #3");
// Should NOT see "Run #4"
expect(stdout).not.toContain("Run #4");
// Should run exactly 3 tests - check stderr for test summary
const combined = stdout + stderr;
expect(combined).toMatch(/3 pass/);
// Test with --rerun-each=1 (should run once)
await using proc2 = Bun.spawn({
cmd: [bunExe(), "test", "counter.test.ts", "--rerun-each=1"],
env: bunEnv,
cwd: String(dir),
stderr: "pipe",
stdout: "pipe",
});
const [stdout2, stderr2, exitCode2] = await Promise.all([proc2.stdout.text(), proc2.stderr.text(), proc2.exited]);
expect(exitCode2).toBe(0);
// With N=1 the summary should report a single pass.
const combined2 = stdout2 + stderr2;
expect(combined2).toMatch(/1 pass/);
});
// Re-running one file N times must not inflate the reported file count:
// the summary counts distinct files, not runs.
test("--rerun-each should report correct file count", async () => {
  using dir = tempDir("test-rerun-each-file-count", {
    "test1.test.ts": `
import { test, expect } from "bun:test";
test("test in file 1", () => {
expect(true).toBe(true);
});
`,
  });

  await using proc = Bun.spawn({
    cmd: [bunExe(), "test", "test1.test.ts", "--rerun-each=3"],
    env: bunEnv,
    cwd: String(dir),
    stderr: "pipe",
    stdout: "pipe",
  });

  const [out, err, code] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
  expect(code).toBe(0);

  // Expect "Ran 3 tests across 1 file", never "across 3 files".
  const log = out + err;
  expect(log).toContain("Ran 3 tests across 1 file");
  expect(log).not.toContain("across 3 files");
});
// A failure on any rerun must fail the overall run, while the remaining
// reruns still execute and passes/failures are tallied individually.
test("--rerun-each should handle test failures correctly", async () => {
using dir = tempDir("test-rerun-each-fail", {
"fail.test.ts": `
import { test, expect } from "bun:test";
if (!globalThis.failCounter) {
globalThis.failCounter = 0;
}
test("fails on second run", () => {
globalThis.failCounter++;
console.log(\`Attempt #\${globalThis.failCounter}\`);
// Fail on the second run
expect(globalThis.failCounter).not.toBe(2);
});
`,
});
await using proc = Bun.spawn({
cmd: [bunExe(), "test", "fail.test.ts", "--rerun-each=3"],
env: bunEnv,
cwd: String(dir),
stderr: "pipe",
stdout: "pipe",
});
// Collect both streams and the exit code concurrently.
const [stdout, stderr, exitCode] = await Promise.all([proc.stdout.text(), proc.stderr.text(), proc.exited]);
// Should have non-zero exit code due to failure
expect(exitCode).not.toBe(0);
// Should see all three attempts
expect(stdout).toContain("Attempt #1");
expect(stdout).toContain("Attempt #2");
expect(stdout).toContain("Attempt #3");
// Should report 2 passes and 1 failure - check both stdout and stderr
const combined = stdout + stderr;
expect(combined).toMatch(/2 pass/);
expect(combined).toMatch(/1 fail/);
});

View File

@@ -0,0 +1,263 @@
import { expect, test } from "bun:test";
import { bunEnv, bunExe, tempDir } from "harness";
import { existsSync, readFileSync } from "node:fs";
import { join } from "node:path";
// BUN_WATCHER_TRACE=<path> should make the watcher write newline-delimited
// JSON events to <path>. This test triggers one file change under --watch,
// then validates the trace file exists and every line has the expected shape.
test("BUN_WATCHER_TRACE creates trace file with watch events", async () => {
using dir = tempDir("watcher-trace", {
"script.js": `console.log("ready");`,
});
const traceFile = join(String(dir), "trace.log");
const env = { ...bunEnv, BUN_WATCHER_TRACE: traceFile };
const proc = Bun.spawn({
cmd: [bunExe(), "--watch", "script.js"],
env,
cwd: String(dir),
stdout: "pipe",
stderr: "inherit",
stdin: "ignore",
});
const decoder = new TextDecoder();
let wroteModification = false;
// Wait for the initial run, trigger a change, then wait for the reload
for await (const chunk of proc.stdout) {
const str = decoder.decode(chunk);
if (!wroteModification && str.includes("ready")) {
wroteModification = true;
await Bun.write(join(String(dir), "script.js"), `console.log("modified");`);
continue;
}
if (wroteModification && str.includes("modified")) {
break;
}
}
// Stop the watcher before inspecting the trace file on disk.
proc.kill();
await proc.exited;
// Check that trace file was created
expect(existsSync(traceFile)).toBe(true);
const traceContent = readFileSync(traceFile, "utf-8");
const lines = traceContent
.trim()
.split("\n")
.filter(l => l.trim());
// Should have at least one event
expect(lines.length).toBeGreaterThan(0);
// Parse and validate JSON structure
for (const line of lines) {
const event = JSON.parse(line);
// Check required fields exist
expect(event).toHaveProperty("timestamp");
expect(event).toHaveProperty("files");
// Validate types
expect(typeof event.timestamp).toBe("number");
expect(typeof event.files).toBe("object");
// Validate files object structure
for (const [path, fileEvent] of Object.entries(event.files)) {
expect(typeof path).toBe("string");
expect(fileEvent).toHaveProperty("events");
expect(Array.isArray(fileEvent.events)).toBe(true);
// "changed" field is optional
if (fileEvent.changed) {
expect(Array.isArray(fileEvent.changed)).toBe(true);
}
}
}
}, 10000);
// Drives three watch reloads and checks the trace contains at least one
// "write" event attributable to script.js (either by path or via the
// optional "changed" file list).
test("BUN_WATCHER_TRACE with --watch flag", async () => {
using dir = tempDir("watcher-trace-watch", {
"script.js": `console.log("run", 0);`,
});
const traceFile = join(String(dir), "watch-trace.log");
const env = { ...bunEnv, BUN_WATCHER_TRACE: traceFile };
const proc = Bun.spawn({
cmd: [bunExe(), "--watch", "script.js"],
env,
cwd: String(dir),
stdout: "pipe",
stderr: "inherit",
stdin: "ignore",
});
// Each time run i is observed, rewrite the script to print run i+1,
// which triggers the next reload.
let i = 0;
for await (const chunk of proc.stdout) {
const str = new TextDecoder().decode(chunk);
if (str.includes(`run ${i}`)) {
i++;
if (i === 3) break; // Stop after 3 runs
await Bun.write(join(String(dir), "script.js"), `console.log("run", ${i});`);
}
}
proc.kill();
await proc.exited;
// Check that trace file was created
expect(existsSync(traceFile)).toBe(true);
const traceContent = readFileSync(traceFile, "utf-8");
const lines = traceContent
.trim()
.split("\n")
.filter(l => l.trim());
// Should have events from watching script.js
expect(lines.length).toBeGreaterThan(0);
// Validate JSON structure and find script.js events
let foundScriptEvent = false;
for (const line of lines) {
const event = JSON.parse(line);
// Check required fields exist
expect(event).toHaveProperty("timestamp");
expect(event).toHaveProperty("files");
// Validate types
expect(typeof event.timestamp).toBe("number");
expect(typeof event.files).toBe("object");
// Check for script.js events
for (const [path, fileEvent] of Object.entries(event.files)) {
expect(fileEvent).toHaveProperty("events");
expect(Array.isArray(fileEvent.events)).toBe(true);
if (
path.includes("script.js") ||
(Array.isArray(fileEvent.changed) && fileEvent.changed.some((f: string) => f?.includes("script.js")))
) {
foundScriptEvent = true;
// Should have write event
expect(fileEvent.events).toContain("write");
}
}
}
expect(foundScriptEvent).toBe(true);
}, 10000);
// An empty BUN_WATCHER_TRACE value must disable tracing entirely rather
// than creating a file with an empty name or writing anywhere else.
test("BUN_WATCHER_TRACE with empty path does not create trace", async () => {
using dir = tempDir("watcher-trace-empty", {
"test.js": `console.log("ready");`,
});
const env = { ...bunEnv, BUN_WATCHER_TRACE: "" };
const proc = Bun.spawn({
cmd: [bunExe(), "--watch", "test.js"],
env,
cwd: String(dir),
stdout: "pipe",
stderr: "inherit",
stdin: "ignore",
});
// Wait for first run, then exit
for await (const chunk of proc.stdout) {
const str = new TextDecoder().decode(chunk);
if (str.includes("ready")) {
break;
}
}
proc.kill();
await proc.exited;
// Should not create any trace file in the directory
const files = Array.from(new Bun.Glob("*.log").scanSync({ cwd: String(dir) }));
expect(files.length).toBe(0);
});
// Two consecutive watch sessions pointed at the same trace path: the second
// session must append to the existing file (line count strictly grows) and
// every line must remain valid newline-delimited JSON.
test("BUN_WATCHER_TRACE appends across reloads", async () => {
using dir = tempDir("watcher-trace-append", {
"app.js": `console.log("first-0");`,
});
const traceFile = join(String(dir), "append-trace.log");
const env = { ...bunEnv, BUN_WATCHER_TRACE: traceFile };
// First run
const proc1 = Bun.spawn({
cmd: [bunExe(), "--watch", "app.js"],
env,
cwd: String(dir),
stdout: "pipe",
stderr: "inherit",
stdin: "ignore",
});
// Drive two reloads by rewriting app.js after each observed run.
let i = 0;
for await (const chunk of proc1.stdout) {
const str = new TextDecoder().decode(chunk);
if (str.includes(`first-${i}`)) {
i++;
if (i === 2) break; // Stop after 2 runs
await Bun.write(join(String(dir), "app.js"), `console.log("first-${i}");`);
}
}
proc1.kill();
await proc1.exited;
// Record the trace size after the first session as a baseline.
const firstContent = readFileSync(traceFile, "utf-8");
const firstLines = firstContent
.trim()
.split("\n")
.filter(l => l.trim());
expect(firstLines.length).toBeGreaterThan(0);
// Second run - should append to the same file
const proc2 = Bun.spawn({
cmd: [bunExe(), "--watch", "app.js"],
env,
cwd: String(dir),
stdout: "pipe",
stderr: "inherit",
stdin: "ignore",
});
let j = 0;
for await (const chunk of proc2.stdout) {
const str = new TextDecoder().decode(chunk);
if (str.includes(`second-${j}`)) {
j++;
if (j === 2) break; // Stop after 2 runs
await Bun.write(join(String(dir), "app.js"), `console.log("second-${j}");`);
} else if (str.includes("first-1")) {
// Second process starts with previous file content ("first-1"), trigger first modification
await Bun.write(join(String(dir), "app.js"), `console.log("second-0");`);
}
}
proc2.kill();
await proc2.exited;
const secondContent = readFileSync(traceFile, "utf-8");
const secondLines = secondContent
.trim()
.split("\n")
.filter(l => l.trim());
// Should have more lines after second run
expect(secondLines.length).toBeGreaterThan(firstLines.length);
// All lines should be valid JSON
for (const line of secondLines) {
const event = JSON.parse(line);
expect(event).toHaveProperty("timestamp");
expect(event).toHaveProperty("files");
}
}, 10000);