Mirror of https://github.com/oven-sh/bun (synced 2026-02-02 15:08:46 +00:00)
Merge queue (#14639)
@@ -2,8 +2,9 @@ import { Database } from "bun:sqlite";
 import { describe, expect } from "bun:test";
 import { rmSync } from "fs";
 import { itBundled } from "./expectBundled";
+import { isFlaky, isWindows } from "harness";

-describe("bundler", () => {
+describe.todoIf(isFlaky && isWindows)("bundler", () => {
   itBundled("compile/HelloWorld", {
     compile: true,
     files: {
@@ -213,7 +214,7 @@ describe("bundler", () => {
     },
   });
   itBundled("compile/VariousBunAPIs", {
-    todo: process.platform === "win32", // TODO(@paperdave)
+    todo: isWindows, // TODO(@paperdave)
     compile: true,
     files: {
      "/entry.ts": `
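The recurring pattern in this commit is `describe.todoIf(...)` / `test.todoIf(...)` from bun:test: the condition is evaluated first and returns a registration function, so the suite or test is recorded as todo (instead of running) only when the condition holds. A minimal sketch of the shape, using the isFlaky/isWindows flags that the harness change below exports; the suite and test names here are illustrative only:

import { describe, test } from "bun:test";
import { isFlaky, isWindows } from "harness";

// Registered as "todo" (not executed) when both flags are true;
// otherwise behaves like a normal describe/test.
describe.todoIf(isFlaky && isWindows)("example suite", () => {
  test("example case", () => {
    // ...
  });
});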
@@ -1,6 +1,7 @@
 import { describe, expect } from "bun:test";
 import { join } from "node:path";
 import { itBundled } from "./expectBundled";
+import { isBroken, isWindows } from "harness";

 describe("bundler", () => {
   itBundled("edgecase/EmptyFile", {
@@ -1344,6 +1345,7 @@ describe("bundler", () => {
     },
     target: "bun",
     run: true,
+    todo: isBroken && isWindows,
   });
   itBundled("edgecase/PackageExternalDoNotBundleNodeModules", {
     files: {
@@ -1,10 +1,10 @@
 import { spawn } from "bun";
 import { describe, expect, test } from "bun:test";
-import { bunEnv, bunExe, forEachLine, tempDirWithFiles } from "harness";
+import { bunEnv, bunExe, forEachLine, isBroken, isWindows, tempDirWithFiles } from "harness";
 import { writeFile } from "node:fs/promises";
 import { join } from "node:path";

-describe("--watch works", async () => {
+describe.todoIf(isBroken && isWindows)("--watch works", async () => {
   for (const watchedFile of ["entry.js", "tmp.js"]) {
     test(`with ${watchedFile}`, async () => {
       const tmpdir_ = tempDirWithFiles("watch-fixture", {
@@ -23,6 +23,8 @@ import {
   writeShebangScript,
   stderrForInstall,
   tls,
+  isFlaky,
+  isMacOS,
 } from "harness";
 import { join, resolve, sep } from "path";
 import { readdirSorted } from "../dummy.registry";
@@ -3406,84 +3408,87 @@ describe("hoisting", async () => {
     },
   ];
   for (const { dependencies, expected, situation } of peerTests) {
-    test(`it should hoist ${expected} when ${situation}`, async () => {
-      await writeFile(
-        join(packageDir, "package.json"),
-        JSON.stringify({
-          name: "foo",
-          dependencies,
-        }),
-      );
+    test.todoIf(isFlaky && isMacOS && situation === "peer ^1.0.2")(
+      `it should hoist ${expected} when ${situation}`,
+      async () => {
+        await writeFile(
+          join(packageDir, "package.json"),
+          JSON.stringify({
+            name: "foo",
+            dependencies,
+          }),
+        );

-      var { stdout, stderr, exited } = spawn({
-        cmd: [bunExe(), "install"],
-        cwd: packageDir,
-        stdout: "pipe",
-        stdin: "pipe",
-        stderr: "pipe",
-        env,
-      });
+        var { stdout, stderr, exited } = spawn({
+          cmd: [bunExe(), "install"],
+          cwd: packageDir,
+          stdout: "pipe",
+          stdin: "pipe",
+          stderr: "pipe",
+          env,
+        });

-      var err = await new Response(stderr).text();
-      var out = await new Response(stdout).text();
-      expect(err).toContain("Saved lockfile");
-      expect(err).not.toContain("not found");
-      expect(err).not.toContain("error:");
-      for (const dep of Object.keys(dependencies)) {
-        expect(out).toContain(`+ ${dep}@${dependencies[dep]}`);
-      }
-      expect(await exited).toBe(0);
-      assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl());
+        var err = await new Response(stderr).text();
+        var out = await new Response(stdout).text();
+        expect(err).toContain("Saved lockfile");
+        expect(err).not.toContain("not found");
+        expect(err).not.toContain("error:");
+        for (const dep of Object.keys(dependencies)) {
+          expect(out).toContain(`+ ${dep}@${dependencies[dep]}`);
+        }
+        expect(await exited).toBe(0);
+        assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl());

-      expect(await file(join(packageDir, "node_modules", "a-dep", "package.json")).text()).toContain(expected);
+        expect(await file(join(packageDir, "node_modules", "a-dep", "package.json")).text()).toContain(expected);

-      await rm(join(packageDir, "bun.lockb"));
+        await rm(join(packageDir, "bun.lockb"));

-      ({ stdout, stderr, exited } = spawn({
-        cmd: [bunExe(), "install"],
-        cwd: packageDir,
-        stdout: "pipe",
-        stdin: "pipe",
-        stderr: "pipe",
-        env,
-      }));
+        ({ stdout, stderr, exited } = spawn({
+          cmd: [bunExe(), "install"],
+          cwd: packageDir,
+          stdout: "pipe",
+          stdin: "pipe",
+          stderr: "pipe",
+          env,
+        }));

-      err = await new Response(stderr).text();
-      out = await new Response(stdout).text();
-      expect(err).toContain("Saved lockfile");
-      expect(err).not.toContain("not found");
-      expect(err).not.toContain("error:");
-      if (out.includes("installed")) {
-        console.log("stdout:", out);
-      }
-      expect(out).not.toContain("package installed");
-      expect(await exited).toBe(0);
-      assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl());
+        err = await new Response(stderr).text();
+        out = await new Response(stdout).text();
+        expect(err).toContain("Saved lockfile");
+        expect(err).not.toContain("not found");
+        expect(err).not.toContain("error:");
+        if (out.includes("installed")) {
+          console.log("stdout:", out);
+        }
+        expect(out).not.toContain("package installed");
+        expect(await exited).toBe(0);
+        assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl());

-      expect(await file(join(packageDir, "node_modules", "a-dep", "package.json")).text()).toContain(expected);
+        expect(await file(join(packageDir, "node_modules", "a-dep", "package.json")).text()).toContain(expected);

-      await rm(join(packageDir, "node_modules"), { recursive: true, force: true });
+        await rm(join(packageDir, "node_modules"), { recursive: true, force: true });

-      ({ stdout, stderr, exited } = spawn({
-        cmd: [bunExe(), "install"],
-        cwd: packageDir,
-        stdout: "pipe",
-        stdin: "pipe",
-        stderr: "pipe",
-        env,
-      }));
+        ({ stdout, stderr, exited } = spawn({
+          cmd: [bunExe(), "install"],
+          cwd: packageDir,
+          stdout: "pipe",
+          stdin: "pipe",
+          stderr: "pipe",
+          env,
+        }));

-      err = await new Response(stderr).text();
-      out = await new Response(stdout).text();
-      expect(err).not.toContain("Saved lockfile");
-      expect(err).not.toContain("not found");
-      expect(err).not.toContain("error:");
-      expect(out).not.toContain("package installed");
-      expect(await exited).toBe(0);
-      assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl());
+        err = await new Response(stderr).text();
+        out = await new Response(stdout).text();
+        expect(err).not.toContain("Saved lockfile");
+        expect(err).not.toContain("not found");
+        expect(err).not.toContain("error:");
+        expect(out).not.toContain("package installed");
+        expect(await exited).toBe(0);
+        assertManifestsPopulated(join(packageDir, ".bun-cache"), registryUrl());

-      expect(await file(join(packageDir, "node_modules", "a-dep", "package.json")).text()).toContain(expected);
-    });
+        expect(await file(join(packageDir, "node_modules", "a-dep", "package.json")).text()).toContain(expected);
+      },
+    );
   }
 });
@@ -1,24 +1,28 @@
 import { test, expect } from "bun:test";
-import { bunEnv, bunExe } from "harness";
+import { bunEnv, bunExe, isFlaky, isLinux } from "harness";
 import path from "path";

-test.each([true, false])("processes get killed", async sync => {
-  const { exited, stdout, stderr } = Bun.spawn({
-    cmd: [
-      bunExe(),
-      "test",
-      path.join(import.meta.dir, sync ? "process-kill-fixture-sync.ts" : "process-kill-fixture.ts"),
-    ],
-    stdout: "pipe",
-    stderr: "pipe",
-    stdin: "inherit",
-    env: bunEnv,
-  });
-  const [out, err, exitCode] = await Promise.all([new Response(stdout).text(), new Response(stderr).text(), exited]);
-  console.log(out);
-  console.log(err);
-  // TODO: figure out how to handle terminatio nexception from spawn sync properly.
-  expect(exitCode).not.toBe(0);
-  expect(out).not.toContain("This should not be printed!");
-  expect(err).toContain("killed 1 dangling process");
-});
+if (isFlaky && isLinux) {
+  test.todo("processes get killed");
+} else {
+  test.each([true, false])("processes get killed", async sync => {
+    const { exited, stdout, stderr } = Bun.spawn({
+      cmd: [
+        bunExe(),
+        "test",
+        path.join(import.meta.dir, sync ? "process-kill-fixture-sync.ts" : "process-kill-fixture.ts"),
+      ],
+      stdout: "pipe",
+      stderr: "pipe",
+      stdin: "inherit",
+      env: bunEnv,
+    });
+    const [out, err, exitCode] = await Promise.all([new Response(stdout).text(), new Response(stderr).text(), exited]);
+    console.log(out);
+    console.log(err);
+    // TODO: figure out how to handle terminatio nexception from spawn sync properly.
+    expect(exitCode).not.toBe(0);
+    expect(out).not.toContain("This should not be printed!");
+    expect(err).toContain("killed 1 dangling process");
+  });
+}
@@ -1,42 +1,46 @@
 import type { Subprocess } from "bun";
 import { spawn } from "bun";
 import { afterEach, expect, it } from "bun:test";
-import { bunEnv, bunExe, tmpdirSync } from "harness";
+import { bunEnv, bunExe, isBroken, isWindows, tmpdirSync } from "harness";
 import { rmSync } from "node:fs";
 import { join } from "node:path";

 let watchee: Subprocess;

 for (const dir of ["dir", "©️"]) {
-  it(`should watch files ${dir === "dir" ? "" : "(non-ascii path)"}`, async () => {
-    const cwd = join(tmpdirSync(), dir);
-    const path = join(cwd, "watchee.js");
+  it.todoIf(isBroken && isWindows)(
+    `should watch files ${dir === "dir" ? "" : "(non-ascii path)"}`,
+    async () => {
+      const cwd = join(tmpdirSync(), dir);
+      const path = join(cwd, "watchee.js");

-    const updateFile = async (i: number) => {
-      await Bun.write(path, `console.log(${i}, __dirname);`);
-    };
+      const updateFile = async (i: number) => {
+        await Bun.write(path, `console.log(${i}, __dirname);`);
+      };

-    let i = 0;
-    await updateFile(i);
-    await Bun.sleep(1000);
-    watchee = spawn({
-      cwd,
-      cmd: [bunExe(), "--watch", "watchee.js"],
-      env: bunEnv,
-      stdout: "pipe",
-      stderr: "inherit",
-      stdin: "ignore",
-    });
+      let i = 0;
+      await updateFile(i);
+      await Bun.sleep(1000);
+      watchee = spawn({
+        cwd,
+        cmd: [bunExe(), "--watch", "watchee.js"],
+        env: bunEnv,
+        stdout: "pipe",
+        stderr: "inherit",
+        stdin: "ignore",
+      });

-    for await (const line of watchee.stdout) {
-      if (i == 10) break;
-      var str = new TextDecoder().decode(line);
-      expect(str).toContain(`${i} ${cwd}`);
-      i++;
-      await updateFile(i);
-    }
-    rmSync(path);
-  }, 10000);
+      for await (const line of watchee.stdout) {
+        if (i == 10) break;
+        var str = new TextDecoder().decode(line);
+        expect(str).toContain(`${i} ${cwd}`);
+        i++;
+        await updateFile(i);
+      }
+      rmSync(path);
+    },
+    10000,
+  );
 }

 afterEach(() => {
@@ -19,6 +19,13 @@ export const isDebug = Bun.version.includes("debug");
 export const isCI = process.env.CI !== undefined;
 export const isBuildKite = process.env.BUILDKITE === "true";

+// Use these to mark a test as flaky or broken.
+// This will help us keep track of these tests.
+//
+// test.todoIf(isFlaky && isMacOS)("this test is flaky");
+export const isFlaky = isCI;
+export const isBroken = isCI;
+
 export const bunEnv: NodeJS.ProcessEnv = {
   ...process.env,
   GITHUB_ACTIONS: "false",
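Because isFlaky and isBroken are both defined as isCI for now, the platform flag in each condition is what actually selects where a test becomes todo. A small sketch of how the flags combine, assuming the harness exports shown above; the test name is illustrative:

import { test } from "bun:test";
import { isBroken, isWindows } from "harness";

// isBroken === isCI, so this only becomes "todo" on Windows CI runners;
// locally (even on Windows) the test still runs.
test.todoIf(isBroken && isWindows)("example: known-broken on Windows CI", () => {
  // ...
});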
@@ -1,5 +1,5 @@
 import { afterAll, beforeAll, describe, expect, it, mock, test } from "bun:test";
-import { fillRepeating, isWindows } from "harness";
+import { fillRepeating, isBroken, isMacOS, isWindows } from "harness";

 const routes = {
   "/foo": new Response("foo", {
@@ -38,7 +38,7 @@ for (const [path, response] of Object.entries(routes)) {
   static_responses[path] = await response.clone().blob();
 }

-describe("static", () => {
+describe.todoIf(isBroken && isMacOS)("static", () => {
   let server: Server;
   let handler = mock(req => {
     return new Response(req.url, {
@@ -1,5 +1,5 @@
 import { expect, test } from "bun:test";
-import { withoutAggressiveGC } from "harness";
+import { isBroken, isWindows, withoutAggressiveGC } from "harness";
 import { tmpdir } from "os";
 import { join } from "path";
@@ -126,34 +126,38 @@ test("formData uploads roundtrip, without a call to .body", async () => {
   expect(await (resData.get("file") as Blob).arrayBuffer()).toEqual(await file.arrayBuffer());
 });

-test("uploads roundtrip with sendfile()", async () => {
-  const hugeTxt = Buffer.allocUnsafe(1024 * 1024 * 32 * "huge".length);
-  hugeTxt.fill("huge");
-  const hash = Bun.CryptoHasher.hash("sha256", hugeTxt, "hex");
+test.todoIf(isBroken && isWindows)(
+  "uploads roundtrip with sendfile()",
+  async () => {
+    const hugeTxt = Buffer.allocUnsafe(1024 * 1024 * 32 * "huge".length);
+    hugeTxt.fill("huge");
+    const hash = Bun.CryptoHasher.hash("sha256", hugeTxt, "hex");

-  const path = join(tmpdir(), "huge.txt");
-  require("fs").writeFileSync(path, hugeTxt);
-  using server = Bun.serve({
-    port: 0,
-    development: false,
-    maxRequestBodySize: hugeTxt.byteLength * 2,
-    async fetch(req) {
-      const hasher = new Bun.CryptoHasher("sha256");
-      for await (let chunk of req.body!) {
-        hasher.update(chunk);
-      }
-      return new Response(hasher.digest("hex"));
-    },
-  });
+    const path = join(tmpdir(), "huge.txt");
+    require("fs").writeFileSync(path, hugeTxt);
+    using server = Bun.serve({
+      port: 0,
+      development: false,
+      maxRequestBodySize: hugeTxt.byteLength * 2,
+      async fetch(req) {
+        const hasher = new Bun.CryptoHasher("sha256");
+        for await (let chunk of req.body!) {
+          hasher.update(chunk);
+        }
+        return new Response(hasher.digest("hex"));
+      },
+    });

-  const resp = await fetch(server.url, {
-    body: Bun.file(path),
-    method: "PUT",
-  });
+    const resp = await fetch(server.url, {
+      body: Bun.file(path),
+      method: "PUT",
+    });

-  expect(resp.status).toBe(200);
-  expect(await resp.text()).toBe(hash);
-}, 10_000);
+    expect(resp.status).toBe(200);
+    expect(await resp.text()).toBe(hash);
+  },
+  10_000,
+);

 test("missing file throws the expected error", async () => {
   Bun.gc(true);
@@ -1,6 +1,6 @@
 import type { Subprocess } from "bun";
 import { afterEach, beforeEach, expect, it } from "bun:test";
-import { bunEnv, bunExe, isDebug } from "harness";
+import { bunEnv, bunExe, isDebug, isFlaky, isLinux } from "harness";
 import { join } from "path";

 const payload = Buffer.alloc(512 * 1024, "1").toString("utf-8"); // decent size payload to test memory leak
@@ -152,12 +152,12 @@ for (const test_info of [
   ["should not leak memory when buffering the body", callBuffering, false, 64],
   ["should not leak memory when buffering a JSON body", callJSONBuffering, false, 64],
   ["should not leak memory when buffering the body and accessing req.body", callBufferingBodyGetter, false, 64],
-  ["should not leak memory when streaming the body", callStreaming, false, 64],
+  ["should not leak memory when streaming the body", callStreaming, isFlaky && isLinux, 64],
   ["should not leak memory when streaming the body incompletely", callIncompleteStreaming, false, 64],
   ["should not leak memory when streaming the body and echoing it back", callStreamingEcho, false, 64],
 ] as const) {
   const [testName, fn, skip, maxMemoryGrowth] = test_info;
-  it(
+  it.todoIf(skip)(
     testName,
     async () => {
       const report = await calculateMemoryLeak(fn);