ci: Retry and detect flaky tests (#15798)

Ashcon Partovi
2024-12-16 17:04:33 -08:00
committed by Kai Tamkun
parent 350ceb1c50
commit 2bbbbef7c8
9 changed files with 368 additions and 342 deletions
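In CI, each test now gets up to five attempts by default (retries defaults to "4", and N retries means N+1 attempts), with a randomized 5-15 second pause between attempts. A test that fails and then passes on a later attempt is recorded as flaky: it is annotated as a warning, grouped with other flaky tests, and excluded from the failing-test totals. Only tests that fail every attempt count as failures. A simplified standalone sketch of that flow (runWithRetries, runOnce, and TestResult are illustrative names, not the runner's actual API):

type TestResult = { ok: boolean; error?: string; stdoutPreview?: string };

// Illustrative sketch of the retry/flaky classification added to runTest();
// runOnce() stands in for a single startGroup(...) test invocation.
async function runWithRetries(
  runOnce: () => Promise<TestResult>,
  maxAttempts: number, // 1 + retries, e.g. 5 in CI
): Promise<{ outcome: "ok" | "flaky" | "failed"; result: TestResult }> {
  let firstFailure: TestResult | undefined;
  for (let attempt = 1; attempt <= maxAttempts; attempt++) {
    if (attempt > 1) {
      // jittered 5-15 s pause before re-running, matching the runner's delay
      await new Promise(resolve => setTimeout(resolve, 5_000 + Math.random() * 10_000));
    }
    const result = await runOnce();
    if (result.ok) {
      return { outcome: firstFailure ? "flaky" : "ok", result };
    }
    firstFailure ??= result; // keep the first failure for reporting
  }
  return { outcome: "failed", result: firstFailure! };
}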

View File

@@ -102,6 +102,10 @@ const { values: options, positionals: filters } = parseArgs({
type: "string",
default: undefined,
},
["retries"]: {
type: "string",
default: isCI ? "4" : "0", // N retries = N+1 attempts
},
},
});
@@ -141,7 +145,11 @@ async function runTests() {
let i = 0;
let total = vendorTotal + tests.length + 2;
const results = [];
const okResults = [];
const flakyResults = [];
const failedResults = [];
const maxAttempts = 1 + (parseInt(options["retries"]) || 0);
/**
* @param {string} title
@@ -149,43 +157,79 @@ async function runTests() {
* @returns {Promise<TestResult>}
*/
const runTest = async (title, fn) => {
const label = `${getAnsi("gray")}[${++i}/${total}]${getAnsi("reset")} ${title}`;
const result = await startGroup(label, fn);
results.push(result);
const index = ++i;
if (isBuildkite) {
const { ok, error, stdoutPreview } = result;
if (title.startsWith("vendor")) {
const markdown = formatTestToMarkdown({ ...result, testPath: title });
if (markdown) {
reportAnnotationToBuildKite({ label: title, content: markdown, style: "warning", priority: 5 });
}
} else {
const markdown = formatTestToMarkdown(result);
if (markdown) {
reportAnnotationToBuildKite({ label: title, content: markdown, style: "error" });
}
let result, failure, flaky;
for (let attempt = 1; attempt <= maxAttempts; attempt++) {
if (attempt > 1) {
await new Promise(resolve => setTimeout(resolve, 5000 + Math.random() * 10_000));
}
if (!ok) {
const label = `${getAnsi("red")}[${i}/${total}] ${title} - ${error}${getAnsi("reset")}`;
startGroup(label, () => {
process.stderr.write(stdoutPreview);
});
result = await startGroup(
attempt === 1
? `${getAnsi("gray")}[${index}/${total}]${getAnsi("reset")} ${title}`
: `${getAnsi("gray")}[${index}/${total}]${getAnsi("reset")} ${title} ${getAnsi("gray")}[attempt #${attempt}]${getAnsi("reset")}`,
fn,
);
const { ok, stdoutPreview, error } = result;
if (ok) {
if (failure) {
flakyResults.push(failure);
} else {
okResults.push(result);
}
break;
}
const color = attempt >= maxAttempts ? "red" : "yellow";
const label = `${getAnsi(color)}[${index}/${total}] ${title} - ${error}${getAnsi("reset")}`;
startGroup(label, () => {
process.stderr.write(stdoutPreview);
});
failure ||= result;
flaky ||= true;
if (attempt >= maxAttempts) {
flaky = false;
failedResults.push(failure);
}
}
if (!failure) {
return result;
}
if (isBuildkite) {
// Group flaky tests together, regardless of the title
const context = flaky ? "flaky" : title;
const style = flaky || title.startsWith("vendor") ? "warning" : "error";
if (title.startsWith("vendor")) {
const content = formatTestToMarkdown({ ...failure, testPath: title });
if (content) {
reportAnnotationToBuildKite({ context, label: title, content, style });
}
} else {
const content = formatTestToMarkdown(failure);
if (content) {
reportAnnotationToBuildKite({ context, label: title, content, style });
}
}
}
if (isGithubAction) {
const summaryPath = process.env["GITHUB_STEP_SUMMARY"];
if (summaryPath) {
const longMarkdown = formatTestToMarkdown(result);
const longMarkdown = formatTestToMarkdown(failure);
appendFileSync(summaryPath, longMarkdown);
}
const shortMarkdown = formatTestToMarkdown(result, true);
const shortMarkdown = formatTestToMarkdown(failure, true);
appendFileSync("comment.md", shortMarkdown);
}
if (options["bail"] && !result.ok) {
if (options["bail"]) {
process.exit(getExitCode("fail"));
}
@@ -199,7 +243,7 @@ async function runTests() {
}
}
if (results.every(({ ok }) => ok)) {
if (!failedResults.length) {
for (const testPath of tests) {
const title = relative(cwd, join(testsPath, testPath)).replace(/\\/g, "/");
if (title.startsWith("test/js/node/test/parallel/")) {
@@ -270,21 +314,37 @@ async function runTests() {
}
}
const failedTests = results.filter(({ ok }) => !ok);
if (isGithubAction) {
reportOutputToGitHubAction("failing_tests_count", failedTests.length);
const markdown = formatTestToMarkdown(failedTests);
reportOutputToGitHubAction("failing_tests_count", failedResults.length);
const markdown = formatTestToMarkdown(failedResults);
reportOutputToGitHubAction("failing_tests", markdown);
}
if (!isCI) {
!isQuiet && console.log("-------");
!isQuiet && console.log("passing", results.length - failedTests.length, "/", results.length);
for (const { testPath } of failedTests) {
!isQuiet && console.log("-", testPath);
if (!isCI && !isQuiet) {
console.table({
"Total Tests": okResults.length + failedResults.length + flakyResults.length,
"Passed Tests": okResults.length,
"Failing Tests": failedResults.length,
"Flaky Tests": flakyResults.length,
});
if (failedResults.length) {
console.log(`${getAnsi("red")}Failing Tests:${getAnsi("reset")}`);
for (const { testPath } of failedResults) {
console.log(`${getAnsi("red")}- ${testPath}${getAnsi("reset")}`);
}
}
if (flakyResults.length) {
console.log(`${getAnsi("yellow")}Flaky Tests:${getAnsi("reset")}`);
for (const { testPath } of flakyResults) {
console.log(`${getAnsi("yellow")}- ${testPath}${getAnsi("reset")}`);
}
}
}
return results;
// Exclude flaky tests from the final results
return [...okResults, ...failedResults];
}
/**
@@ -1293,6 +1353,7 @@ function listArtifactsFromBuildKite(glob, step) {
/**
* @typedef {object} BuildkiteAnnotation
* @property {string} [context]
* @property {string} label
* @property {string} content
* @property {"error" | "warning" | "info"} [style]
@@ -1303,10 +1364,10 @@ function listArtifactsFromBuildKite(glob, step) {
/**
* @param {BuildkiteAnnotation} annotation
*/
function reportAnnotationToBuildKite({ label, content, style = "error", priority = 3, attempt = 0 }) {
function reportAnnotationToBuildKite({ context, label, content, style = "error", priority = 3, attempt = 0 }) {
const { error, status, signal, stderr } = spawnSync(
"buildkite-agent",
["annotate", "--append", "--style", `${style}`, "--context", `${label}`, "--priority", `${priority}`],
["annotate", "--append", "--style", `${style}`, "--context", `${context || label}`, "--priority", `${priority}`],
{
input: content,
stdio: ["pipe", "ignore", "pipe"],
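The annotation helper also gains an optional context: flaky tests all reuse the context "flaky" with a "warning" style, so buildkite-agent annotate --append stacks them into a single annotation, while hard failures keep their per-test context and "error" style. A hypothetical call site (the label and content here are made up for illustration):

// Every flaky test appends to the same "flaky" annotation on Buildkite.
reportAnnotationToBuildKite({
  context: "flaky",
  label: "test/js/bun/http/serve.test.ts", // illustrative test title
  content: "...markdown from formatTestToMarkdown(failure)...",
  style: "warning",
});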

View File

@@ -1336,7 +1336,7 @@ describe("bundler", () => {
target: "bun",
run: true,
todo: isBroken && isWindows,
debugTimeoutScale: 5,
timeoutScale: 5,
});
itBundled("edgecase/PackageExternalDoNotBundleNodeModules", {
files: {

View File

@@ -689,7 +689,7 @@ describe("bundler", () => {
expect(resolveCount).toBe(5050);
expect(loadCount).toBe(101);
},
debugTimeoutScale: 3,
timeoutScale: 3,
};
});
// itBundled("plugin/ManyPlugins", ({ root }) => {
@@ -832,4 +832,3 @@ describe("bundler", () => {
};
});
});

View File

@@ -6,7 +6,7 @@ import { callerSourceOrigin } from "bun:jsc";
import type { Matchers } from "bun:test";
import * as esbuild from "esbuild";
import { existsSync, mkdirSync, mkdtempSync, readdirSync, readFileSync, realpathSync, rmSync, writeFileSync } from "fs";
import { bunEnv, bunExe, isDebug } from "harness";
import { bunEnv, bunExe, isCI, isDebug } from "harness";
import { tmpdir } from "os";
import path from "path";
import { SourceMapConsumer } from "source-map";
@@ -278,8 +278,6 @@ export interface BundlerTestInput {
/** Multiplier for test timeout */
timeoutScale?: number;
/** Multiplier for test timeout when using bun-debug. Debug builds already have a higher timeout. */
debugTimeoutScale?: number;
/* determines whether or not anything should be passed to outfile, outdir, etc. */
generateOutput?: boolean;
@@ -1663,8 +1661,7 @@ export function itBundled(
id,
() => expectBundled(id, opts as any),
// sourcemap code is slow
(opts.snapshotSourceMap ? (isDebug ? Infinity : 30_000) : isDebug ? 15_000 : 5_000) *
((isDebug ? opts.debugTimeoutScale : opts.timeoutScale) ?? 1),
isCI ? undefined : isDebug ? Infinity : (opts.snapshotSourceMap ? 30_000 : 5_000) * (opts.timeoutScale ?? 1),
);
}
return ref;
@@ -1676,8 +1673,7 @@ itBundled.only = (id: string, opts: BundlerTestInput) => {
id,
() => expectBundled(id, opts as any),
// sourcemap code is slow
(opts.snapshotSourceMap ? (isDebug ? Infinity : 30_000) : isDebug ? 15_000 : 5_000) *
((isDebug ? opts.debugTimeoutScale : opts.timeoutScale) ?? 1),
isCI ? undefined : isDebug ? Infinity : (opts.snapshotSourceMap ? 30_000 : 5_000) * (opts.timeoutScale ?? 1),
);
};
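With this change the bundler tests also stop computing an explicit timeout in CI and drop the separate debugTimeoutScale knob. A small sketch of how the new expression resolves (the helper name is illustrative; the constants mirror the expression above):

// Roughly how the per-test timeout now resolves for itBundled tests.
function bundlerTestTimeout(
  isCI: boolean,
  isDebug: boolean,
  snapshotSourceMap: boolean,
  timeoutScale = 1,
): number | undefined {
  if (isCI) return undefined; // fall back to the test runner's default timeout in CI
  if (isDebug) return Infinity; // debug builds get no per-test limit locally
  return (snapshotSourceMap ? 30_000 : 5_000) * timeoutScale;
}
// e.g. a local release build with timeoutScale: 3 gets 15_000 ms,
// or 90_000 ms when snapshotSourceMap is set.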

View File

@@ -1,5 +1,5 @@
import { describe, expect, test } from "bun:test";
import { bunEnv, bunExe, isWindows, tempDirWithFiles } from "harness";
import { bunEnv, bunExe, isBroken, isIntelMacOS, isWindows, tempDirWithFiles } from "harness";
import { join } from "path";
test("require.cache is not an empty object literal when inspected", () => {
@@ -32,7 +32,7 @@ test("require.cache does not include unevaluated modules", () => {
expect(exitCode).toBe(0);
});
describe("files transpiled and loaded don't leak the output source code", () => {
describe.skipIf(isBroken && isIntelMacOS)("files transpiled and loaded don't leak the output source code", () => {
test("via require() with a lot of long export names", () => {
let text = "";
for (let i = 0; i < 10000; i++) {

View File

@@ -18,8 +18,10 @@ export const isWindows = process.platform === "win32";
export const isIntelMacOS = isMacOS && process.arch === "x64";
export const isDebug = Bun.version.includes("debug");
export const isCI = process.env.CI !== undefined;
export const isBuildKite = process.env.BUILDKITE === "true";
export const libcFamily = detectLibc.familySync() as "glibc" | "musl";
export const isMusl = isLinux && libcFamily === "musl";
export const isGlibc = isLinux && libcFamily === "glibc";
export const isBuildKite = process.env.BUILDKITE === "true";
export const isVerbose = process.env.DEBUG === "1";
// Use these to mark a test as flaky or broken.

View File

@@ -1,7 +1,7 @@
import { file, gc, Serve, serve, Server } from "bun";
import { afterAll, afterEach, describe, expect, it, mock } from "bun:test";
import { readFileSync, writeFileSync } from "fs";
import { bunEnv, bunExe, dumpStats, isIPv4, isIPv6, isPosix, tls, tmpdirSync } from "harness";
import { bunEnv, bunExe, dumpStats, isBroken, isIntelMacOS, isIPv4, isIPv6, isPosix, tls, tmpdirSync } from "harness";
import { join, resolve } from "path";
// import { renderToReadableStream } from "react-dom/server";
// import app_jsx from "./app.jsx";
@@ -213,92 +213,95 @@ for (let withDelay of [true, false]) {
});
}
}
describe("1000 uploads & downloads in batches of 64 do not leak ReadableStream", () => {
for (let isDirect of [true, false] as const) {
it(
isDirect ? "direct" : "default",
async () => {
const blob = new Blob([new Uint8Array(1024 * 768).fill(123)]);
Bun.gc(true);
describe.todoIf(isBroken && isIntelMacOS)(
"1000 uploads & downloads in batches of 64 do not leak ReadableStream",
() => {
for (let isDirect of [true, false] as const) {
it(
isDirect ? "direct" : "default",
async () => {
const blob = new Blob([new Uint8Array(1024 * 768).fill(123)]);
Bun.gc(true);
const expected = Bun.CryptoHasher.hash("sha256", blob, "base64");
const initialCount = heapStats().objectTypeCounts.ReadableStream || 0;
const expected = Bun.CryptoHasher.hash("sha256", blob, "base64");
const initialCount = heapStats().objectTypeCounts.ReadableStream || 0;
await runTest(
{
async fetch(req) {
var hasher = new Bun.SHA256();
for await (const chunk of req.body) {
await Bun.sleep(0);
hasher.update(chunk);
await runTest(
{
async fetch(req) {
var hasher = new Bun.SHA256();
for await (const chunk of req.body) {
await Bun.sleep(0);
hasher.update(chunk);
}
return new Response(
isDirect
? new ReadableStream({
type: "direct",
async pull(controller) {
await Bun.sleep(0);
controller.write(Buffer.from(hasher.digest("base64")));
await controller.flush();
controller.close();
},
})
: new ReadableStream({
async pull(controller) {
await Bun.sleep(0);
controller.enqueue(Buffer.from(hasher.digest("base64")));
controller.close();
},
}),
);
},
},
async server => {
const count = 1000;
async function callback() {
const response = await fetch(server.url, {
body: blob,
method: "POST",
});
// We are testing for ReadableStream leaks, so we use the ReadableStream here.
const chunks = [];
for await (const chunk of response.body) {
chunks.push(chunk);
}
const digest = Buffer.from(Bun.concatArrayBuffers(chunks)).toString();
expect(digest).toBe(expected);
Bun.gc(false);
}
return new Response(
isDirect
? new ReadableStream({
type: "direct",
async pull(controller) {
await Bun.sleep(0);
controller.write(Buffer.from(hasher.digest("base64")));
await controller.flush();
controller.close();
},
})
: new ReadableStream({
async pull(controller) {
await Bun.sleep(0);
controller.enqueue(Buffer.from(hasher.digest("base64")));
controller.close();
},
}),
{
let remaining = count;
const batchSize = 64;
while (remaining > 0) {
const promises = new Array(count);
for (let i = 0; i < batchSize && remaining > 0; i++) {
promises[i] = callback();
}
await Promise.all(promises);
remaining -= batchSize;
}
}
Bun.gc(true);
dumpStats();
expect(heapStats().objectTypeCounts.ReadableStream).toBeWithin(
Math.max(initialCount - count / 2, 0),
initialCount + count / 2,
);
},
},
async server => {
const count = 1000;
async function callback() {
const response = await fetch(server.url, {
body: blob,
method: "POST",
});
// We are testing for ReadableStream leaks, so we use the ReadableStream here.
const chunks = [];
for await (const chunk of response.body) {
chunks.push(chunk);
}
const digest = Buffer.from(Bun.concatArrayBuffers(chunks)).toString();
expect(digest).toBe(expected);
Bun.gc(false);
}
{
let remaining = count;
const batchSize = 64;
while (remaining > 0) {
const promises = new Array(count);
for (let i = 0; i < batchSize && remaining > 0; i++) {
promises[i] = callback();
}
await Promise.all(promises);
remaining -= batchSize;
}
}
Bun.gc(true);
dumpStats();
expect(heapStats().objectTypeCounts.ReadableStream).toBeWithin(
Math.max(initialCount - count / 2, 0),
initialCount + count / 2,
);
},
);
},
100000,
);
}
});
);
},
100000,
);
}
},
);
[200, 200n, 303, 418, 599, 599n].forEach(statusCode => {
it(`should response with HTTP status code (${statusCode})`, async () => {

View File

@@ -1,5 +1,5 @@
import { describe, expect, it, spyOn } from "bun:test";
import { bunEnv, bunExe, gc, getMaxFD, isIntelMacOS, isWindows, tempDirWithFiles, tmpdirSync } from "harness";
import { bunEnv, bunExe, gc, getMaxFD, isBroken, isIntelMacOS, isWindows, tempDirWithFiles, tmpdirSync } from "harness";
import { isAscii } from "node:buffer";
import fs, {
closeSync,
@@ -2225,110 +2225,73 @@ describe("fs.ReadStream", () => {
});
describe("createWriteStream", () => {
it("simple write stream finishes", async () => {
const path = `${tmpdir()}/fs.test.ts/${Date.now()}.createWriteStream.txt`;
const stream = createWriteStream(path);
it.todoIf(isBroken && isWindows)("simple write stream finishes", async () => {
const streamPath = join(tmpdirSync(), "create-write-stream.txt");
const { promise: done, resolve, reject } = Promise.withResolvers();
const stream = createWriteStream(streamPath);
stream.on("error", reject);
stream.on("finish", resolve);
stream.write("Test file written successfully");
stream.end();
return await new Promise((resolve, reject) => {
stream.on("error", e => {
reject(e);
});
stream.on("finish", () => {
expect(readFileSync(path, "utf8")).toBe("Test file written successfully");
resolve(true);
});
});
await done;
expect(readFileSync(streamPath, "utf8")).toBe("Test file written successfully");
});
it("writing null throws ERR_STREAM_NULL_VALUES", async () => {
const path = `${tmpdir()}/fs.test.ts/${Date.now()}.createWriteStreamNulls.txt`;
const stream = createWriteStream(path);
try {
stream.write(null);
expect(() => {}).toThrow(Error);
} catch (exception: any) {
expect(exception.code).toBe("ERR_STREAM_NULL_VALUES");
}
const streamPath = join(tmpdirSync(), "create-write-stream-nulls.txt");
const stream = createWriteStream(streamPath);
expect.toThrowWithCode(() => stream.write(null), "ERR_STREAM_NULL_VALUES");
});
it("writing null throws ERR_STREAM_NULL_VALUES (objectMode: true)", async () => {
const path = `${tmpdir()}/fs.test.ts/${Date.now()}.createWriteStreamNulls.txt`;
const stream = createWriteStream(path, {
const streamPath = join(tmpdirSync(), "create-write-stream-nulls-object-mode.txt");
const stream = createWriteStream(streamPath, {
// @ts-ignore-next-line
objectMode: true,
});
try {
stream.write(null);
expect(() => {}).toThrow(Error);
} catch (exception: any) {
expect(exception.code).toBe("ERR_STREAM_NULL_VALUES");
}
expect.toThrowWithCode(() => stream.write(null), "ERR_STREAM_NULL_VALUES");
});
it("writing false throws ERR_INVALID_ARG_TYPE", async () => {
const path = `${tmpdir()}/fs.test.ts/${Date.now()}.createWriteStreamFalse.txt`;
const stream = createWriteStream(path);
try {
stream.write(false);
expect(() => {}).toThrow(Error);
} catch (exception: any) {
expect(exception.code).toBe("ERR_INVALID_ARG_TYPE");
}
const streamPath = join(tmpdirSync(), "create-write-stream-false.txt");
const stream = createWriteStream(streamPath);
expect.toThrowWithCode(() => stream.write(false), "ERR_INVALID_ARG_TYPE");
});
it("writing false throws ERR_INVALID_ARG_TYPE (objectMode: true)", async () => {
const path = `${tmpdir()}/fs.test.ts/${Date.now()}.createWriteStreamFalse.txt`;
const stream = createWriteStream(path, {
const streamPath = join(tmpdirSync(), "create-write-stream-false-object-mode.txt");
const stream = createWriteStream(streamPath, {
// @ts-ignore-next-line
objectMode: true,
});
try {
stream.write(false);
expect(() => {}).toThrow(Error);
} catch (exception: any) {
expect(exception.code).toBe("ERR_INVALID_ARG_TYPE");
}
expect.toThrowWithCode(() => stream.write(false), "ERR_INVALID_ARG_TYPE");
});
it("writing in append mode should not truncate the file", async () => {
const path = `${tmpdir()}/fs.test.ts/${Date.now()}.createWriteStreamAppend.txt`;
const stream = createWriteStream(path, {
const streamPath = join(tmpdirSync(), "create-write-stream-append.txt");
const stream = createWriteStream(streamPath, {
// @ts-ignore-next-line
flags: "a",
});
const { promise: done1, resolve: resolve1, reject: reject1 } = Promise.withResolvers();
stream.on("error", reject1);
stream.on("finish", resolve1);
stream.write("first line\n");
stream.end();
await done1;
await new Promise((resolve, reject) => {
stream.on("error", e => {
reject(e);
});
stream.on("finish", () => {
resolve(true);
});
});
const stream2 = createWriteStream(path, {
// @ts-ignore-next-line
flags: "a",
});
const { promise: done2, resolve: resolve2, reject: reject2 } = Promise.withResolvers();
const stream2 = createWriteStream(streamPath, { flags: "a" });
stream2.on("error", reject2);
stream2.on("finish", resolve2);
stream2.write("second line\n");
stream2.end();
await done2;
return await new Promise((resolve, reject) => {
stream2.on("error", e => {
reject(e);
});
stream2.on("finish", () => {
expect(readFileSync(path, "utf8")).toBe("first line\nsecond line\n");
resolve(true);
});
});
expect(readFileSync(streamPath, "utf8")).toBe("first line\nsecond line\n");
});
it("should emit open and call close callback", done => {

View File

@@ -1,6 +1,6 @@
import { spawn, spawnSync } from "bun";
import { beforeAll, describe, expect, it } from "bun:test";
import { bunEnv, bunExe, tmpdirSync, isWindows } from "harness";
import { bunEnv, bunExe, tmpdirSync, isWindows, isMusl, isBroken } from "harness";
import assert from "node:assert";
import fs from "node:fs/promises";
import { join, basename } from "path";
@@ -84,163 +84,165 @@ async function build(
};
}
beforeAll(async () => {
// set up clean directories for our 4 builds
directories.bunRelease = tmpdirSync();
directories.bunDebug = tmpdirSync();
directories.node = tmpdirSync();
directories.badModules = tmpdirSync();
describe.todoIf(isBroken && isMusl)("node:v8", () => {
beforeAll(async () => {
// set up clean directories for our 4 builds
directories.bunRelease = tmpdirSync();
directories.bunDebug = tmpdirSync();
directories.node = tmpdirSync();
directories.badModules = tmpdirSync();
await install(srcDir, directories.bunRelease, Runtime.bun);
await install(srcDir, directories.bunDebug, Runtime.bun);
await install(srcDir, directories.node, Runtime.node);
await install(join(__dirname, "bad-modules"), directories.badModules, Runtime.node);
await install(srcDir, directories.bunRelease, Runtime.bun);
await install(srcDir, directories.bunDebug, Runtime.bun);
await install(srcDir, directories.node, Runtime.node);
await install(join(__dirname, "bad-modules"), directories.badModules, Runtime.node);
const results = await Promise.all([
build(srcDir, directories.bunRelease, Runtime.bun, BuildMode.release),
build(srcDir, directories.bunDebug, Runtime.bun, BuildMode.debug),
build(srcDir, directories.node, Runtime.node, BuildMode.release),
build(join(__dirname, "bad-modules"), directories.badModules, Runtime.node, BuildMode.release),
]);
for (const r of results) {
console.log(r.description, "stdout:");
console.log(r.out);
console.log(r.description, "stderr:");
console.log(r.err);
}
});
const results = await Promise.all([
build(srcDir, directories.bunRelease, Runtime.bun, BuildMode.release),
build(srcDir, directories.bunDebug, Runtime.bun, BuildMode.debug),
build(srcDir, directories.node, Runtime.node, BuildMode.release),
build(join(__dirname, "bad-modules"), directories.badModules, Runtime.node, BuildMode.release),
]);
for (const r of results) {
console.log(r.description, "stdout:");
console.log(r.out);
console.log(r.description, "stderr:");
console.log(r.err);
}
});
describe("module lifecycle", () => {
it("can call a basic native function", () => {
checkSameOutput("test_v8_native_call", []);
});
});
describe("primitives", () => {
it("can create and distinguish between null, undefined, true, and false", () => {
checkSameOutput("test_v8_primitives", []);
});
});
describe("Number", () => {
it("can create small integer", () => {
checkSameOutput("test_v8_number_int", []);
});
// non-i32 v8::Number is not implemented yet
it("can create large integer", () => {
checkSameOutput("test_v8_number_large_int", []);
});
it("can create fraction", () => {
checkSameOutput("test_v8_number_fraction", []);
});
});
describe("String", () => {
it("can create and read back strings with only ASCII characters", () => {
checkSameOutput("test_v8_string_ascii", []);
});
// non-ASCII strings are not implemented yet
it("can create and read back strings with UTF-8 characters", () => {
checkSameOutput("test_v8_string_utf8", []);
});
it("handles replacement correctly in strings with invalid UTF-8 sequences", () => {
checkSameOutput("test_v8_string_invalid_utf8", []);
});
it("can create strings from null-terminated Latin-1 data", () => {
checkSameOutput("test_v8_string_latin1", []);
});
describe("WriteUtf8", () => {
it("truncates the string correctly", () => {
checkSameOutput("test_v8_string_write_utf8", []);
describe("module lifecycle", () => {
it("can call a basic native function", () => {
checkSameOutput("test_v8_native_call", []);
});
});
});
describe("External", () => {
it("can create an external and read back the correct value", () => {
checkSameOutput("test_v8_external", []);
});
});
describe("Object", () => {
it("can create an object and set properties", () => {
checkSameOutput("test_v8_object", []);
});
});
describe("Array", () => {
// v8::Array::New is broken as it still tries to reinterpret locals as JSValues
it.skip("can create an array from a C array of Locals", () => {
checkSameOutput("test_v8_array_new", []);
});
});
describe("ObjectTemplate", () => {
it("creates objects with internal fields", () => {
checkSameOutput("test_v8_object_template", []);
});
});
describe("FunctionTemplate", () => {
it("keeps the data parameter alive", () => {
checkSameOutput("test_v8_function_template", []);
});
});
describe("Function", () => {
it("correctly receives all its arguments from JS", () => {
checkSameOutput("print_values_from_js", [5.0, true, null, false, "meow", {}]);
checkSameOutput("print_native_function", []);
describe("primitives", () => {
it("can create and distinguish between null, undefined, true, and false", () => {
checkSameOutput("test_v8_primitives", []);
});
});
it("correctly receives the this value from JS", () => {
checkSameOutput("call_function_with_weird_this_values", []);
});
});
describe("error handling", () => {
it("throws an error for modules built using the wrong ABI version", () => {
expect(() => require(join(directories.badModules, "build/Release/mismatched_abi_version.node"))).toThrow(
"The module 'mismatched_abi_version' was compiled against a different Node.js ABI version using NODE_MODULE_VERSION 42.",
);
describe("Number", () => {
it("can create small integer", () => {
checkSameOutput("test_v8_number_int", []);
});
// non-i32 v8::Number is not implemented yet
it("can create large integer", () => {
checkSameOutput("test_v8_number_large_int", []);
});
it("can create fraction", () => {
checkSameOutput("test_v8_number_fraction", []);
});
});
it("throws an error for modules with no entrypoint", () => {
expect(() => require(join(directories.badModules, "build/Release/no_entrypoint.node"))).toThrow(
"The module 'no_entrypoint' has no declared entry point.",
);
describe("String", () => {
it("can create and read back strings with only ASCII characters", () => {
checkSameOutput("test_v8_string_ascii", []);
});
// non-ASCII strings are not implemented yet
it("can create and read back strings with UTF-8 characters", () => {
checkSameOutput("test_v8_string_utf8", []);
});
it("handles replacement correctly in strings with invalid UTF-8 sequences", () => {
checkSameOutput("test_v8_string_invalid_utf8", []);
});
it("can create strings from null-terminated Latin-1 data", () => {
checkSameOutput("test_v8_string_latin1", []);
});
describe("WriteUtf8", () => {
it("truncates the string correctly", () => {
checkSameOutput("test_v8_string_write_utf8", []);
});
});
});
});
describe("Global", () => {
it("can create, modify, and read the value from global handles", () => {
checkSameOutput("test_v8_global", []);
describe("External", () => {
it("can create an external and read back the correct value", () => {
checkSameOutput("test_v8_external", []);
});
});
});
describe("HandleScope", () => {
it("can hold a lot of locals", () => {
checkSameOutput("test_many_v8_locals", []);
describe("Object", () => {
it("can create an object and set properties", () => {
checkSameOutput("test_v8_object", []);
});
});
it("keeps GC objects alive", () => {
checkSameOutput("test_handle_scope_gc", []);
}, 10000);
});
describe("EscapableHandleScope", () => {
it("keeps handles alive in the outer scope", () => {
checkSameOutput("test_v8_escapable_handle_scope", []);
describe("Array", () => {
// v8::Array::New is broken as it still tries to reinterpret locals as JSValues
it.skip("can create an array from a C array of Locals", () => {
checkSameOutput("test_v8_array_new", []);
});
});
});
describe("uv_os_getpid", () => {
it.skipIf(isWindows)("returns the same result as getpid on POSIX", () => {
checkSameOutput("test_uv_os_getpid", []);
describe("ObjectTemplate", () => {
it("creates objects with internal fields", () => {
checkSameOutput("test_v8_object_template", []);
});
});
});
describe("uv_os_getppid", () => {
it.skipIf(isWindows)("returns the same result as getppid on POSIX", () => {
checkSameOutput("test_uv_os_getppid", []);
describe("FunctionTemplate", () => {
it("keeps the data parameter alive", () => {
checkSameOutput("test_v8_function_template", []);
});
});
describe("Function", () => {
it("correctly receives all its arguments from JS", () => {
checkSameOutput("print_values_from_js", [5.0, true, null, false, "meow", {}]);
checkSameOutput("print_native_function", []);
});
it("correctly receives the this value from JS", () => {
checkSameOutput("call_function_with_weird_this_values", []);
});
});
describe("error handling", () => {
it("throws an error for modules built using the wrong ABI version", () => {
expect(() => require(join(directories.badModules, "build/Release/mismatched_abi_version.node"))).toThrow(
"The module 'mismatched_abi_version' was compiled against a different Node.js ABI version using NODE_MODULE_VERSION 42.",
);
});
it("throws an error for modules with no entrypoint", () => {
expect(() => require(join(directories.badModules, "build/Release/no_entrypoint.node"))).toThrow(
"The module 'no_entrypoint' has no declared entry point.",
);
});
});
describe("Global", () => {
it("can create, modify, and read the value from global handles", () => {
checkSameOutput("test_v8_global", []);
});
});
describe("HandleScope", () => {
it("can hold a lot of locals", () => {
checkSameOutput("test_many_v8_locals", []);
});
it("keeps GC objects alive", () => {
checkSameOutput("test_handle_scope_gc", []);
}, 10000);
});
describe("EscapableHandleScope", () => {
it("keeps handles alive in the outer scope", () => {
checkSameOutput("test_v8_escapable_handle_scope", []);
});
});
describe("uv_os_getpid", () => {
it.skipIf(isWindows)("returns the same result as getpid on POSIX", () => {
checkSameOutput("test_uv_os_getpid", []);
});
});
describe("uv_os_getppid", () => {
it.skipIf(isWindows)("returns the same result as getppid on POSIX", () => {
checkSameOutput("test_uv_os_getppid", []);
});
});
});