Mirror of https://github.com/oven-sh/bun (synced 2026-02-10 19:08:50 +00:00)
Merge branch 'main' into dylan/text-lockfile
@@ -7,113 +7,22 @@
import { writeFileSync } from "node:fs";
import { join } from "node:path";

function getEnv(name, required = true) {
  const value = process.env[name];

  if (!value && required) {
    throw new Error(`Missing environment variable: ${name}`);
  }

  return value;
}

function getRepository() {
  const url = getEnv("BUILDKITE_PULL_REQUEST_REPO", false) || getEnv("BUILDKITE_REPO");
  const match = url.match(/github.com\/([^/]+)\/([^/]+)\.git$/);
  if (!match) {
    throw new Error(`Unsupported repository: ${url}`);
  }
  const [, owner, repo] = match;
  return `${owner}/${repo}`;
}

function getCommit() {
|
||||
return getEnv("BUILDKITE_COMMIT");
|
||||
}
|
||||
|
||||
function getCommitMessage() {
|
||||
return getEnv("BUILDKITE_MESSAGE", false) || "";
|
||||
}
|
||||
|
||||
function getBranch() {
|
||||
return getEnv("BUILDKITE_BRANCH");
|
||||
}
|
||||
|
||||
function getMainBranch() {
|
||||
return getEnv("BUILDKITE_PIPELINE_DEFAULT_BRANCH", false) || "main";
|
||||
}
|
||||
|
||||
function isFork() {
|
||||
const repository = getEnv("BUILDKITE_PULL_REQUEST_REPO", false);
|
||||
return !!repository && repository !== getEnv("BUILDKITE_REPO");
|
||||
}
|
||||
|
||||
function isMainBranch() {
|
||||
return getBranch() === getMainBranch() && !isFork();
|
||||
}
|
||||
|
||||
function isMergeQueue() {
|
||||
return /^gh-readonly-queue/.test(getEnv("BUILDKITE_BRANCH"));
|
||||
}
|
||||
|
||||
function isPullRequest() {
|
||||
return getEnv("BUILDKITE_PULL_REQUEST", false) === "true";
|
||||
}
|
||||
|
||||
async function getChangedFiles() {
|
||||
const repository = getRepository();
|
||||
const head = getCommit();
|
||||
const base = `${head}^1`;
|
||||
|
||||
try {
|
||||
const response = await fetch(`https://api.github.com/repos/${repository}/compare/${base}...${head}`);
|
||||
if (response.ok) {
|
||||
const { files } = await response.json();
|
||||
return files.filter(({ status }) => !/removed|unchanged/i.test(status)).map(({ filename }) => filename);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
}
|
||||
}
|
||||
|
||||
function getBuildUrl() {
|
||||
return getEnv("BUILDKITE_BUILD_URL");
|
||||
}
|
||||
|
||||
async function getBuildIdWithArtifacts() {
|
||||
let depth = 0;
|
||||
let url = getBuildUrl();
|
||||
|
||||
while (url) {
|
||||
const response = await fetch(`${url}.json`, {
|
||||
headers: { "Accept": "application/json" },
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
return;
|
||||
}
|
||||
|
||||
const { id, state, prev_branch_build: lastBuild, steps } = await response.json();
|
||||
if (depth++) {
|
||||
if (state === "failed" || state === "passed") {
|
||||
const buildSteps = steps.filter(({ label }) => label.endsWith("build-bun"));
|
||||
if (buildSteps.length) {
|
||||
if (buildSteps.every(({ outcome }) => outcome === "passed")) {
|
||||
return id;
|
||||
}
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!lastBuild) {
|
||||
return;
|
||||
}
|
||||
|
||||
url = url.replace(/\/builds\/[0-9]+/, `/builds/${lastBuild["number"]}`);
|
||||
}
|
||||
}
|
||||
import {
|
||||
getCanaryRevision,
|
||||
getChangedFiles,
|
||||
getCommit,
|
||||
getCommitMessage,
|
||||
getLastSuccessfulBuild,
|
||||
getMainBranch,
|
||||
getTargetBranch,
|
||||
isBuildkite,
|
||||
isFork,
|
||||
isMainBranch,
|
||||
isMergeQueue,
|
||||
printEnvironment,
|
||||
spawnSafe,
|
||||
startGroup,
|
||||
} from "../scripts/utils.mjs";
|
||||
|
||||
function toYaml(obj, indent = 0) {
|
||||
const spaces = " ".repeat(indent);
|
||||
@@ -447,59 +356,82 @@ function getPipeline(buildId) {
|
||||
}
|
||||
|
||||
async function main() {
|
||||
console.log("Checking environment...");
|
||||
console.log(" - Repository:", getRepository());
|
||||
console.log(" - Branch:", getBranch());
|
||||
console.log(" - Commit:", getCommit());
|
||||
console.log(" - Commit Message:", getCommitMessage());
|
||||
console.log(" - Is Main Branch:", isMainBranch());
|
||||
console.log(" - Is Merge Queue:", isMergeQueue());
|
||||
console.log(" - Is Pull Request:", isPullRequest());
|
||||
printEnvironment();
|
||||
|
||||
const changedFiles = await getChangedFiles();
|
||||
console.log("Checking last successful build...");
|
||||
const lastBuild = await getLastSuccessfulBuild();
|
||||
if (lastBuild) {
|
||||
const { id, path, commit_id: commit } = lastBuild;
|
||||
console.log(" - Build ID:", id);
|
||||
console.log(" - Build URL:", new URL(path, "https://buildkite.com/").toString());
|
||||
console.log(" - Commit:", commit);
|
||||
} else {
|
||||
console.log(" - No build found");
|
||||
}
|
||||
|
||||
console.log("Checking changed files...");
|
||||
const baseRef = getCommit();
|
||||
console.log(" - Base Ref:", baseRef);
|
||||
const headRef = lastBuild?.commit_id || getTargetBranch() || getMainBranch();
|
||||
console.log(" - Head Ref:", headRef);
|
||||
|
||||
const changedFiles = await getChangedFiles(undefined, baseRef, headRef);
|
||||
if (changedFiles) {
|
||||
console.log(
|
||||
`Found ${changedFiles.length} changed files: \n${changedFiles.map(filename => ` - ${filename}`).join("\n")}`,
|
||||
);
|
||||
if (changedFiles.length) {
|
||||
changedFiles.forEach(filename => console.log(` - ${filename}`));
|
||||
} else {
|
||||
console.log(" - No changed files");
|
||||
}
|
||||
}
|
||||
|
||||
const isDocumentationFile = filename => /^(\.vscode|\.github|bench|docs|examples)|\.(md)$/i.test(filename);
|
||||
const isTestFile = filename => /^test/i.test(filename) || /runner\.node\.mjs$/i.test(filename);
|
||||
|
||||
const isSkip = () => {
|
||||
console.log("Checking if CI should be skipped...");
|
||||
{
|
||||
const message = getCommitMessage();
|
||||
if (/\[(skip ci|no ci|ci skip|ci no)\]/i.test(message)) {
|
||||
return true;
|
||||
const match = /\[(skip ci|no ci|ci skip|ci no)\]/i.exec(message);
|
||||
if (match) {
|
||||
const [, reason] = match;
|
||||
console.log(" - Yes, because commit message contains:", reason);
|
||||
return;
|
||||
}
|
||||
return changedFiles && changedFiles.every(filename => isDocumentationFile(filename));
|
||||
};
|
||||
|
||||
if (isSkip()) {
|
||||
console.log("Skipping CI due to commit message or changed files...");
|
||||
}
|
||||
if (changedFiles && changedFiles.every(filename => isDocumentationFile(filename))) {
|
||||
console.log(" - Yes, because all changed files are documentation");
|
||||
return;
|
||||
}
|
||||
|
||||
const isTestFile = filename => /^test/i.test(filename) || /runner\.node\.mjs$/i.test(filename);
|
||||
|
||||
const isSkipBuild = () => {
|
||||
console.log("Checking if build should be skipped...");
|
||||
let skipBuild;
|
||||
{
|
||||
const message = getCommitMessage();
|
||||
if (/\[(only tests?|tests? only|skip build|no build|build skip|build no)\]/i.test(message)) {
|
||||
return true;
|
||||
const match = /\[(only tests?|tests? only|skip build|no build|build skip|build no)\]/i.exec(message);
|
||||
if (match) {
|
||||
const [, reason] = match;
|
||||
console.log(" - Yes, because commit message contains:", reason);
|
||||
skipBuild = true;
|
||||
}
|
||||
return changedFiles && changedFiles.every(filename => isTestFile(filename) || isDocumentationFile(filename));
|
||||
};
|
||||
}
|
||||
if (changedFiles && changedFiles.every(filename => isTestFile(filename) || isDocumentationFile(filename))) {
|
||||
console.log(" - Yes, because all changed files are tests or documentation");
|
||||
skipBuild = true;
|
||||
}
|
||||
|
||||
let buildId;
|
||||
if (isSkipBuild()) {
|
||||
buildId = await getBuildIdWithArtifacts();
|
||||
if (buildId) {
|
||||
console.log("Skipping build due to commit message or changed files...");
|
||||
console.log("Using build artifacts from previous build:", buildId);
|
||||
} else {
|
||||
console.log("Attempted to skip build, but could not find previous build");
|
||||
console.log("Checking if build is a named release...");
|
||||
let buildRelease;
|
||||
{
|
||||
const message = getCommitMessage();
|
||||
const match = /\[(release|release build|build release)\]/i.exec(message);
|
||||
if (match) {
|
||||
const [, reason] = match;
|
||||
console.log(" - Yes, because commit message contains:", reason);
|
||||
buildRelease = true;
|
||||
}
|
||||
}
|
||||
|
||||
const pipeline = getPipeline(buildId);
|
||||
console.log("Generating pipeline...");
|
||||
const pipeline = getPipeline(lastBuild && skipBuild ? lastBuild.id : undefined);
|
||||
const content = toYaml(pipeline);
|
||||
const contentPath = join(process.cwd(), ".buildkite", "ci.yml");
|
||||
writeFileSync(contentPath, content);
|
||||
@@ -507,6 +439,15 @@ async function main() {
|
||||
console.log("Generated pipeline:");
|
||||
console.log(" - Path:", contentPath);
|
||||
console.log(" - Size:", (content.length / 1024).toFixed(), "KB");
|
||||
|
||||
if (isBuildkite) {
|
||||
console.log("Setting canary revision...");
|
||||
const canaryRevision = buildRelease ? 0 : await getCanaryRevision();
|
||||
await spawnSafe(["buildkite-agent", "meta-data", "set", "canary", `${canaryRevision}`]);
|
||||
|
||||
console.log("Uploading pipeline...");
|
||||
await spawnSafe(["buildkite-agent", "pipeline", "upload", contentPath]);
|
||||
}
|
||||
}
|
||||
|
||||
await main();
|
||||
|
||||
@@ -2,106 +2,10 @@
|
||||
|
||||
set -eo pipefail
|
||||
|
||||
function assert_build() {
|
||||
if [ -z "$BUILDKITE_REPO" ]; then
|
||||
echo "error: Cannot find repository for this build"
|
||||
exit 1
|
||||
fi
|
||||
if [ -z "$BUILDKITE_COMMIT" ]; then
|
||||
echo "error: Cannot find commit for this build"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
function assert_buildkite_agent() {
|
||||
if ! command -v buildkite-agent &> /dev/null; then
|
||||
echo "error: Cannot find buildkite-agent, please install it:"
|
||||
echo "https://buildkite.com/docs/agent/v3/install"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
function assert_jq() {
|
||||
assert_command "jq" "jq" "https://stedolan.github.io/jq/"
|
||||
}
|
||||
|
||||
function assert_curl() {
|
||||
assert_command "curl" "curl" "https://curl.se/download.html"
|
||||
}
|
||||
|
||||
function assert_node() {
|
||||
assert_command "node" "node" "https://nodejs.org/en/download/"
|
||||
}
|
||||
|
||||
function assert_command() {
|
||||
local command="$1"
|
||||
local package="$2"
|
||||
local help_url="$3"
|
||||
if ! command -v "$command" &> /dev/null; then
|
||||
echo "warning: $command is not installed, installing..."
|
||||
if command -v brew &> /dev/null; then
|
||||
HOMEBREW_NO_AUTO_UPDATE=1 brew install "$package"
|
||||
else
|
||||
echo "error: Cannot install $command, please install it"
|
||||
if [ -n "$help_url" ]; then
|
||||
echo ""
|
||||
echo "hint: See $help_url for help"
|
||||
fi
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
}
|
||||
|
||||
function assert_release() {
|
||||
if [ "$RELEASE" == "1" ]; then
|
||||
run_command buildkite-agent meta-data set canary "0"
|
||||
fi
|
||||
}
|
||||
|
||||
function assert_canary() {
  local canary="$(buildkite-agent meta-data get canary 2>/dev/null)"
  if [ -z "$canary" ]; then
    local repo=$(echo "$BUILDKITE_REPO" | sed -E 's#https://github.com/([^/]+)/([^/]+).git#\1/\2#g')
    local tag="$(curl -sL "https://api.github.com/repos/$repo/releases/latest" | jq -r ".tag_name")"
    if [ "$tag" == "null" ]; then
      canary="1"
    else
      local revision=$(curl -sL "https://api.github.com/repos/$repo/compare/$tag...$BUILDKITE_COMMIT" | jq -r ".ahead_by")
      if [ "$revision" == "null" ]; then
        canary="1"
      else
        canary="$revision"
      fi
    fi
    run_command buildkite-agent meta-data set canary "$canary"
  fi
}

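For orientation only (not part of this diff): the canary number stored above is simply how many commits the build is ahead of the latest GitHub release, falling back to 1 when no release or comparison is available. A rough JavaScript sketch of the same lookup (the real getCanaryRevision helper now lives in scripts/utils.mjs, whose diff is suppressed below, and may be implemented differently) would be:

// Sketch only: mirrors the shell logic in assert_canary() above using the same GitHub endpoints.
// getCanaryRevisionSketch is a hypothetical name; the real helper is getCanaryRevision in scripts/utils.mjs.
async function getCanaryRevisionSketch(repo, commit) {
  // Latest published release tag, e.g. from https://api.github.com/repos/<repo>/releases/latest
  const release = await fetch(`https://api.github.com/repos/${repo}/releases/latest`);
  if (!release.ok) return 1;
  const { tag_name: tag } = await release.json();
  if (!tag) return 1;
  // "ahead_by" counts commits between the latest release tag and this build's commit.
  const compare = await fetch(`https://api.github.com/repos/${repo}/compare/${tag}...${commit}`);
  if (!compare.ok) return 1;
  const { ahead_by: aheadBy } = await compare.json();
  return typeof aheadBy === "number" ? aheadBy : 1;
}

// Example: await getCanaryRevisionSketch("oven-sh/bun", process.env.BUILDKITE_COMMIT)
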
function upload_buildkite_pipeline() {
|
||||
local path="$1"
|
||||
if [ ! -f "$path" ]; then
|
||||
echo "error: Cannot find pipeline: $path"
|
||||
exit 1
|
||||
fi
|
||||
run_command buildkite-agent pipeline upload "$path"
|
||||
}
|
||||
|
||||
function run_command() {
|
||||
set -x
|
||||
"$@"
|
||||
{ set +x; } 2>/dev/null
|
||||
}
|
||||
|
||||
assert_build
|
||||
assert_buildkite_agent
|
||||
assert_jq
|
||||
assert_curl
|
||||
assert_node
|
||||
assert_release
|
||||
assert_canary
|
||||
|
||||
run_command node ".buildkite/ci.mjs"
|
||||
|
||||
if [ -f ".buildkite/ci.yml" ]; then
|
||||
upload_buildkite_pipeline ".buildkite/ci.yml"
|
||||
fi
|
||||
|
||||
@@ -2,7 +2,7 @@ option(WEBKIT_VERSION "The version of WebKit to use")
|
||||
option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading")
|
||||
|
||||
if(NOT WEBKIT_VERSION)
|
||||
set(WEBKIT_VERSION 4f92f334956b250a6bc4ecc1529945bacd22d96c)
|
||||
set(WEBKIT_VERSION 73b551e25d97e463e8e2c86cb819b8639fcbda06)
|
||||
endif()
|
||||
|
||||
if(WEBKIT_LOCAL)
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"private": true,
|
||||
"name": "bun",
|
||||
"version": "1.1.34",
|
||||
"version": "1.1.35",
|
||||
"workspaces": [
|
||||
"./packages/bun-types"
|
||||
],
|
||||
|
||||
@@ -20,42 +20,32 @@ import {
|
||||
rmSync,
|
||||
} from "node:fs";
|
||||
import { spawn, spawnSync } from "node:child_process";
|
||||
import { tmpdir, hostname, userInfo, homedir } from "node:os";
|
||||
import { join, basename, dirname, relative, sep } from "node:path";
|
||||
import { normalize as normalizeWindows } from "node:path/win32";
|
||||
import { isIP } from "node:net";
|
||||
import { parseArgs } from "node:util";
|
||||
import {
|
||||
getBuildLabel,
|
||||
getBuildUrl,
|
||||
getEnv,
|
||||
getFileUrl,
|
||||
getWindowsExitReason,
|
||||
isBuildkite,
|
||||
isCI,
|
||||
isGithubAction,
|
||||
isWindows,
|
||||
printEnvironment,
|
||||
startGroup,
|
||||
tmpdir,
|
||||
unzip,
|
||||
} from "./utils.mjs";
|
||||
import { userInfo } from "node:os";
|
||||
|
||||
const cwd = dirname(import.meta.dirname);
|
||||
const testsPath = join(cwd, "test");
|
||||
|
||||
const spawnTimeout = 5_000;
|
||||
const testTimeout = 3 * 60_000;
|
||||
const integrationTimeout = 5 * 60_000;
|
||||
|
||||
const isLinux = process.platform === "linux";
|
||||
const isMacOS = process.platform === "darwin";
|
||||
const isWindows = process.platform === "win32";
|
||||
|
||||
const isGitHubAction = !!process.env["GITHUB_ACTIONS"];
|
||||
const isBuildKite = !!process.env["BUILDKITE"];
|
||||
const isBuildKiteTestSuite = !!process.env["BUILDKITE_ANALYTICS_TOKEN"];
|
||||
const isCI = !!process.env["CI"] || isGitHubAction || isBuildKite;
|
||||
|
||||
const isAWS =
|
||||
/^ec2/i.test(process.env["USERNAME"]) ||
|
||||
/^ec2/i.test(process.env["USER"]) ||
|
||||
/^(?:ec2|ip)/i.test(process.env["HOSTNAME"]) ||
|
||||
/^(?:ec2|ip)/i.test(getHostname());
|
||||
const isCloud = isAWS;
|
||||
|
||||
const baseUrl = process.env["GITHUB_SERVER_URL"] || "https://github.com";
|
||||
const repository = process.env["GITHUB_REPOSITORY"] || "oven-sh/bun";
|
||||
const pullRequest = /^pull\/(\d+)$/.exec(process.env["GITHUB_REF"])?.[1];
|
||||
const gitSha = getGitSha();
|
||||
const gitRef = getGitRef();
|
||||
|
||||
const cwd = dirname(import.meta.dirname);
|
||||
const testsPath = join(cwd, "test");
|
||||
const tmpPath = getTmpdir();
|
||||
|
||||
const { values: options, positionals: filters } = parseArgs({
|
||||
allowPositionals: true,
|
||||
options: {
|
||||
@@ -73,11 +63,11 @@ const { values: options, positionals: filters } = parseArgs({
|
||||
},
|
||||
["shard"]: {
|
||||
type: "string",
|
||||
default: process.env["BUILDKITE_PARALLEL_JOB"] || "0",
|
||||
default: getEnv("BUILDKITE_PARALLEL_JOB", false) || "0",
|
||||
},
|
||||
["max-shards"]: {
|
||||
type: "string",
|
||||
default: process.env["BUILDKITE_PARALLEL_JOB_COUNT"] || "1",
|
||||
default: getEnv("BUILDKITE_PARALLEL_JOB_COUNT", false) || "1",
|
||||
},
|
||||
["include"]: {
|
||||
type: "string",
|
||||
@@ -100,37 +90,6 @@ const { values: options, positionals: filters } = parseArgs({
|
||||
},
|
||||
});
|
||||
|
||||
async function printInfo() {
|
||||
console.log("Timestamp:", new Date());
|
||||
console.log("OS:", getOsPrettyText(), getOsEmoji());
|
||||
console.log("Arch:", getArchText(), getArchEmoji());
|
||||
if (isLinux) {
|
||||
console.log("Glibc:", getGlibcVersion());
|
||||
}
|
||||
console.log("Hostname:", getHostname());
|
||||
if (isCI) {
|
||||
console.log("CI:", getCI());
|
||||
console.log("Shard:", options["shard"], "/", options["max-shards"]);
|
||||
console.log("Build URL:", getBuildUrl());
|
||||
console.log("Environment:", process.env);
|
||||
if (isCloud) {
|
||||
console.log("Public IP:", await getPublicIp());
|
||||
console.log("Cloud:", getCloud());
|
||||
}
|
||||
const tailscaleIp = await getTailscaleIp();
|
||||
if (tailscaleIp) {
|
||||
console.log("Tailscale IP:", tailscaleIp);
|
||||
}
|
||||
}
|
||||
console.log("Cwd:", cwd);
|
||||
console.log("Tmpdir:", tmpPath);
|
||||
console.log("Commit:", gitSha);
|
||||
console.log("Ref:", gitRef);
|
||||
if (pullRequest) {
|
||||
console.log("Pull Request:", pullRequest);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @returns {Promise<TestResult[]>}
|
||||
@@ -197,27 +156,32 @@ async function runTests() {
|
||||
*/
|
||||
const runTest = async (title, fn) => {
|
||||
const label = `${getAnsi("gray")}[${++i}/${total}]${getAnsi("reset")} ${title}`;
|
||||
const result = await runTask(label, fn);
|
||||
const result = await startGroup(label, fn);
|
||||
results.push(result);
|
||||
|
||||
if (isBuildKite) {
|
||||
if (isBuildkite) {
|
||||
const { ok, error, stdoutPreview } = result;
|
||||
const markdown = formatTestToMarkdown(result);
|
||||
if (markdown) {
|
||||
const style = title.startsWith("vendor") ? "warning" : "error";
|
||||
const priority = title.startsWith("vendor") ? 1 : 5;
|
||||
reportAnnotationToBuildKite({ label: title, content: markdown, style, priority });
|
||||
if (title.startsWith("vendor")) {
|
||||
const markdown = formatTestToMarkdown({ ...result, testPath: title });
|
||||
if (markdown) {
|
||||
reportAnnotationToBuildKite({ label: title, content: markdown, style: "warning", priority: 5 });
|
||||
}
|
||||
} else {
|
||||
const markdown = formatTestToMarkdown(result);
|
||||
if (markdown) {
|
||||
reportAnnotationToBuildKite({ label: title, content: markdown, style: "error" });
|
||||
}
|
||||
}
|
||||
|
||||
if (!ok) {
|
||||
const label = `${getAnsi("red")}[${i}/${total}] ${title} - ${error}${getAnsi("reset")}`;
|
||||
await runTask(label, () => {
|
||||
startGroup(label, () => {
|
||||
process.stderr.write(stdoutPreview);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (isGitHubAction) {
|
||||
if (isGithubAction) {
|
||||
const summaryPath = process.env["GITHUB_STEP_SUMMARY"];
|
||||
if (summaryPath) {
|
||||
const longMarkdown = formatTestToMarkdown(result);
|
||||
@@ -267,23 +231,24 @@ async function runTests() {
|
||||
|
||||
if (testRunner === "bun") {
|
||||
await runTest(title, () => spawnBunTest(execPath, testPath, { cwd: vendorPath }));
|
||||
} else if (testRunner === "node") {
|
||||
const preload = join(import.meta.dirname, "..", "test", "runners", "node.ts");
|
||||
} else {
|
||||
const testRunnerPath = join(import.meta.dirname, "..", "test", "runners", `${testRunner}.ts`);
|
||||
if (!existsSync(testRunnerPath)) {
|
||||
throw new Error(`Unsupported test runner: ${testRunner}`);
|
||||
}
|
||||
await runTest(title, () =>
|
||||
spawnBun(execPath, {
|
||||
spawnBunTest(execPath, testPath, {
|
||||
cwd: vendorPath,
|
||||
args: ["--preload", preload, testPath],
|
||||
args: ["--preload", testRunnerPath],
|
||||
}),
|
||||
);
|
||||
} else {
|
||||
throw new Error(`Unsupported test runner: ${testRunner}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const failedTests = results.filter(({ ok }) => !ok);
|
||||
if (isGitHubAction) {
|
||||
if (isGithubAction) {
|
||||
reportOutputToGitHubAction("failing_tests_count", failedTests.length);
|
||||
const markdown = formatTestToMarkdown(failedTests);
|
||||
reportOutputToGitHubAction("failing_tests", markdown);
|
||||
@@ -462,7 +427,7 @@ async function spawnSafe(options) {
|
||||
error = "timeout";
|
||||
} else if (exitCode !== 0) {
|
||||
if (isWindows) {
|
||||
const winCode = getWindowsExitCode(exitCode);
|
||||
const winCode = getWindowsExitReason(exitCode);
|
||||
if (winCode) {
|
||||
exitCode = winCode;
|
||||
}
|
||||
@@ -488,14 +453,14 @@ async function spawnSafe(options) {
|
||||
*/
|
||||
async function spawnBun(execPath, { args, cwd, timeout, env, stdout, stderr }) {
|
||||
const path = addPath(dirname(execPath), process.env.PATH);
|
||||
const tmpdirPath = mkdtempSync(join(tmpPath, "buntmp-"));
|
||||
const { username } = userInfo();
|
||||
const tmpdirPath = mkdtempSync(join(tmpdir(), "buntmp-"));
|
||||
const { username, homedir } = userInfo();
|
||||
const bunEnv = {
|
||||
...process.env,
|
||||
PATH: path,
|
||||
TMPDIR: tmpdirPath,
|
||||
USER: username,
|
||||
HOME: homedir(),
|
||||
HOME: homedir,
|
||||
FORCE_COLOR: "1",
|
||||
BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING: "1",
|
||||
BUN_DEBUG_QUIET_LOGS: "1",
|
||||
@@ -511,23 +476,6 @@ async function spawnBun(execPath, { args, cwd, timeout, env, stdout, stderr }) {
|
||||
if (env) {
|
||||
Object.assign(bunEnv, env);
|
||||
}
|
||||
// Use Linux namespaces to isolate the child process
|
||||
// https://man7.org/linux/man-pages/man1/unshare.1.html
|
||||
// if (isLinux) {
|
||||
// const { uid, gid } = userInfo();
|
||||
// args = [
|
||||
// `--wd=${cwd}`,
|
||||
// "--user",
|
||||
// `--map-user=${uid}`,
|
||||
// `--map-group=${gid}`,
|
||||
// "--fork",
|
||||
// "--kill-child",
|
||||
// "--pid",
|
||||
// execPath,
|
||||
// ...args,
|
||||
// ];
|
||||
// execPath = "unshare";
|
||||
// }
|
||||
if (isWindows) {
|
||||
delete bunEnv["PATH"];
|
||||
bunEnv["Path"] = path;
|
||||
@@ -592,15 +540,17 @@ async function spawnBun(execPath, { args, cwd, timeout, env, stdout, stderr }) {
|
||||
* @param {string} testPath
|
||||
* @param {object} [options]
|
||||
* @param {string} [options.cwd]
|
||||
* @param {string[]} [options.args]
|
||||
* @returns {Promise<TestResult>}
|
||||
*/
|
||||
async function spawnBunTest(execPath, testPath, options = { cwd }) {
|
||||
const timeout = getTestTimeout(testPath);
|
||||
const perTestTimeout = Math.ceil(timeout / 2);
|
||||
const isReallyTest = isTestStrict(testPath);
|
||||
const absPath = join(options["cwd"], testPath);
|
||||
const isReallyTest = isTestStrict(testPath) || absPath.includes("vendor");
|
||||
const args = options["args"] ?? [];
|
||||
const { ok, error, stdout } = await spawnBun(execPath, {
|
||||
args: isReallyTest ? ["test", `--timeout=${perTestTimeout}`, absPath] : [absPath],
|
||||
args: isReallyTest ? ["test", ...args, `--timeout=${perTestTimeout}`, absPath] : [...args, absPath],
|
||||
cwd: options["cwd"],
|
||||
timeout: isReallyTest ? timeout : 30_000,
|
||||
env: {
|
||||
@@ -638,9 +588,9 @@ function getTestTimeout(testPath) {
|
||||
* @param {string} chunk
|
||||
*/
|
||||
function pipeTestStdout(io, chunk) {
|
||||
if (isGitHubAction) {
|
||||
if (isGithubAction) {
|
||||
io.write(chunk.replace(/\:\:(?:end)?group\:\:.*(?:\r\n|\r|\n)/gim, ""));
|
||||
} else if (isBuildKite) {
|
||||
} else if (isBuildkite) {
|
||||
io.write(chunk.replace(/(?:---|\+\+\+|~~~|\^\^\^) /gim, " ").replace(/\:\:.*(?:\r\n|\r|\n)/gim, ""));
|
||||
} else {
|
||||
io.write(chunk.replace(/\:\:.*(?:\r\n|\r|\n)/gim, ""));
|
||||
@@ -799,75 +749,6 @@ async function spawnBunInstall(execPath, options) {
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* @returns {string | undefined}
|
||||
*/
|
||||
function getGitSha() {
|
||||
const sha = process.env["GITHUB_SHA"] || process.env["BUILDKITE_COMMIT"];
|
||||
if (sha?.length === 40) {
|
||||
return sha;
|
||||
}
|
||||
try {
|
||||
const { stdout } = spawnSync("git", ["rev-parse", "HEAD"], {
|
||||
encoding: "utf-8",
|
||||
timeout: spawnTimeout,
|
||||
});
|
||||
return stdout.trim();
|
||||
} catch (error) {
|
||||
console.warn(error);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @returns {string}
|
||||
*/
|
||||
function getGitRef() {
|
||||
const ref = process.env["GITHUB_REF_NAME"] || process.env["BUILDKITE_BRANCH"];
|
||||
if (ref) {
|
||||
return ref;
|
||||
}
|
||||
try {
|
||||
const { stdout } = spawnSync("git", ["rev-parse", "--abbrev-ref", "HEAD"], {
|
||||
encoding: "utf-8",
|
||||
timeout: spawnTimeout,
|
||||
});
|
||||
return stdout.trim();
|
||||
} catch (error) {
|
||||
console.warn(error);
|
||||
return "<unknown>";
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @returns {string}
|
||||
*/
|
||||
function getTmpdir() {
|
||||
if (isWindows) {
|
||||
for (const key of ["TMPDIR", "TEMP", "TEMPDIR", "TMP", "RUNNER_TEMP"]) {
|
||||
const tmpdir = process.env[key] || "";
|
||||
// HACK: There are too many bugs with cygwin directories.
|
||||
// We should probably run Windows tests in both cygwin and powershell.
|
||||
if (/cygwin|cygdrive/i.test(tmpdir) || !/^[a-z]/i.test(tmpdir)) {
|
||||
continue;
|
||||
}
|
||||
return normalizeWindows(tmpdir);
|
||||
}
|
||||
const appData = process.env["LOCALAPPDATA"];
|
||||
if (appData) {
|
||||
const appDataTemp = join(appData, "Temp");
|
||||
if (existsSync(appDataTemp)) {
|
||||
return appDataTemp;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (isMacOS) {
|
||||
if (existsSync("/tmp")) {
|
||||
return "/tmp";
|
||||
}
|
||||
}
|
||||
return tmpdir();
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} path
|
||||
* @returns {boolean}
|
||||
@@ -937,6 +818,7 @@ function getTests(cwd) {
|
||||
* @property {string} [packageManager]
|
||||
* @property {string} [testPath]
|
||||
* @property {string} [testRunner]
|
||||
* @property {string[]} [testExtensions]
|
||||
* @property {boolean | Record<string, boolean | string>} [skipTests]
|
||||
*/
|
||||
|
||||
@@ -979,68 +861,77 @@ async function getVendorTests(cwd) {
|
||||
}
|
||||
|
||||
return Promise.all(
|
||||
relevantVendors.map(async ({ package: name, repository, tag, testPath, testRunner, packageManager, skipTests }) => {
|
||||
const vendorPath = join(cwd, "vendor", name);
|
||||
relevantVendors.map(
|
||||
async ({ package: name, repository, tag, testPath, testExtensions, testRunner, packageManager, skipTests }) => {
|
||||
const vendorPath = join(cwd, "vendor", name);
|
||||
|
||||
if (!existsSync(vendorPath)) {
|
||||
await spawnSafe({
|
||||
command: "git",
|
||||
args: ["clone", "--depth", "1", "--single-branch", repository, vendorPath],
|
||||
timeout: testTimeout,
|
||||
cwd,
|
||||
});
|
||||
}
|
||||
|
||||
if (!existsSync(vendorPath)) {
|
||||
await spawnSafe({
|
||||
command: "git",
|
||||
args: ["clone", "--depth", "1", "--single-branch", repository, vendorPath],
|
||||
args: ["fetch", "--depth", "1", "origin", "tag", tag],
|
||||
timeout: testTimeout,
|
||||
cwd,
|
||||
cwd: vendorPath,
|
||||
});
|
||||
}
|
||||
|
||||
await spawnSafe({
|
||||
command: "git",
|
||||
args: ["fetch", "--depth", "1", "origin", "tag", tag],
|
||||
timeout: testTimeout,
|
||||
cwd: vendorPath,
|
||||
});
|
||||
|
||||
const packageJsonPath = join(vendorPath, "package.json");
|
||||
if (!existsSync(packageJsonPath)) {
|
||||
throw new Error(`Vendor '${name}' does not have a package.json: ${packageJsonPath}`);
|
||||
}
|
||||
|
||||
const testPathPrefix = testPath || "test";
|
||||
const testParentPath = join(vendorPath, testPathPrefix);
|
||||
if (!existsSync(testParentPath)) {
|
||||
throw new Error(`Vendor '${name}' does not have a test directory: ${testParentPath}`);
|
||||
}
|
||||
|
||||
const isTest = path => {
|
||||
if (!isJavaScriptTest(path)) {
|
||||
return false;
|
||||
const packageJsonPath = join(vendorPath, "package.json");
|
||||
if (!existsSync(packageJsonPath)) {
|
||||
throw new Error(`Vendor '${name}' does not have a package.json: ${packageJsonPath}`);
|
||||
}
|
||||
|
||||
if (typeof skipTests === "boolean") {
|
||||
return !skipTests;
|
||||
const testPathPrefix = testPath || "test";
|
||||
const testParentPath = join(vendorPath, testPathPrefix);
|
||||
if (!existsSync(testParentPath)) {
|
||||
throw new Error(`Vendor '${name}' does not have a test directory: ${testParentPath}`);
|
||||
}
|
||||
|
||||
if (typeof skipTests === "object") {
|
||||
for (const [glob, reason] of Object.entries(skipTests)) {
|
||||
const pattern = new RegExp(`^${glob.replace(/\*/g, ".*")}$`);
|
||||
if (pattern.test(path) && reason) {
|
||||
return false;
|
||||
const isTest = path => {
|
||||
if (!isJavaScriptTest(path)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (typeof skipTests === "boolean") {
|
||||
return !skipTests;
|
||||
}
|
||||
|
||||
if (typeof skipTests === "object") {
|
||||
for (const [glob, reason] of Object.entries(skipTests)) {
|
||||
const pattern = new RegExp(`^${glob.replace(/\*/g, ".*")}$`);
|
||||
if (pattern.test(path) && reason) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
};
|
||||
return true;
|
||||
};
|
||||
|
||||
const testPaths = readdirSync(testParentPath, { encoding: "utf-8", recursive: true })
|
||||
.filter(filename => isTest(filename))
|
||||
.map(filename => join(testPathPrefix, filename));
|
||||
const testPaths = readdirSync(testParentPath, { encoding: "utf-8", recursive: true })
|
||||
.filter(filename =>
|
||||
testExtensions ? testExtensions.some(ext => filename.endsWith(`.${ext}`)) : isTest(filename),
|
||||
)
|
||||
.map(filename => join(testPathPrefix, filename))
|
||||
.filter(
|
||||
filename =>
|
||||
!filters?.length ||
|
||||
filters.some(filter => join(vendorPath, filename).replace(/\\/g, "/").includes(filter)),
|
||||
);
|
||||
|
||||
return {
|
||||
cwd: vendorPath,
|
||||
packageManager: packageManager || "bun",
|
||||
testRunner: testRunner || "bun",
|
||||
testPaths,
|
||||
};
|
||||
}),
|
||||
return {
|
||||
cwd: vendorPath,
|
||||
packageManager: packageManager || "bun",
|
||||
testRunner: testRunner || "bun",
|
||||
testPaths,
|
||||
};
|
||||
},
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1126,27 +1017,6 @@ function getRelevantTests(cwd) {
|
||||
return filteredTests;
|
||||
}
|
||||
|
||||
let ntStatus;
|
||||
|
||||
/**
|
||||
* @param {number} exitCode
|
||||
* @returns {string}
|
||||
*/
|
||||
function getWindowsExitCode(exitCode) {
|
||||
if (ntStatus === undefined) {
|
||||
const ntStatusPath = "C:\\Program Files (x86)\\Windows Kits\\10\\Include\\10.0.22621.0\\shared\\ntstatus.h";
|
||||
try {
|
||||
ntStatus = readFileSync(ntStatusPath, "utf-8");
|
||||
} catch (error) {
|
||||
console.warn(error);
|
||||
ntStatus = "";
|
||||
}
|
||||
}
|
||||
|
||||
const match = ntStatus.match(new RegExp(`(STATUS_\\w+).*0x${exitCode?.toString(16)}`, "i"));
|
||||
return match?.[1];
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} bunExe
|
||||
* @returns {string}
|
||||
@@ -1216,17 +1086,7 @@ async function getExecPathFromBuildKite(target) {
|
||||
throw new Error(`Could not find ${target}.zip from Buildkite: ${releasePath}`);
|
||||
}
|
||||
|
||||
if (isWindows) {
|
||||
await spawnSafe({
|
||||
command: "powershell",
|
||||
args: ["-Command", `Expand-Archive -Path ${zipPath} -DestinationPath ${releasePath} -Force`],
|
||||
});
|
||||
} else {
|
||||
await spawnSafe({
|
||||
command: "unzip",
|
||||
args: ["-o", zipPath, "-d", releasePath],
|
||||
});
|
||||
}
|
||||
await unzip(zipPath, releasePath);
|
||||
|
||||
for (const entry of readdirSync(releasePath, { recursive: true, encoding: "utf-8" })) {
|
||||
const execPath = join(releasePath, entry);
|
||||
@@ -1262,308 +1122,6 @@ function getRevision(execPath) {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @returns {string}
|
||||
*/
|
||||
function getOsText() {
|
||||
const { platform } = process;
|
||||
switch (platform) {
|
||||
case "darwin":
|
||||
return "darwin";
|
||||
case "linux":
|
||||
return "linux";
|
||||
case "win32":
|
||||
return "windows";
|
||||
default:
|
||||
return platform;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @returns {string}
|
||||
*/
|
||||
function getOsPrettyText() {
|
||||
const { platform } = process;
|
||||
if (platform === "darwin") {
|
||||
const properties = {};
|
||||
for (const property of ["productName", "productVersion", "buildVersion"]) {
|
||||
try {
|
||||
const { error, stdout } = spawnSync("sw_vers", [`-${property}`], {
|
||||
encoding: "utf-8",
|
||||
timeout: spawnTimeout,
|
||||
env: {
|
||||
PATH: process.env.PATH,
|
||||
},
|
||||
});
|
||||
if (error) {
|
||||
throw error;
|
||||
}
|
||||
properties[property] = stdout.trim();
|
||||
} catch (error) {
|
||||
console.warn(error);
|
||||
}
|
||||
}
|
||||
const { productName, productVersion, buildVersion } = properties;
|
||||
if (!productName) {
|
||||
return "macOS";
|
||||
}
|
||||
if (!productVersion) {
|
||||
return productName;
|
||||
}
|
||||
if (!buildVersion) {
|
||||
return `${productName} ${productVersion}`;
|
||||
}
|
||||
return `${productName} ${productVersion} (build: ${buildVersion})`;
|
||||
}
|
||||
if (platform === "linux") {
|
||||
try {
|
||||
const { error, stdout } = spawnSync("lsb_release", ["--description", "--short"], {
|
||||
encoding: "utf-8",
|
||||
timeout: spawnTimeout,
|
||||
env: {
|
||||
PATH: process.env.PATH,
|
||||
},
|
||||
});
|
||||
if (error) {
|
||||
throw error;
|
||||
}
|
||||
return stdout.trim();
|
||||
} catch (error) {
|
||||
console.warn(error);
|
||||
return "Linux";
|
||||
}
|
||||
}
|
||||
if (platform === "win32") {
|
||||
try {
|
||||
const { error, stdout } = spawnSync("cmd", ["/c", "ver"], {
|
||||
encoding: "utf-8",
|
||||
timeout: spawnTimeout,
|
||||
env: {
|
||||
PATH: process.env.PATH,
|
||||
},
|
||||
});
|
||||
if (error) {
|
||||
throw error;
|
||||
}
|
||||
return stdout.trim();
|
||||
} catch (error) {
|
||||
console.warn(error);
|
||||
return "Windows";
|
||||
}
|
||||
}
|
||||
return platform;
|
||||
}
|
||||
|
||||
/**
|
||||
* @returns {string}
|
||||
*/
|
||||
function getOsEmoji() {
|
||||
const { platform } = process;
|
||||
switch (platform) {
|
||||
case "darwin":
|
||||
return isBuildKite ? ":apple:" : "";
|
||||
case "win32":
|
||||
return isBuildKite ? ":windows:" : "🪟";
|
||||
case "linux":
|
||||
return isBuildKite ? ":linux:" : "🐧";
|
||||
default:
|
||||
return "🔮";
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @returns {string}
|
||||
*/
|
||||
function getArchText() {
|
||||
const { arch } = process;
|
||||
switch (arch) {
|
||||
case "x64":
|
||||
return "x64";
|
||||
case "arm64":
|
||||
return "aarch64";
|
||||
default:
|
||||
return arch;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @returns {string}
|
||||
*/
|
||||
function getArchEmoji() {
|
||||
const { arch } = process;
|
||||
switch (arch) {
|
||||
case "x64":
|
||||
return "🖥";
|
||||
case "arm64":
|
||||
return "💪";
|
||||
default:
|
||||
return "🔮";
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @returns {string | undefined}
|
||||
*/
|
||||
function getGlibcVersion() {
|
||||
if (!isLinux) {
|
||||
return;
|
||||
}
|
||||
try {
|
||||
const { header } = process.report.getReport();
|
||||
const { glibcVersionRuntime } = header;
|
||||
if (typeof glibcVersionRuntime === "string") {
|
||||
return glibcVersionRuntime;
|
||||
}
|
||||
} catch (error) {
|
||||
console.warn(error);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @returns {string | undefined}
|
||||
*/
|
||||
function getBuildUrl() {
|
||||
if (isBuildKite) {
|
||||
const buildUrl = process.env["BUILDKITE_BUILD_URL"];
|
||||
const jobId = process.env["BUILDKITE_JOB_ID"];
|
||||
if (buildUrl) {
|
||||
return jobId ? `${buildUrl}#${jobId}` : buildUrl;
|
||||
}
|
||||
}
|
||||
if (isGitHubAction) {
|
||||
const baseUrl = process.env["GITHUB_SERVER_URL"];
|
||||
const repository = process.env["GITHUB_REPOSITORY"];
|
||||
const runId = process.env["GITHUB_RUN_ID"];
|
||||
if (baseUrl && repository && runId) {
|
||||
return `${baseUrl}/${repository}/actions/runs/${runId}`;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @returns {string}
|
||||
*/
|
||||
function getBuildLabel() {
|
||||
if (isBuildKite) {
|
||||
const label = process.env["BUILDKITE_LABEL"] || process.env["BUILDKITE_GROUP_LABEL"];
|
||||
if (label) {
|
||||
return label.replace("- test-bun", "").replace("- bun-test", "").trim();
|
||||
}
|
||||
}
|
||||
return `${getOsEmoji()} ${getArchText()}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} file
|
||||
* @param {number} [line]
|
||||
* @returns {string | undefined}
|
||||
*/
|
||||
function getFileUrl(file, line) {
|
||||
const filePath = file.replace(/\\/g, "/");
|
||||
|
||||
let url;
|
||||
if (pullRequest) {
|
||||
const fileMd5 = crypto.createHash("md5").update(filePath).digest("hex");
|
||||
url = `${baseUrl}/${repository}/pull/${pullRequest}/files#diff-${fileMd5}`;
|
||||
if (line !== undefined) {
|
||||
url += `L${line}`;
|
||||
}
|
||||
} else if (gitSha) {
|
||||
url = `${baseUrl}/${repository}/blob/${gitSha}/${filePath}`;
|
||||
if (line !== undefined) {
|
||||
url += `#L${line}`;
|
||||
}
|
||||
}
|
||||
|
||||
return url;
|
||||
}
|
||||
|
||||
/**
|
||||
* @returns {string | undefined}
|
||||
*/
|
||||
function getCI() {
|
||||
if (isBuildKite) {
|
||||
return "BuildKite";
|
||||
}
|
||||
if (isGitHubAction) {
|
||||
return "GitHub Actions";
|
||||
}
|
||||
if (isCI) {
|
||||
return "CI";
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @returns {string | undefined}
|
||||
*/
|
||||
function getCloud() {
|
||||
if (isAWS) {
|
||||
return "AWS";
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @returns {string | undefined}
|
||||
*/
|
||||
function getHostname() {
|
||||
if (isBuildKite) {
|
||||
return process.env["BUILDKITE_AGENT_NAME"];
|
||||
}
|
||||
try {
|
||||
return hostname();
|
||||
} catch (error) {
|
||||
console.warn(error);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @returns {Promise<string | undefined>}
|
||||
*/
|
||||
async function getPublicIp() {
|
||||
const addressUrls = ["https://checkip.amazonaws.com", "https://ipinfo.io/ip"];
|
||||
if (isAWS) {
|
||||
addressUrls.unshift("http://169.254.169.254/latest/meta-data/public-ipv4");
|
||||
}
|
||||
for (const url of addressUrls) {
|
||||
try {
|
||||
const response = await fetch(url);
|
||||
const { ok, status, statusText } = response;
|
||||
if (!ok) {
|
||||
throw new Error(`${status} ${statusText}: ${url}`);
|
||||
}
|
||||
const text = await response.text();
|
||||
const address = text.trim();
|
||||
if (isIP(address)) {
|
||||
return address;
|
||||
} else {
|
||||
throw new Error(`Invalid IP address: ${address}`);
|
||||
}
|
||||
} catch (error) {
|
||||
console.warn(error);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @returns {string | undefined}
|
||||
*/
|
||||
function getTailscaleIp() {
|
||||
try {
|
||||
const { status, stdout } = spawnSync("tailscale", ["ip", "--1"], {
|
||||
encoding: "utf-8",
|
||||
timeout: spawnTimeout,
|
||||
env: {
|
||||
PATH: process.env.PATH,
|
||||
},
|
||||
});
|
||||
if (status === 0) {
|
||||
return stdout.trim();
|
||||
}
|
||||
} catch {
|
||||
// ...
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {...string} paths
|
||||
* @returns {string}
|
||||
@@ -1575,28 +1133,6 @@ function addPath(...paths) {
|
||||
return paths.join(":");
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} title
|
||||
* @param {function} fn
|
||||
*/
|
||||
async function runTask(title, fn) {
|
||||
if (isGitHubAction) {
|
||||
console.log(`::group::${stripAnsi(title)}`);
|
||||
} else if (isBuildKite) {
|
||||
console.log(`--- ${title}`);
|
||||
} else {
|
||||
console.log(title);
|
||||
}
|
||||
try {
|
||||
return await fn();
|
||||
} finally {
|
||||
if (isGitHubAction) {
|
||||
console.log("::endgroup::");
|
||||
}
|
||||
console.log();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {TestResult | TestResult[]} result
|
||||
* @param {boolean} concise
|
||||
@@ -1649,7 +1185,7 @@ function formatTestToMarkdown(result, concise) {
|
||||
markdown += "</li>\n";
|
||||
} else {
|
||||
markdown += "</summary>\n\n";
|
||||
if (isBuildKite) {
|
||||
if (isBuildkite) {
|
||||
const preview = escapeCodeBlock(stdout);
|
||||
markdown += `\`\`\`terminal\n${preview}\n\`\`\`\n`;
|
||||
} else {
|
||||
@@ -1843,42 +1379,6 @@ function parseDuration(duration) {
|
||||
return parseFloat(value) * (unit === "ms" ? 1 : 1000);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} status
|
||||
* @returns {string}
|
||||
*/
|
||||
function getTestEmoji(status) {
|
||||
switch (status) {
|
||||
case "pass":
|
||||
return "✅";
|
||||
case "fail":
|
||||
return "❌";
|
||||
case "skip":
|
||||
return "⏭";
|
||||
case "todo":
|
||||
return "✏️";
|
||||
default:
|
||||
return "🔮";
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} status
|
||||
* @returns {string}
|
||||
*/
|
||||
function getTestColor(status) {
|
||||
switch (status) {
|
||||
case "pass":
|
||||
return getAnsi("green");
|
||||
case "fail":
|
||||
return getAnsi("red");
|
||||
case "skip":
|
||||
case "todo":
|
||||
default:
|
||||
return getAnsi("gray");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} execPath
|
||||
* @returns {boolean}
|
||||
@@ -1902,7 +1402,7 @@ function getExitCode(outcome) {
|
||||
if (outcome === "pass") {
|
||||
return 0;
|
||||
}
|
||||
if (!isBuildKite) {
|
||||
if (!isBuildkite) {
|
||||
return 1;
|
||||
}
|
||||
// On Buildkite, you can define a `soft_fail` property to differentiate
|
||||
@@ -1916,52 +1416,25 @@ function getExitCode(outcome) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
/**
|
||||
* @returns {Promise<Date | undefined>}
|
||||
*/
|
||||
async function getDoomsdayDate() {
|
||||
try {
|
||||
const response = await fetch("http://169.254.169.254/latest/meta-data/spot/instance-action");
|
||||
if (response.ok) {
|
||||
const { time } = await response.json();
|
||||
return new Date(time);
|
||||
}
|
||||
} catch {
|
||||
// Ignore
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} signal
|
||||
*/
|
||||
async function beforeExit(signal) {
|
||||
const endOfWorld = await getDoomsdayDate();
|
||||
if (endOfWorld) {
|
||||
const timeMin = 10 * 1000;
|
||||
const timeLeft = Math.max(0, date.getTime() - Date.now());
|
||||
if (timeLeft > timeMin) {
|
||||
setTimeout(() => onExit(signal), timeLeft - timeMin);
|
||||
return;
|
||||
}
|
||||
}
|
||||
onExit(signal);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} signal
|
||||
*/
|
||||
async function onExit(signal) {
|
||||
function onExit(signal) {
|
||||
const label = `${getAnsi("red")}Received ${signal}, exiting...${getAnsi("reset")}`;
|
||||
await runTask(label, () => {
|
||||
startGroup(label, () => {
|
||||
process.exit(getExitCode("cancel"));
|
||||
});
|
||||
}
|
||||
|
||||
for (const signal of ["SIGINT", "SIGTERM", "SIGHUP"]) {
|
||||
process.on(signal, () => beforeExit(signal));
|
||||
export async function main() {
|
||||
for (const signal of ["SIGINT", "SIGTERM", "SIGHUP"]) {
|
||||
process.on(signal, () => onExit(signal));
|
||||
}
|
||||
|
||||
printEnvironment();
|
||||
const results = await runTests();
|
||||
const ok = results.every(({ ok }) => ok);
|
||||
process.exit(getExitCode(ok ? "pass" : "fail"));
|
||||
}
|
||||
|
||||
await runTask("Environment", printInfo);
|
||||
const results = await runTests();
|
||||
const ok = results.every(({ ok }) => ok);
|
||||
process.exit(getExitCode(ok ? "pass" : "fail"));
|
||||
await main();
|
||||
|
||||
scripts/utils.mjs (new file, +1609 lines)
File diff suppressed because it is too large
@@ -6,7 +6,7 @@
|
||||
#include <JavaScriptCore/LazyPropertyInlines.h>
|
||||
#include <JavaScriptCore/VMTrapsInlines.h>
|
||||
#include <JavaScriptCore/JSModuleLoader.h>
|
||||
|
||||
#include <JavaScriptCore/Debugger.h>
|
||||
#include <utility>
|
||||
|
||||
#include "InternalModuleRegistryConstants.h"
|
||||
@@ -54,6 +54,9 @@ JSC::JSValue generateModule(JSC::JSGlobalObject* globalObject, JSC::VM& vm, cons
|
||||
static_cast<JSC::JSGlobalObject*>(globalObject));
|
||||
|
||||
RETURN_IF_EXCEPTION(throwScope, {});
|
||||
if (UNLIKELY(globalObject->hasDebugger() && globalObject->debugger()->isInteractivelyDebugging())) {
|
||||
globalObject->debugger()->sourceParsed(globalObject, source.provider(), -1, ""_s);
|
||||
}
|
||||
|
||||
JSC::MarkedArgumentBuffer argList;
|
||||
JSValue result = JSC::profiledCall(
|
||||
|
||||
@@ -3443,6 +3443,42 @@ JSC_DEFINE_CUSTOM_SETTER(EventSource_setter,
|
||||
return true;
|
||||
}
|
||||
|
||||
JSC_DEFINE_HOST_FUNCTION(jsFunctionToClass, (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame))
|
||||
{
|
||||
// Mimick the behavior of class Foo {} for a regular JSFunction.
|
||||
auto& vm = globalObject->vm();
|
||||
auto scope = DECLARE_THROW_SCOPE(vm);
|
||||
auto target = callFrame->argument(0).toObject(globalObject);
|
||||
auto name = callFrame->argument(1);
|
||||
JSObject* base = callFrame->argument(2).getObject();
|
||||
JSObject* prototypeBase = nullptr;
|
||||
RETURN_IF_EXCEPTION(scope, encodedJSValue());
|
||||
|
||||
if (!base) {
|
||||
base = globalObject->functionPrototype();
|
||||
} else if (auto proto = base->getIfPropertyExists(globalObject, vm.propertyNames->prototype)) {
|
||||
if (auto protoObject = proto.getObject()) {
|
||||
prototypeBase = protoObject;
|
||||
}
|
||||
} else {
|
||||
RETURN_IF_EXCEPTION(scope, encodedJSValue());
|
||||
JSC::throwTypeError(globalObject, scope, "Base class must have a prototype property"_s);
|
||||
return encodedJSValue();
|
||||
}
|
||||
|
||||
JSObject* prototype = prototypeBase ? JSC::constructEmptyObject(globalObject, prototypeBase) : JSC::constructEmptyObject(globalObject);
|
||||
RETURN_IF_EXCEPTION(scope, encodedJSValue());
|
||||
|
||||
prototype->structure()->setMayBePrototype(true);
|
||||
prototype->putDirect(vm, vm.propertyNames->constructor, target, PropertyAttribute::DontEnum | 0);
|
||||
|
||||
target->setPrototypeDirect(vm, base);
|
||||
target->putDirect(vm, vm.propertyNames->prototype, prototype, PropertyAttribute::DontEnum | 0);
|
||||
target->putDirect(vm, vm.propertyNames->name, name, PropertyAttribute::DontEnum | 0);
|
||||
|
||||
return JSValue::encode(jsUndefined());
|
||||
}
|
||||
|
||||
EncodedJSValue GlobalObject::assignToStream(JSValue stream, JSValue controller)
|
||||
{
|
||||
JSC::VM& vm = this->vm();
|
||||
@@ -3544,6 +3580,7 @@ void GlobalObject::addBuiltinGlobals(JSC::VM& vm)
|
||||
GlobalPropertyInfo(builtinNames.requireMapPrivateName(), this->requireMap(), PropertyAttribute::DontDelete | PropertyAttribute::ReadOnly | 0),
|
||||
GlobalPropertyInfo(builtinNames.TextEncoderStreamEncoderPrivateName(), JSTextEncoderStreamEncoderConstructor(), PropertyAttribute::DontDelete | PropertyAttribute::ReadOnly | 0),
|
||||
GlobalPropertyInfo(builtinNames.makeErrorWithCodePrivateName(), JSFunction::create(vm, this, 2, String(), jsFunctionMakeErrorWithCode, ImplementationVisibility::Public), PropertyAttribute::DontDelete | PropertyAttribute::ReadOnly),
|
||||
GlobalPropertyInfo(builtinNames.toClassPrivateName(), JSFunction::create(vm, this, 1, String(), jsFunctionToClass, ImplementationVisibility::Public), PropertyAttribute::DontDelete | PropertyAttribute::ReadOnly),
|
||||
};
|
||||
addStaticGlobals(staticGlobals, std::size(staticGlobals));
|
||||
|
||||
|
||||
@@ -14,22 +14,22 @@ pub const OS = struct {
|
||||
pub fn create(globalObject: *JSC.JSGlobalObject) JSC.JSValue {
|
||||
const module = JSC.JSValue.createEmptyObject(globalObject, 16);
|
||||
|
||||
module.put(globalObject, JSC.ZigString.static("cpus"), JSC.NewFunction(globalObject, JSC.ZigString.static("cpus"), 0, cpus, true));
|
||||
module.put(globalObject, JSC.ZigString.static("freemem"), JSC.NewFunction(globalObject, JSC.ZigString.static("freemem"), 0, freemem, true));
|
||||
module.put(globalObject, JSC.ZigString.static("getPriority"), JSC.NewFunction(globalObject, JSC.ZigString.static("getPriority"), 1, getPriority, true));
|
||||
module.put(globalObject, JSC.ZigString.static("homedir"), JSC.NewFunction(globalObject, JSC.ZigString.static("homedir"), 0, homedir, true));
|
||||
module.put(globalObject, JSC.ZigString.static("hostname"), JSC.NewFunction(globalObject, JSC.ZigString.static("hostname"), 0, hostname, true));
|
||||
module.put(globalObject, JSC.ZigString.static("loadavg"), JSC.NewFunction(globalObject, JSC.ZigString.static("loadavg"), 0, loadavg, true));
|
||||
module.put(globalObject, JSC.ZigString.static("machine"), JSC.NewFunction(globalObject, JSC.ZigString.static("machine"), 0, machine, true));
|
||||
module.put(globalObject, JSC.ZigString.static("networkInterfaces"), JSC.NewFunction(globalObject, JSC.ZigString.static("networkInterfaces"), 0, networkInterfaces, true));
|
||||
module.put(globalObject, JSC.ZigString.static("release"), JSC.NewFunction(globalObject, JSC.ZigString.static("release"), 0, release, true));
|
||||
module.put(globalObject, JSC.ZigString.static("setPriority"), JSC.NewFunction(globalObject, JSC.ZigString.static("setPriority"), 2, setPriority, true));
|
||||
module.put(globalObject, JSC.ZigString.static("totalmem"), JSC.NewFunction(globalObject, JSC.ZigString.static("totalmem"), 0, totalmem, true));
|
||||
module.put(globalObject, JSC.ZigString.static("type"), JSC.NewFunction(globalObject, JSC.ZigString.static("type"), 0, OS.type, true));
|
||||
module.put(globalObject, JSC.ZigString.static("uptime"), JSC.NewFunction(globalObject, JSC.ZigString.static("uptime"), 0, uptime, true));
|
||||
module.put(globalObject, JSC.ZigString.static("userInfo"), JSC.NewFunction(globalObject, JSC.ZigString.static("userInfo"), 0, userInfo, true));
|
||||
module.put(globalObject, JSC.ZigString.static("version"), JSC.NewFunction(globalObject, JSC.ZigString.static("version"), 0, version, true));
|
||||
module.put(globalObject, JSC.ZigString.static("machine"), JSC.NewFunction(globalObject, JSC.ZigString.static("machine"), 0, machine, true));
|
||||
module.put(globalObject, JSC.ZigString.static("cpus"), JSC.NewFunction(globalObject, JSC.ZigString.static("cpus"), 0, cpus, false));
|
||||
module.put(globalObject, JSC.ZigString.static("freemem"), JSC.NewFunction(globalObject, JSC.ZigString.static("freemem"), 0, freemem, false));
|
||||
module.put(globalObject, JSC.ZigString.static("getPriority"), JSC.NewFunction(globalObject, JSC.ZigString.static("getPriority"), 1, getPriority, false));
|
||||
module.put(globalObject, JSC.ZigString.static("homedir"), JSC.NewFunction(globalObject, JSC.ZigString.static("homedir"), 0, homedir, false));
|
||||
module.put(globalObject, JSC.ZigString.static("hostname"), JSC.NewFunction(globalObject, JSC.ZigString.static("hostname"), 0, hostname, false));
|
||||
module.put(globalObject, JSC.ZigString.static("loadavg"), JSC.NewFunction(globalObject, JSC.ZigString.static("loadavg"), 0, loadavg, false));
|
||||
module.put(globalObject, JSC.ZigString.static("machine"), JSC.NewFunction(globalObject, JSC.ZigString.static("machine"), 0, machine, false));
|
||||
module.put(globalObject, JSC.ZigString.static("networkInterfaces"), JSC.NewFunction(globalObject, JSC.ZigString.static("networkInterfaces"), 0, networkInterfaces, false));
|
||||
module.put(globalObject, JSC.ZigString.static("release"), JSC.NewFunction(globalObject, JSC.ZigString.static("release"), 0, release, false));
|
||||
module.put(globalObject, JSC.ZigString.static("setPriority"), JSC.NewFunction(globalObject, JSC.ZigString.static("setPriority"), 2, setPriority, false));
|
||||
module.put(globalObject, JSC.ZigString.static("totalmem"), JSC.NewFunction(globalObject, JSC.ZigString.static("totalmem"), 0, totalmem, false));
|
||||
module.put(globalObject, JSC.ZigString.static("type"), JSC.NewFunction(globalObject, JSC.ZigString.static("type"), 0, OS.type, false));
|
||||
module.put(globalObject, JSC.ZigString.static("uptime"), JSC.NewFunction(globalObject, JSC.ZigString.static("uptime"), 0, uptime, false));
|
||||
module.put(globalObject, JSC.ZigString.static("userInfo"), JSC.NewFunction(globalObject, JSC.ZigString.static("userInfo"), 0, userInfo, false));
|
||||
module.put(globalObject, JSC.ZigString.static("version"), JSC.NewFunction(globalObject, JSC.ZigString.static("version"), 0, version, false));
|
||||
module.put(globalObject, JSC.ZigString.static("machine"), JSC.NewFunction(globalObject, JSC.ZigString.static("machine"), 0, machine, false));
|
||||
|
||||
return module;
|
||||
}
|
||||
|
||||
@@ -51,6 +51,7 @@ const Resolver = _resolver.Resolver;
|
||||
const TOML = @import("./toml/toml_parser.zig").TOML;
|
||||
const JSC = bun.JSC;
|
||||
const PackageManager = @import("./install/install.zig").PackageManager;
|
||||
const DataURL = @import("./resolver/data_url.zig").DataURL;
|
||||
|
||||
pub fn MacroJSValueType_() type {
|
||||
if (comptime JSC.is_bindgen) {
|
||||
@@ -1300,6 +1301,18 @@ pub const Bundler = struct {
|
||||
break :brk logger.Source.initPathString(path.text, "");
|
||||
}
|
||||
|
||||
if (strings.startsWith(path.text, "data:")) {
|
||||
const data_url = DataURL.parseWithoutCheck(path.text) catch |err| {
|
||||
bundler.log.addErrorFmt(null, logger.Loc.Empty, bundler.allocator, "{s} parsing data url \"{s}\"", .{ @errorName(err), path.text }) catch {};
|
||||
return null;
|
||||
};
|
||||
const body = data_url.decodeData(this_parse.allocator) catch |err| {
|
||||
bundler.log.addErrorFmt(null, logger.Loc.Empty, bundler.allocator, "{s} decoding data \"{s}\"", .{ @errorName(err), path.text }) catch {};
|
||||
return null;
|
||||
};
|
||||
break :brk logger.Source.initPathString(path.text, body);
|
||||
}
|
||||
|
||||
const entry = bundler.resolver.caches.fs.readFileWithAllocator(
|
||||
if (use_shared_buffer) bun.fs_allocator else this_parse.allocator,
|
||||
bundler.fs,
|
||||
|
||||
src/js/builtins.d.ts (vendored, +13 lines)
@@ -546,3 +546,16 @@ declare interface Error {
 */
declare function $ERR_INVALID_ARG_TYPE(argName: string, expectedType: string, actualValue: string): TypeError;
declare function $ERR_INVALID_ARG_TYPE(argName: string, expectedTypes: any[], actualValue: string): TypeError;
/**
 * Convert a function to a class-like object.
 *
 * This does:
 * - Sets the name of the function to the given name
 * - Sets .prototype to Object.create(base?.prototype, { constructor: { value: fn } })
 * - Calls Object.setPrototypeOf(fn, base ?? Function.prototype)
 *
 * @param fn - The function to convert to a class
 * @param name - The name of the class
 * @param base - The base class to inherit from
 */
declare function $toClass(fn: Function, name: string, base?: Function | undefined | null);

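For orientation only (not part of this diff): per the docstring above, $toClass(fn, name, base) behaves roughly like the sketch below. The real builtin is jsFunctionToClass, added to ZigGlobalObject.cpp earlier in this diff, and it differs in exact property attributes.

// Illustrative JavaScript sketch of the behavior described in the $toClass docstring; not the builtin itself.
function toClassSketch(fn, name, base) {
  // Set the function's name to the given name.
  Object.defineProperty(fn, "name", { value: name, configurable: true });
  // Give it a class-like prototype whose constructor points back at fn.
  fn.prototype = Object.create(base ? base.prototype : Object.prototype, {
    constructor: { value: fn },
  });
  // Make static members inherit from the base (or Function.prototype when no base is given).
  Object.setPrototypeOf(fn, base ?? Function.prototype);
}

The zlib changes later in this diff rely on exactly this pattern: they replace lines of the form X.prototype = Object.create(Y.prototype) with $toClass(X, "X", Y).
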
@@ -81,7 +81,6 @@ using namespace JSC;
|
||||
macro(encoding) \
|
||||
macro(end) \
|
||||
macro(errno) \
|
||||
macro(makeErrorWithCode) \
|
||||
macro(errorSteps) \
|
||||
macro(evaluateCommonJSModule) \
|
||||
macro(evaluated) \
|
||||
@@ -134,6 +133,7 @@ using namespace JSC;
|
||||
macro(localStreams) \
|
||||
macro(main) \
|
||||
macro(makeDOMException) \
|
||||
macro(makeErrorWithCode) \
|
||||
macro(makeGetterTypeError) \
|
||||
macro(makeThisTypeError) \
|
||||
macro(method) \
|
||||
@@ -152,8 +152,8 @@ using namespace JSC;
|
||||
macro(password) \
|
||||
macro(patch) \
|
||||
macro(path) \
|
||||
macro(paths) \
|
||||
macro(pathname) \
|
||||
macro(paths) \
|
||||
macro(pause) \
|
||||
macro(pendingAbortRequest) \
|
||||
macro(pendingPullIntos) \
|
||||
@@ -227,6 +227,7 @@ using namespace JSC;
|
||||
macro(textEncoderStreamEncoder) \
|
||||
macro(TextEncoderStreamEncoder) \
|
||||
macro(textEncoderStreamTransform) \
|
||||
macro(toClass) \
|
||||
macro(toNamespacedPath) \
|
||||
macro(trace) \
|
||||
macro(transformAlgorithm) \
|
||||
|
||||
@@ -87,31 +87,40 @@ function lazyCpus({ cpus }) {
// all logic based on `process.platform` and `process.arch` is inlined at bundle time
function bound(obj) {
  return {
    availableParallelism: () => navigator.hardwareConcurrency,
    arch: () => process.arch,
    availableParallelism: function () {
      return navigator.hardwareConcurrency;
    },
    arch: function () {
      return process.arch;
    },
    cpus: lazyCpus(obj),
    endianness: () => (process.arch === "arm64" || process.arch === "x64" ? "LE" : $bundleError("TODO: endianness")),
    endianness: function () {
      return process.arch === "arm64" || process.arch === "x64" ? "LE" : $bundleError("TODO: endianness");
    },
    freemem: obj.freemem.bind(obj),
    getPriority: obj.getPriority.bind(obj),
    homedir: obj.homedir.bind(obj),
    hostname: obj.hostname.bind(obj),
    loadavg: obj.loadavg.bind(obj),
    networkInterfaces: obj.networkInterfaces.bind(obj),
    platform: () => process.platform,
    platform: function () {
      return process.platform;
    },
    release: obj.release.bind(obj),
    setPriority: obj.setPriority.bind(obj),
    get tmpdir() {
      return tmpdir;
    },
    totalmem: obj.totalmem.bind(obj),
    type: () =>
      process.platform === "win32"
    type: function () {
      return process.platform === "win32"
        ? "Windows_NT"
        : process.platform === "darwin"
          ? "Darwin"
          : process.platform === "linux"
            ? "Linux"
            : $bundleError("TODO: type"),
            : $bundleError("TODO: type");
    },
    uptime: obj.uptime.bind(obj),
    userInfo: obj.userInfo.bind(obj),
    version: obj.version.bind(obj),

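Note: the comment at the top of this hunk is what makes the `$bundleError` branches safe. `process.platform` and `process.arch` are constants when os.js is bundled for a specific target, so branches on them collapse and the error branch is eliminated. As a rough illustration (assuming a linux-x64 target; the exact output shape is up to the bundler), `type` ends up behaving like:

// Illustration only: what type() reduces to for an assumed linux-x64 target,
// once process.platform is inlined as "linux" and the dead branches are removed.
function type() {
  return "Linux";
}
console.log(type()); // "Linux"
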
@@ -206,7 +206,7 @@ function ZlibBase(opts, mode, handle, { flush, finishFlush, fullFlush }) {
  this._info = opts && opts.info;
  this._maxOutputLength = maxOutputLength;
}
ZlibBase.prototype = Object.create(Transform.prototype);
$toClass(ZlibBase, "ZlibBase", Transform);

ObjectDefineProperty(ZlibBase.prototype, "_closed", {
  configurable: true,
@@ -576,7 +576,7 @@ function Zlib(opts, mode) {
  this._level = level;
  this._strategy = strategy;
}
Zlib.prototype = Object.create(ZlibBase.prototype);
$toClass(Zlib, "Zlib", ZlibBase);

// This callback is used by `.params()` to wait until a full flush happened before adjusting the parameters.
// In particular, the call to the native `params()` function should not happen while a write is currently in progress on the threadpool.
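
Note: `.params()` is the public node:zlib API those two comment lines describe. It adjusts the compression level and strategy on a live stream, and the flush handshake keeps that from racing a write still running on the threadpool. A minimal usage sketch (standard node:zlib API, not code from this diff):

// Minimal illustration of the .params() flow described above.
const zlib = require("node:zlib");

const gzip = zlib.createGzip({ level: zlib.constants.Z_BEST_COMPRESSION });
gzip.resume(); // drain output so the example runs standalone
gzip.write("first chunk, compressed at Z_BEST_COMPRESSION");
// The new parameters only take effect after a full flush, so the native
// params() call never overlaps an in-flight write.
gzip.params(zlib.constants.Z_BEST_SPEED, zlib.constants.Z_DEFAULT_STRATEGY, () => {
  gzip.end("second chunk, compressed at Z_BEST_SPEED");
});
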
@@ -605,58 +605,63 @@ function Deflate(opts) {
  if (!(this instanceof Deflate)) return new Deflate(opts);
  Zlib.$apply(this, [opts, DEFLATE]);
}
Deflate.prototype = Object.create(Zlib.prototype);
$toClass(Deflate, "Deflate", Zlib);

function Inflate(opts) {
  if (!(this instanceof Inflate)) return new Inflate(opts);
  Zlib.$apply(this, [opts, INFLATE]);
}
Inflate.prototype = Object.create(Zlib.prototype);
$toClass(Inflate, "Inflate", Zlib);

function Gzip(opts) {
  if (!(this instanceof Gzip)) return new Gzip(opts);
  Zlib.$apply(this, [opts, GZIP]);
}
Gzip.prototype = Object.create(Zlib.prototype);
$toClass(Gzip, "Gzip", Zlib);

function Gunzip(opts) {
  if (!(this instanceof Gunzip)) return new Gunzip(opts);
  Zlib.$apply(this, [opts, GUNZIP]);
}
Gunzip.prototype = Object.create(Zlib.prototype);
$toClass(Gunzip, "Gunzip", Zlib);

function DeflateRaw(opts) {
  if (opts && opts.windowBits === 8) opts.windowBits = 9;
  if (!(this instanceof DeflateRaw)) return new DeflateRaw(opts);
  Zlib.$apply(this, [opts, DEFLATERAW]);
}
DeflateRaw.prototype = Object.create(Zlib.prototype);
$toClass(DeflateRaw, "DeflateRaw", Zlib);

function InflateRaw(opts) {
  if (!(this instanceof InflateRaw)) return new InflateRaw(opts);
  Zlib.$apply(this, [opts, INFLATERAW]);
}
InflateRaw.prototype = Object.create(Zlib.prototype);
$toClass(InflateRaw, "InflateRaw", Zlib);

function Unzip(opts) {
  if (!(this instanceof Unzip)) return new Unzip(opts);
  Zlib.$apply(this, [opts, UNZIP]);
}
Unzip.prototype = Object.create(Zlib.prototype);
$toClass(Unzip, "Unzip", Zlib);

function createConvenienceMethod(ctor, sync) {
function createConvenienceMethod(ctor, sync, methodName) {
  if (sync) {
    return function syncBufferWrapper(buffer, opts) {
    const fn = function (buffer, opts) {
      return zlibBufferSync(new ctor(opts), buffer);
    };
    ObjectDefineProperty(fn, "name", { value: methodName });
    return fn;
  } else {
    const fn = function (buffer, opts, callback) {
      if (typeof opts === "function") {
        callback = opts;
        opts = {};
      }
      return zlibBuffer(new ctor(opts), buffer, callback);
    };
    ObjectDefineProperty(fn, "name", { value: methodName });
    return fn;
  }
  return function asyncBufferWrapper(buffer, opts, callback) {
    if (typeof opts === "function") {
      callback = opts;
      opts = {};
    }
    return zlibBuffer(new ctor(opts), buffer, callback);
  };
}

const kMaxBrotliParam = 9;
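
Note: the new `methodName` parameter exists so the exported one-shot helpers report a meaningful `Function.prototype.name` instead of all sharing the wrapper names `syncBufferWrapper` and `asyncBufferWrapper`. A quick way to see the intended effect (hypothetical check, not part of the diff):

// Hypothetical check of the naming behavior the refactor above targets.
const zlib = require("node:zlib");
console.log(zlib.gzipSync.name); // "gzipSync" once methodName is passed through
console.log(zlib.gunzip.name);   // "gunzip"
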
@@ -696,29 +701,19 @@ function Brotli(opts, mode) {

  ZlibBase.$apply(this, [opts, mode, handle, brotliDefaultOpts]);
}
Brotli.prototype = Object.create(Zlib.prototype);
$toClass(Brotli, "Brotli", Zlib);

function BrotliCompress(opts) {
  if (!(this instanceof BrotliCompress)) return new BrotliCompress(opts);
  Brotli.$apply(this, [opts, BROTLI_ENCODE]);
}
BrotliCompress.prototype = Object.create(Brotli.prototype);
$toClass(BrotliCompress, "BrotliCompress", Brotli);

function BrotliDecompress(opts) {
  if (!(this instanceof BrotliDecompress)) return new BrotliDecompress(opts);
  Brotli.$apply(this, [opts, BROTLI_DECODE]);
}
BrotliDecompress.prototype = Object.create(Brotli.prototype);

function createProperty(ctor) {
  return {
    configurable: true,
    enumerable: true,
    value: function (options) {
      return new ctor(options);
    },
  };
}
$toClass(BrotliDecompress, "BrotliDecompress", Brotli);

// Legacy alias on the C++ wrapper object.
ObjectDefineProperty(NativeZlib.prototype, "jsref", {
@@ -743,36 +738,55 @@ const zlib = {
  BrotliCompress,
  BrotliDecompress,

  deflate: createConvenienceMethod(Deflate, false),
  deflateSync: createConvenienceMethod(Deflate, true),
  gzip: createConvenienceMethod(Gzip, false),
  gzipSync: createConvenienceMethod(Gzip, true),
  deflateRaw: createConvenienceMethod(DeflateRaw, false),
  deflateRawSync: createConvenienceMethod(DeflateRaw, true),
  unzip: createConvenienceMethod(Unzip, false),
  unzipSync: createConvenienceMethod(Unzip, true),
  inflate: createConvenienceMethod(Inflate, false),
  inflateSync: createConvenienceMethod(Inflate, true),
  gunzip: createConvenienceMethod(Gunzip, false),
  gunzipSync: createConvenienceMethod(Gunzip, true),
  inflateRaw: createConvenienceMethod(InflateRaw, false),
  inflateRawSync: createConvenienceMethod(InflateRaw, true),
  brotliCompress: createConvenienceMethod(BrotliCompress, false),
  brotliCompressSync: createConvenienceMethod(BrotliCompress, true),
  brotliDecompress: createConvenienceMethod(BrotliDecompress, false),
  brotliDecompressSync: createConvenienceMethod(BrotliDecompress, true),
  deflate: createConvenienceMethod(Deflate, false, "deflate"),
  deflateSync: createConvenienceMethod(Deflate, true, "deflateSync"),
  gzip: createConvenienceMethod(Gzip, false, "gzip"),
  gzipSync: createConvenienceMethod(Gzip, true, "gzipSync"),
  deflateRaw: createConvenienceMethod(DeflateRaw, false, "deflateRaw"),
  deflateRawSync: createConvenienceMethod(DeflateRaw, true, "deflateRawSync"),
  unzip: createConvenienceMethod(Unzip, false, "unzip"),
  unzipSync: createConvenienceMethod(Unzip, true, "unzipSync"),
  inflate: createConvenienceMethod(Inflate, false, "inflate"),
  inflateSync: createConvenienceMethod(Inflate, true, "inflateSync"),
  gunzip: createConvenienceMethod(Gunzip, false, "gunzip"),
  gunzipSync: createConvenienceMethod(Gunzip, true, "gunzipSync"),
  inflateRaw: createConvenienceMethod(InflateRaw, false, "inflateRaw"),
  inflateRawSync: createConvenienceMethod(InflateRaw, true, "inflateRawSync"),
  brotliCompress: createConvenienceMethod(BrotliCompress, false, "brotliCompress"),
  brotliCompressSync: createConvenienceMethod(BrotliCompress, true, "brotliCompressSync"),
  brotliDecompress: createConvenienceMethod(BrotliDecompress, false, "brotliDecompress"),
  brotliDecompressSync: createConvenienceMethod(BrotliDecompress, true, "brotliDecompressSync"),

  createDeflate: function (options) {
    return new Deflate(options);
  },
  createInflate: function (options) {
    return new Inflate(options);
  },
  createDeflateRaw: function (options) {
    return new DeflateRaw(options);
  },
  createInflateRaw: function (options) {
    return new InflateRaw(options);
  },
  createGzip: function (options) {
    return new Gzip(options);
  },
  createGunzip: function (options) {
    return new Gunzip(options);
  },
  createUnzip: function (options) {
    return new Unzip(options);
  },
  createBrotliCompress: function (options) {
    return new BrotliCompress(options);
  },
  createBrotliDecompress: function (options) {
    return new BrotliDecompress(options);
  },
};

ObjectDefineProperties(zlib, {
  createDeflate: createProperty(Deflate),
  createInflate: createProperty(Inflate),
  createDeflateRaw: createProperty(DeflateRaw),
  createInflateRaw: createProperty(InflateRaw),
  createGzip: createProperty(Gzip),
  createGunzip: createProperty(Gunzip),
  createUnzip: createProperty(Unzip),
  createBrotliCompress: createProperty(BrotliCompress),
  createBrotliDecompress: createProperty(BrotliDecompress),
  constants: {
    enumerable: true,
    value: ObjectFreeze(constants),

@@ -29,7 +29,6 @@ pub const StrictModeReservedWords = tables.StrictModeReservedWords;
pub const PropertyModifierKeyword = tables.PropertyModifierKeyword;
pub const TypescriptStmtKeyword = tables.TypescriptStmtKeyword;
pub const TypeScriptAccessibilityModifier = tables.TypeScriptAccessibilityModifier;
pub const ChildlessJSXTags = tables.ChildlessJSXTags;

fn notimpl() noreturn {
    Output.panic("not implemented yet!", .{});

@@ -552,26 +552,6 @@ pub const TypescriptStmtKeyword = enum {
    });
};

// Error: meta is a void element tag and must neither have `children` nor use `dangerouslySetInnerHTML`.
pub const ChildlessJSXTags = ComptimeStringMap(void, .{
    .{ "area", void },
    .{ "base", void },
    .{ "br", void },
    .{ "col", void },
    .{ "embed", void },
    .{ "hr", void },
    .{ "img", void },
    .{ "input", void },
    .{ "keygen", void },
    .{ "link", void },
    .{ "menuitem", void },
    .{ "meta", void },
    .{ "param", void },
    .{ "source", void },
    .{ "track", void },
    .{ "wbr", void },
});

// In a microbenchmark, this outperforms
pub const jsxEntity = ComptimeStringMap(CodePoint, .{
    .{ "Aacute", @as(CodePoint, 0x00C1) },

@@ -16345,24 +16345,7 @@ fn NewParser_(

const runtime = if (p.options.jsx.runtime == .automatic) options.JSX.Runtime.automatic else options.JSX.Runtime.classic;
const is_key_after_spread = e_.flags.contains(.is_key_after_spread);
var children_count = e_.children.len;

const is_childless_tag = FeatureFlags.react_specific_warnings and children_count > 0 and
    tag.data == .e_string and tag.data.e_string.isUTF8() and js_lexer.ChildlessJSXTags.has(tag.data.e_string.slice(p.allocator));

children_count = if (is_childless_tag) 0 else children_count;

if (children_count != e_.children.len) {
    // Error: meta is a void element tag and must neither have `children` nor use `dangerouslySetInnerHTML`.
    // ^ from react-dom
    p.log.addWarningFmt(
        p.source,
        tag.loc,
        p.allocator,
        "\\<{s} /> is a void element and must not have \"children\"",
        .{tag.data.e_string.slice(p.allocator)},
    ) catch {};
}
const children_count = e_.children.len;

// TODO: maybe we should split these into two different AST Nodes
// That would reduce the amount of allocations a little

test/js/node/string-module.test.js (new file, 19 lines)
@@ -0,0 +1,19 @@
import { expect, test } from "bun:test";

test("should import and execute ES module from string", async () => {
  const code = `export default function test(arg) { return arg + arg };`;
  const mod = await import("data:text/javascript," + code).then(mod => mod.default);
  const result = mod(1);
  expect(result).toEqual(2);
});

test("should import and execute ES module from string (base64)", async () => {
  const code = `export default function test(arg) { return arg + arg; }`;
  const mod = await import("data:text/javascript;base64," + btoa(code)).then(mod => mod.default);
  const result = mod(1);
  expect(result).toEqual(2);
});

test("should throw when importing malformed string (base64)", async () => {
  expect(() => import("data:text/javascript;base64,asdasdasd")).toThrowError("Base64DecodeError");
});
@@ -8,6 +8,51 @@ import * as stream from "node:stream";
import * as util from "node:util";
import * as zlib from "node:zlib";

describe("prototype and name and constructor", () => {
  for (let [name, Class] of [
    ["Gzip", zlib.Gzip],
    ["Gunzip", zlib.Gunzip],
    ["Deflate", zlib.Deflate],
    ["Inflate", zlib.Inflate],
    ["DeflateRaw", zlib.DeflateRaw],
  ]) {
    describe(`${name}`, () => {
      it(`${name}.prototype should be instanceof ${name}.__proto__`, () => {
        expect(Class.prototype).toBeInstanceOf(Class.__proto__);
      });
      it(`${name}.prototype.constructor should be ${name}`, () => {
        expect(Class.prototype.constructor).toBe(Class);
      });
      it(`${name}.name should be ${name}`, () => {
        expect(Class.name).toBe(name);
      });
      it(`${name}.prototype.__proto__.constructor.name should be Zlib`, () => {
        expect(Class.prototype.__proto__.constructor.name).toBe("Zlib");
      });
    });
  }

  for (let [name, Class] of [
    ["BrotliCompress", zlib.BrotliCompress],
    ["BrotliDecompress", zlib.BrotliDecompress],
  ]) {
    describe(`${name}`, () => {
      it(`${name}.prototype should be instanceof ${name}.__proto__`, () => {
        expect(Class.prototype).toBeInstanceOf(Class.__proto__);
      });
      it(`${name}.prototype.constructor should be ${name}`, () => {
        expect(Class.prototype.constructor).toBe(Class);
      });
      it(`${name}.name should be ${name}`, () => {
        expect(Class.name).toBe(name);
      });
      it(`${name}.prototype.__proto__.constructor.name should be Brotli`, () => {
        expect(Class.prototype.__proto__.constructor.name).toBe("Brotli");
      });
    });
  }
});

describe("zlib", () => {
  for (let library of ["zlib", "libdeflate"]) {
    for (let outputLibrary of ["zlib", "libdeflate"]) {

test/regression/issue/14515.test.tsx (new file, 30 lines)
@@ -0,0 +1,30 @@
import { expect, test } from "bun:test";

export function Input(a: InlineInputAttrs, ch: DocumentFragment) {
  const o_model = a.model
  const nullable = (a.type||'').indexOf('null') > -1

  return <input>
    {$on('input', (ev) => {
      var v = ev.currentTarget.value
      if (nullable && v === '') {
        o_model.set(null!)
      } else {
        // @ts-ignore typescript is confused by the type of o_model, rightly so.
        o_model.set(to_obs(v))
      }
    })}

  </input>

}

function _pad(n: number) {
  return (n < 10 ? ('0' + n) : n)
}

function _iso_date(d: Date) {
  return `${d.getFullYear()}-${_pad(d.getMonth()+1)}-${_pad(d.getDate())}`
}

test("runs without crashing", () => { })
test/runners/mocha.ts (new file, 15 lines)
@@ -0,0 +1,15 @@
import { describe, test, it } from "bun:test";
import { beforeAll, beforeEach, afterAll, afterEach } from "bun:test";

function set(name: string, value: unknown): void {
  // @ts-expect-error
  globalThis[name] = value;
}

set("describe", describe);
set("test", test);
set("it", it);
set("before", beforeAll);
set("beforeEach", beforeEach);
set("after", afterAll);
set("afterEach", afterEach);
@@ -3,13 +3,5 @@
    "package": "elysia",
    "repository": "https://github.com/elysiajs/elysia",
    "tag": "1.1.24"
  },
  {
    "package": "uuid",
    "repository": "https://github.com/uuidjs/uuid",
    "tag": "v10.0.0",
    "testRunner": "node",
    "testPath": "src/test",
    "skipTests": true
  }
]