diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs index 575e62f10d..c9c0051987 100644 --- a/.buildkite/ci.mjs +++ b/.buildkite/ci.mjs @@ -7,113 +7,22 @@ import { writeFileSync } from "node:fs"; import { join } from "node:path"; - -function getEnv(name, required = true) { - const value = process.env[name]; - - if (!value && required) { - throw new Error(`Missing environment variable: ${name}`); - } - - return value; -} - -function getRepository() { - const url = getEnv("BUILDKITE_PULL_REQUEST_REPO", false) || getEnv("BUILDKITE_REPO"); - const match = url.match(/github.com\/([^/]+)\/([^/]+)\.git$/); - if (!match) { - throw new Error(`Unsupported repository: ${url}`); - } - const [, owner, repo] = match; - return `${owner}/${repo}`; -} - -function getCommit() { - return getEnv("BUILDKITE_COMMIT"); -} - -function getCommitMessage() { - return getEnv("BUILDKITE_MESSAGE", false) || ""; -} - -function getBranch() { - return getEnv("BUILDKITE_BRANCH"); -} - -function getMainBranch() { - return getEnv("BUILDKITE_PIPELINE_DEFAULT_BRANCH", false) || "main"; -} - -function isFork() { - const repository = getEnv("BUILDKITE_PULL_REQUEST_REPO", false); - return !!repository && repository !== getEnv("BUILDKITE_REPO"); -} - -function isMainBranch() { - return getBranch() === getMainBranch() && !isFork(); -} - -function isMergeQueue() { - return /^gh-readonly-queue/.test(getEnv("BUILDKITE_BRANCH")); -} - -function isPullRequest() { - return getEnv("BUILDKITE_PULL_REQUEST", false) === "true"; -} - -async function getChangedFiles() { - const repository = getRepository(); - const head = getCommit(); - const base = `${head}^1`; - - try { - const response = await fetch(`https://api.github.com/repos/${repository}/compare/${base}...${head}`); - if (response.ok) { - const { files } = await response.json(); - return files.filter(({ status }) => !/removed|unchanged/i.test(status)).map(({ filename }) => filename); - } - } catch (error) { - console.error(error); - } -} - -function getBuildUrl() { - return getEnv("BUILDKITE_BUILD_URL"); -} - -async function getBuildIdWithArtifacts() { - let depth = 0; - let url = getBuildUrl(); - - while (url) { - const response = await fetch(`${url}.json`, { - headers: { "Accept": "application/json" }, - }); - - if (!response.ok) { - return; - } - - const { id, state, prev_branch_build: lastBuild, steps } = await response.json(); - if (depth++) { - if (state === "failed" || state === "passed") { - const buildSteps = steps.filter(({ label }) => label.endsWith("build-bun")); - if (buildSteps.length) { - if (buildSteps.every(({ outcome }) => outcome === "passed")) { - return id; - } - return; - } - } - } - - if (!lastBuild) { - return; - } - - url = url.replace(/\/builds\/[0-9]+/, `/builds/${lastBuild["number"]}`); - } -} +import { + getCanaryRevision, + getChangedFiles, + getCommit, + getCommitMessage, + getLastSuccessfulBuild, + getMainBranch, + getTargetBranch, + isBuildkite, + isFork, + isMainBranch, + isMergeQueue, + printEnvironment, + spawnSafe, + startGroup, +} from "../scripts/utils.mjs"; function toYaml(obj, indent = 0) { const spaces = " ".repeat(indent); @@ -447,59 +356,82 @@ function getPipeline(buildId) { } async function main() { - console.log("Checking environment..."); - console.log(" - Repository:", getRepository()); - console.log(" - Branch:", getBranch()); - console.log(" - Commit:", getCommit()); - console.log(" - Commit Message:", getCommitMessage()); - console.log(" - Is Main Branch:", isMainBranch()); - console.log(" - Is Merge Queue:", isMergeQueue()); - 
console.log(" - Is Pull Request:", isPullRequest()); + printEnvironment(); - const changedFiles = await getChangedFiles(); + console.log("Checking last successful build..."); + const lastBuild = await getLastSuccessfulBuild(); + if (lastBuild) { + const { id, path, commit_id: commit } = lastBuild; + console.log(" - Build ID:", id); + console.log(" - Build URL:", new URL(path, "https://buildkite.com/").toString()); + console.log(" - Commit:", commit); + } else { + console.log(" - No build found"); + } + + console.log("Checking changed files..."); + const baseRef = getCommit(); + console.log(" - Base Ref:", baseRef); + const headRef = lastBuild?.commit_id || getTargetBranch() || getMainBranch(); + console.log(" - Head Ref:", headRef); + + const changedFiles = await getChangedFiles(undefined, baseRef, headRef); if (changedFiles) { - console.log( - `Found ${changedFiles.length} changed files: \n${changedFiles.map(filename => ` - ${filename}`).join("\n")}`, - ); + if (changedFiles.length) { + changedFiles.forEach(filename => console.log(` - ${filename}`)); + } else { + console.log(" - No changed files"); + } } const isDocumentationFile = filename => /^(\.vscode|\.github|bench|docs|examples)|\.(md)$/i.test(filename); + const isTestFile = filename => /^test/i.test(filename) || /runner\.node\.mjs$/i.test(filename); - const isSkip = () => { + console.log("Checking if CI should be skipped..."); + { const message = getCommitMessage(); - if (/\[(skip ci|no ci|ci skip|ci no)\]/i.test(message)) { - return true; + const match = /\[(skip ci|no ci|ci skip|ci no)\]/i.exec(message); + if (match) { + const [, reason] = match; + console.log(" - Yes, because commit message contains:", reason); + return; } - return changedFiles && changedFiles.every(filename => isDocumentationFile(filename)); - }; - - if (isSkip()) { - console.log("Skipping CI due to commit message or changed files..."); + } + if (changedFiles && changedFiles.every(filename => isDocumentationFile(filename))) { + console.log(" - Yes, because all changed files are documentation"); return; } - const isTestFile = filename => /^test/i.test(filename) || /runner\.node\.mjs$/i.test(filename); - - const isSkipBuild = () => { + console.log("Checking if build should be skipped..."); + let skipBuild; + { const message = getCommitMessage(); - if (/\[(only tests?|tests? only|skip build|no build|build skip|build no)\]/i.test(message)) { - return true; + const match = /\[(only tests?|tests? 
only|skip build|no build|build skip|build no)\]/i.exec(message); + if (match) { + const [, reason] = match; + console.log(" - Yes, because commit message contains:", reason); + skipBuild = true; } - return changedFiles && changedFiles.every(filename => isTestFile(filename) || isDocumentationFile(filename)); - }; + } + if (changedFiles && changedFiles.every(filename => isTestFile(filename) || isDocumentationFile(filename))) { + console.log(" - Yes, because all changed files are tests or documentation"); + skipBuild = true; + } - let buildId; - if (isSkipBuild()) { - buildId = await getBuildIdWithArtifacts(); - if (buildId) { - console.log("Skipping build due to commit message or changed files..."); - console.log("Using build artifacts from previous build:", buildId); - } else { - console.log("Attempted to skip build, but could not find previous build"); + console.log("Checking if build is a named release..."); + let buildRelease; + { + const message = getCommitMessage(); + const match = /\[(release|release build|build release)\]/i.exec(message); + if (match) { + const [, reason] = match; + console.log(" - Yes, because commit message contains:", reason); + buildRelease = true; } } - const pipeline = getPipeline(buildId); + console.log("Generating pipeline..."); + const pipeline = getPipeline(lastBuild && skipBuild ? lastBuild.id : undefined); const content = toYaml(pipeline); const contentPath = join(process.cwd(), ".buildkite", "ci.yml"); writeFileSync(contentPath, content); @@ -507,6 +439,15 @@ async function main() { console.log("Generated pipeline:"); console.log(" - Path:", contentPath); console.log(" - Size:", (content.length / 1024).toFixed(), "KB"); + + if (isBuildkite) { + console.log("Setting canary revision..."); + const canaryRevision = buildRelease ? 0 : await getCanaryRevision(); + await spawnSafe(["buildkite-agent", "meta-data", "set", "canary", `${canaryRevision}`]); + + console.log("Uploading pipeline..."); + await spawnSafe(["buildkite-agent", "pipeline", "upload", contentPath]); + } } await main(); diff --git a/.buildkite/scripts/prepare-build.sh b/.buildkite/scripts/prepare-build.sh index ab7510ed1e..a76370fd7c 100755 --- a/.buildkite/scripts/prepare-build.sh +++ b/.buildkite/scripts/prepare-build.sh @@ -2,106 +2,10 @@ set -eo pipefail -function assert_build() { - if [ -z "$BUILDKITE_REPO" ]; then - echo "error: Cannot find repository for this build" - exit 1 - fi - if [ -z "$BUILDKITE_COMMIT" ]; then - echo "error: Cannot find commit for this build" - exit 1 - fi -} - -function assert_buildkite_agent() { - if ! command -v buildkite-agent &> /dev/null; then - echo "error: Cannot find buildkite-agent, please install it:" - echo "https://buildkite.com/docs/agent/v3/install" - exit 1 - fi -} - -function assert_jq() { - assert_command "jq" "jq" "https://stedolan.github.io/jq/" -} - -function assert_curl() { - assert_command "curl" "curl" "https://curl.se/download.html" -} - -function assert_node() { - assert_command "node" "node" "https://nodejs.org/en/download/" -} - -function assert_command() { - local command="$1" - local package="$2" - local help_url="$3" - if ! command -v "$command" &> /dev/null; then - echo "warning: $command is not installed, installing..." 
- if command -v brew &> /dev/null; then - HOMEBREW_NO_AUTO_UPDATE=1 brew install "$package" - else - echo "error: Cannot install $command, please install it" - if [ -n "$help_url" ]; then - echo "" - echo "hint: See $help_url for help" - fi - exit 1 - fi - fi -} - -function assert_release() { - if [ "$RELEASE" == "1" ]; then - run_command buildkite-agent meta-data set canary "0" - fi -} - -function assert_canary() { - local canary="$(buildkite-agent meta-data get canary 2>/dev/null)" - if [ -z "$canary" ]; then - local repo=$(echo "$BUILDKITE_REPO" | sed -E 's#https://github.com/([^/]+)/([^/]+).git#\1/\2#g') - local tag="$(curl -sL "https://api.github.com/repos/$repo/releases/latest" | jq -r ".tag_name")" - if [ "$tag" == "null" ]; then - canary="1" - else - local revision=$(curl -sL "https://api.github.com/repos/$repo/compare/$tag...$BUILDKITE_COMMIT" | jq -r ".ahead_by") - if [ "$revision" == "null" ]; then - canary="1" - else - canary="$revision" - fi - fi - run_command buildkite-agent meta-data set canary "$canary" - fi -} - -function upload_buildkite_pipeline() { - local path="$1" - if [ ! -f "$path" ]; then - echo "error: Cannot find pipeline: $path" - exit 1 - fi - run_command buildkite-agent pipeline upload "$path" -} - function run_command() { set -x "$@" { set +x; } 2>/dev/null } -assert_build -assert_buildkite_agent -assert_jq -assert_curl -assert_node -assert_release -assert_canary - run_command node ".buildkite/ci.mjs" - -if [ -f ".buildkite/ci.yml" ]; then - upload_buildkite_pipeline ".buildkite/ci.yml" -fi diff --git a/LATEST b/LATEST index d28d4019a0..9b51125a6c 100644 --- a/LATEST +++ b/LATEST @@ -1 +1 @@ -1.1.33 \ No newline at end of file +1.1.34 \ No newline at end of file diff --git a/cmake/tools/SetupWebKit.cmake b/cmake/tools/SetupWebKit.cmake index 54188ec973..677c8c0ad3 100644 --- a/cmake/tools/SetupWebKit.cmake +++ b/cmake/tools/SetupWebKit.cmake @@ -2,7 +2,7 @@ option(WEBKIT_VERSION "The version of WebKit to use") option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading") if(NOT WEBKIT_VERSION) - set(WEBKIT_VERSION 4f92f334956b250a6bc4ecc1529945bacd22d96c) + set(WEBKIT_VERSION 73b551e25d97e463e8e2c86cb819b8639fcbda06) endif() if(WEBKIT_LOCAL) diff --git a/package.json b/package.json index 3cea156710..c8f3afe154 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "private": true, "name": "bun", - "version": "1.1.34", + "version": "1.1.35", "workspaces": [ "./packages/bun-types" ], diff --git a/scripts/runner.node.mjs b/scripts/runner.node.mjs index af5f02732b..898b596a50 100755 --- a/scripts/runner.node.mjs +++ b/scripts/runner.node.mjs @@ -20,42 +20,32 @@ import { rmSync, } from "node:fs"; import { spawn, spawnSync } from "node:child_process"; -import { tmpdir, hostname, userInfo, homedir } from "node:os"; import { join, basename, dirname, relative, sep } from "node:path"; -import { normalize as normalizeWindows } from "node:path/win32"; -import { isIP } from "node:net"; import { parseArgs } from "node:util"; +import { + getBuildLabel, + getBuildUrl, + getEnv, + getFileUrl, + getWindowsExitReason, + isBuildkite, + isCI, + isGithubAction, + isWindows, + printEnvironment, + startGroup, + tmpdir, + unzip, +} from "./utils.mjs"; +import { userInfo } from "node:os"; + +const cwd = dirname(import.meta.dirname); +const testsPath = join(cwd, "test"); const spawnTimeout = 5_000; const testTimeout = 3 * 60_000; const integrationTimeout = 5 * 60_000; -const isLinux = process.platform === "linux"; -const isMacOS = process.platform 
=== "darwin"; -const isWindows = process.platform === "win32"; - -const isGitHubAction = !!process.env["GITHUB_ACTIONS"]; -const isBuildKite = !!process.env["BUILDKITE"]; -const isBuildKiteTestSuite = !!process.env["BUILDKITE_ANALYTICS_TOKEN"]; -const isCI = !!process.env["CI"] || isGitHubAction || isBuildKite; - -const isAWS = - /^ec2/i.test(process.env["USERNAME"]) || - /^ec2/i.test(process.env["USER"]) || - /^(?:ec2|ip)/i.test(process.env["HOSTNAME"]) || - /^(?:ec2|ip)/i.test(getHostname()); -const isCloud = isAWS; - -const baseUrl = process.env["GITHUB_SERVER_URL"] || "https://github.com"; -const repository = process.env["GITHUB_REPOSITORY"] || "oven-sh/bun"; -const pullRequest = /^pull\/(\d+)$/.exec(process.env["GITHUB_REF"])?.[1]; -const gitSha = getGitSha(); -const gitRef = getGitRef(); - -const cwd = dirname(import.meta.dirname); -const testsPath = join(cwd, "test"); -const tmpPath = getTmpdir(); - const { values: options, positionals: filters } = parseArgs({ allowPositionals: true, options: { @@ -73,11 +63,11 @@ const { values: options, positionals: filters } = parseArgs({ }, ["shard"]: { type: "string", - default: process.env["BUILDKITE_PARALLEL_JOB"] || "0", + default: getEnv("BUILDKITE_PARALLEL_JOB", false) || "0", }, ["max-shards"]: { type: "string", - default: process.env["BUILDKITE_PARALLEL_JOB_COUNT"] || "1", + default: getEnv("BUILDKITE_PARALLEL_JOB_COUNT", false) || "1", }, ["include"]: { type: "string", @@ -100,37 +90,6 @@ const { values: options, positionals: filters } = parseArgs({ }, }); -async function printInfo() { - console.log("Timestamp:", new Date()); - console.log("OS:", getOsPrettyText(), getOsEmoji()); - console.log("Arch:", getArchText(), getArchEmoji()); - if (isLinux) { - console.log("Glibc:", getGlibcVersion()); - } - console.log("Hostname:", getHostname()); - if (isCI) { - console.log("CI:", getCI()); - console.log("Shard:", options["shard"], "/", options["max-shards"]); - console.log("Build URL:", getBuildUrl()); - console.log("Environment:", process.env); - if (isCloud) { - console.log("Public IP:", await getPublicIp()); - console.log("Cloud:", getCloud()); - } - const tailscaleIp = await getTailscaleIp(); - if (tailscaleIp) { - console.log("Tailscale IP:", tailscaleIp); - } - } - console.log("Cwd:", cwd); - console.log("Tmpdir:", tmpPath); - console.log("Commit:", gitSha); - console.log("Ref:", gitRef); - if (pullRequest) { - console.log("Pull Request:", pullRequest); - } -} - /** * * @returns {Promise} @@ -197,27 +156,32 @@ async function runTests() { */ const runTest = async (title, fn) => { const label = `${getAnsi("gray")}[${++i}/${total}]${getAnsi("reset")} ${title}`; - const result = await runTask(label, fn); + const result = await startGroup(label, fn); results.push(result); - if (isBuildKite) { + if (isBuildkite) { const { ok, error, stdoutPreview } = result; - const markdown = formatTestToMarkdown(result); - if (markdown) { - const style = title.startsWith("vendor") ? "warning" : "error"; - const priority = title.startsWith("vendor") ? 
1 : 5; - reportAnnotationToBuildKite({ label: title, content: markdown, style, priority }); + if (title.startsWith("vendor")) { + const markdown = formatTestToMarkdown({ ...result, testPath: title }); + if (markdown) { + reportAnnotationToBuildKite({ label: title, content: markdown, style: "warning", priority: 5 }); + } + } else { + const markdown = formatTestToMarkdown(result); + if (markdown) { + reportAnnotationToBuildKite({ label: title, content: markdown, style: "error" }); + } } if (!ok) { const label = `${getAnsi("red")}[${i}/${total}] ${title} - ${error}${getAnsi("reset")}`; - await runTask(label, () => { + startGroup(label, () => { process.stderr.write(stdoutPreview); }); } } - if (isGitHubAction) { + if (isGithubAction) { const summaryPath = process.env["GITHUB_STEP_SUMMARY"]; if (summaryPath) { const longMarkdown = formatTestToMarkdown(result); @@ -267,23 +231,24 @@ async function runTests() { if (testRunner === "bun") { await runTest(title, () => spawnBunTest(execPath, testPath, { cwd: vendorPath })); - } else if (testRunner === "node") { - const preload = join(import.meta.dirname, "..", "test", "runners", "node.ts"); + } else { + const testRunnerPath = join(import.meta.dirname, "..", "test", "runners", `${testRunner}.ts`); + if (!existsSync(testRunnerPath)) { + throw new Error(`Unsupported test runner: ${testRunner}`); + } await runTest(title, () => - spawnBun(execPath, { + spawnBunTest(execPath, testPath, { cwd: vendorPath, - args: ["--preload", preload, testPath], + args: ["--preload", testRunnerPath], }), ); - } else { - throw new Error(`Unsupported test runner: ${testRunner}`); } } } } const failedTests = results.filter(({ ok }) => !ok); - if (isGitHubAction) { + if (isGithubAction) { reportOutputToGitHubAction("failing_tests_count", failedTests.length); const markdown = formatTestToMarkdown(failedTests); reportOutputToGitHubAction("failing_tests", markdown); @@ -462,7 +427,7 @@ async function spawnSafe(options) { error = "timeout"; } else if (exitCode !== 0) { if (isWindows) { - const winCode = getWindowsExitCode(exitCode); + const winCode = getWindowsExitReason(exitCode); if (winCode) { exitCode = winCode; } @@ -488,14 +453,14 @@ async function spawnSafe(options) { */ async function spawnBun(execPath, { args, cwd, timeout, env, stdout, stderr }) { const path = addPath(dirname(execPath), process.env.PATH); - const tmpdirPath = mkdtempSync(join(tmpPath, "buntmp-")); - const { username } = userInfo(); + const tmpdirPath = mkdtempSync(join(tmpdir(), "buntmp-")); + const { username, homedir } = userInfo(); const bunEnv = { ...process.env, PATH: path, TMPDIR: tmpdirPath, USER: username, - HOME: homedir(), + HOME: homedir, FORCE_COLOR: "1", BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING: "1", BUN_DEBUG_QUIET_LOGS: "1", @@ -511,23 +476,6 @@ async function spawnBun(execPath, { args, cwd, timeout, env, stdout, stderr }) { if (env) { Object.assign(bunEnv, env); } - // Use Linux namespaces to isolate the child process - // https://man7.org/linux/man-pages/man1/unshare.1.html - // if (isLinux) { - // const { uid, gid } = userInfo(); - // args = [ - // `--wd=${cwd}`, - // "--user", - // `--map-user=${uid}`, - // `--map-group=${gid}`, - // "--fork", - // "--kill-child", - // "--pid", - // execPath, - // ...args, - // ]; - // execPath = "unshare"; - // } if (isWindows) { delete bunEnv["PATH"]; bunEnv["Path"] = path; @@ -592,15 +540,17 @@ async function spawnBun(execPath, { args, cwd, timeout, env, stdout, stderr }) { * @param {string} testPath * @param {object} [options] * @param {string} 
[options.cwd] + * @param {string[]} [options.args] * @returns {Promise} */ async function spawnBunTest(execPath, testPath, options = { cwd }) { const timeout = getTestTimeout(testPath); const perTestTimeout = Math.ceil(timeout / 2); - const isReallyTest = isTestStrict(testPath); const absPath = join(options["cwd"], testPath); + const isReallyTest = isTestStrict(testPath) || absPath.includes("vendor"); + const args = options["args"] ?? []; const { ok, error, stdout } = await spawnBun(execPath, { - args: isReallyTest ? ["test", `--timeout=${perTestTimeout}`, absPath] : [absPath], + args: isReallyTest ? ["test", ...args, `--timeout=${perTestTimeout}`, absPath] : [...args, absPath], cwd: options["cwd"], timeout: isReallyTest ? timeout : 30_000, env: { @@ -638,9 +588,9 @@ function getTestTimeout(testPath) { * @param {string} chunk */ function pipeTestStdout(io, chunk) { - if (isGitHubAction) { + if (isGithubAction) { io.write(chunk.replace(/\:\:(?:end)?group\:\:.*(?:\r\n|\r|\n)/gim, "")); - } else if (isBuildKite) { + } else if (isBuildkite) { io.write(chunk.replace(/(?:---|\+\+\+|~~~|\^\^\^) /gim, " ").replace(/\:\:.*(?:\r\n|\r|\n)/gim, "")); } else { io.write(chunk.replace(/\:\:.*(?:\r\n|\r|\n)/gim, "")); @@ -799,75 +749,6 @@ async function spawnBunInstall(execPath, options) { }; } -/** - * @returns {string | undefined} - */ -function getGitSha() { - const sha = process.env["GITHUB_SHA"] || process.env["BUILDKITE_COMMIT"]; - if (sha?.length === 40) { - return sha; - } - try { - const { stdout } = spawnSync("git", ["rev-parse", "HEAD"], { - encoding: "utf-8", - timeout: spawnTimeout, - }); - return stdout.trim(); - } catch (error) { - console.warn(error); - } -} - -/** - * @returns {string} - */ -function getGitRef() { - const ref = process.env["GITHUB_REF_NAME"] || process.env["BUILDKITE_BRANCH"]; - if (ref) { - return ref; - } - try { - const { stdout } = spawnSync("git", ["rev-parse", "--abbrev-ref", "HEAD"], { - encoding: "utf-8", - timeout: spawnTimeout, - }); - return stdout.trim(); - } catch (error) { - console.warn(error); - return ""; - } -} - -/** - * @returns {string} - */ -function getTmpdir() { - if (isWindows) { - for (const key of ["TMPDIR", "TEMP", "TEMPDIR", "TMP", "RUNNER_TEMP"]) { - const tmpdir = process.env[key] || ""; - // HACK: There are too many bugs with cygwin directories. - // We should probably run Windows tests in both cygwin and powershell. 
- if (/cygwin|cygdrive/i.test(tmpdir) || !/^[a-z]/i.test(tmpdir)) { - continue; - } - return normalizeWindows(tmpdir); - } - const appData = process.env["LOCALAPPDATA"]; - if (appData) { - const appDataTemp = join(appData, "Temp"); - if (existsSync(appDataTemp)) { - return appDataTemp; - } - } - } - if (isMacOS) { - if (existsSync("/tmp")) { - return "/tmp"; - } - } - return tmpdir(); -} - /** * @param {string} path * @returns {boolean} @@ -937,6 +818,7 @@ function getTests(cwd) { * @property {string} [packageManager] * @property {string} [testPath] * @property {string} [testRunner] + * @property {string[]} [testExtensions] * @property {boolean | Record} [skipTests] */ @@ -979,68 +861,77 @@ async function getVendorTests(cwd) { } return Promise.all( - relevantVendors.map(async ({ package: name, repository, tag, testPath, testRunner, packageManager, skipTests }) => { - const vendorPath = join(cwd, "vendor", name); + relevantVendors.map( + async ({ package: name, repository, tag, testPath, testExtensions, testRunner, packageManager, skipTests }) => { + const vendorPath = join(cwd, "vendor", name); + + if (!existsSync(vendorPath)) { + await spawnSafe({ + command: "git", + args: ["clone", "--depth", "1", "--single-branch", repository, vendorPath], + timeout: testTimeout, + cwd, + }); + } - if (!existsSync(vendorPath)) { await spawnSafe({ command: "git", - args: ["clone", "--depth", "1", "--single-branch", repository, vendorPath], + args: ["fetch", "--depth", "1", "origin", "tag", tag], timeout: testTimeout, - cwd, + cwd: vendorPath, }); - } - await spawnSafe({ - command: "git", - args: ["fetch", "--depth", "1", "origin", "tag", tag], - timeout: testTimeout, - cwd: vendorPath, - }); - - const packageJsonPath = join(vendorPath, "package.json"); - if (!existsSync(packageJsonPath)) { - throw new Error(`Vendor '${name}' does not have a package.json: ${packageJsonPath}`); - } - - const testPathPrefix = testPath || "test"; - const testParentPath = join(vendorPath, testPathPrefix); - if (!existsSync(testParentPath)) { - throw new Error(`Vendor '${name}' does not have a test directory: ${testParentPath}`); - } - - const isTest = path => { - if (!isJavaScriptTest(path)) { - return false; + const packageJsonPath = join(vendorPath, "package.json"); + if (!existsSync(packageJsonPath)) { + throw new Error(`Vendor '${name}' does not have a package.json: ${packageJsonPath}`); } - if (typeof skipTests === "boolean") { - return !skipTests; + const testPathPrefix = testPath || "test"; + const testParentPath = join(vendorPath, testPathPrefix); + if (!existsSync(testParentPath)) { + throw new Error(`Vendor '${name}' does not have a test directory: ${testParentPath}`); } - if (typeof skipTests === "object") { - for (const [glob, reason] of Object.entries(skipTests)) { - const pattern = new RegExp(`^${glob.replace(/\*/g, ".*")}$`); - if (pattern.test(path) && reason) { - return false; + const isTest = path => { + if (!isJavaScriptTest(path)) { + return false; + } + + if (typeof skipTests === "boolean") { + return !skipTests; + } + + if (typeof skipTests === "object") { + for (const [glob, reason] of Object.entries(skipTests)) { + const pattern = new RegExp(`^${glob.replace(/\*/g, ".*")}$`); + if (pattern.test(path) && reason) { + return false; + } } } - } - return true; - }; + return true; + }; - const testPaths = readdirSync(testParentPath, { encoding: "utf-8", recursive: true }) - .filter(filename => isTest(filename)) - .map(filename => join(testPathPrefix, filename)); + const testPaths = 
readdirSync(testParentPath, { encoding: "utf-8", recursive: true }) + .filter(filename => + testExtensions ? testExtensions.some(ext => filename.endsWith(`.${ext}`)) : isTest(filename), + ) + .map(filename => join(testPathPrefix, filename)) + .filter( + filename => + !filters?.length || + filters.some(filter => join(vendorPath, filename).replace(/\\/g, "/").includes(filter)), + ); - return { - cwd: vendorPath, - packageManager: packageManager || "bun", - testRunner: testRunner || "bun", - testPaths, - }; - }), + return { + cwd: vendorPath, + packageManager: packageManager || "bun", + testRunner: testRunner || "bun", + testPaths, + }; + }, + ), ); } @@ -1126,27 +1017,6 @@ function getRelevantTests(cwd) { return filteredTests; } -let ntStatus; - -/** - * @param {number} exitCode - * @returns {string} - */ -function getWindowsExitCode(exitCode) { - if (ntStatus === undefined) { - const ntStatusPath = "C:\\Program Files (x86)\\Windows Kits\\10\\Include\\10.0.22621.0\\shared\\ntstatus.h"; - try { - ntStatus = readFileSync(ntStatusPath, "utf-8"); - } catch (error) { - console.warn(error); - ntStatus = ""; - } - } - - const match = ntStatus.match(new RegExp(`(STATUS_\\w+).*0x${exitCode?.toString(16)}`, "i")); - return match?.[1]; -} - /** * @param {string} bunExe * @returns {string} @@ -1216,17 +1086,7 @@ async function getExecPathFromBuildKite(target) { throw new Error(`Could not find ${target}.zip from Buildkite: ${releasePath}`); } - if (isWindows) { - await spawnSafe({ - command: "powershell", - args: ["-Command", `Expand-Archive -Path ${zipPath} -DestinationPath ${releasePath} -Force`], - }); - } else { - await spawnSafe({ - command: "unzip", - args: ["-o", zipPath, "-d", releasePath], - }); - } + await unzip(zipPath, releasePath); for (const entry of readdirSync(releasePath, { recursive: true, encoding: "utf-8" })) { const execPath = join(releasePath, entry); @@ -1262,308 +1122,6 @@ function getRevision(execPath) { } } -/** - * @returns {string} - */ -function getOsText() { - const { platform } = process; - switch (platform) { - case "darwin": - return "darwin"; - case "linux": - return "linux"; - case "win32": - return "windows"; - default: - return platform; - } -} - -/** - * @returns {string} - */ -function getOsPrettyText() { - const { platform } = process; - if (platform === "darwin") { - const properties = {}; - for (const property of ["productName", "productVersion", "buildVersion"]) { - try { - const { error, stdout } = spawnSync("sw_vers", [`-${property}`], { - encoding: "utf-8", - timeout: spawnTimeout, - env: { - PATH: process.env.PATH, - }, - }); - if (error) { - throw error; - } - properties[property] = stdout.trim(); - } catch (error) { - console.warn(error); - } - } - const { productName, productVersion, buildVersion } = properties; - if (!productName) { - return "macOS"; - } - if (!productVersion) { - return productName; - } - if (!buildVersion) { - return `${productName} ${productVersion}`; - } - return `${productName} ${productVersion} (build: ${buildVersion})`; - } - if (platform === "linux") { - try { - const { error, stdout } = spawnSync("lsb_release", ["--description", "--short"], { - encoding: "utf-8", - timeout: spawnTimeout, - env: { - PATH: process.env.PATH, - }, - }); - if (error) { - throw error; - } - return stdout.trim(); - } catch (error) { - console.warn(error); - return "Linux"; - } - } - if (platform === "win32") { - try { - const { error, stdout } = spawnSync("cmd", ["/c", "ver"], { - encoding: "utf-8", - timeout: spawnTimeout, - env: { - PATH: 
process.env.PATH, - }, - }); - if (error) { - throw error; - } - return stdout.trim(); - } catch (error) { - console.warn(error); - return "Windows"; - } - } - return platform; -} - -/** - * @returns {string} - */ -function getOsEmoji() { - const { platform } = process; - switch (platform) { - case "darwin": - return isBuildKite ? ":apple:" : ""; - case "win32": - return isBuildKite ? ":windows:" : "🪟"; - case "linux": - return isBuildKite ? ":linux:" : "🐧"; - default: - return "🔮"; - } -} - -/** - * @returns {string} - */ -function getArchText() { - const { arch } = process; - switch (arch) { - case "x64": - return "x64"; - case "arm64": - return "aarch64"; - default: - return arch; - } -} - -/** - * @returns {string} - */ -function getArchEmoji() { - const { arch } = process; - switch (arch) { - case "x64": - return "🖥"; - case "arm64": - return "💪"; - default: - return "🔮"; - } -} - -/** - * @returns {string | undefined} - */ -function getGlibcVersion() { - if (!isLinux) { - return; - } - try { - const { header } = process.report.getReport(); - const { glibcVersionRuntime } = header; - if (typeof glibcVersionRuntime === "string") { - return glibcVersionRuntime; - } - } catch (error) { - console.warn(error); - } -} - -/** - * @returns {string | undefined} - */ -function getBuildUrl() { - if (isBuildKite) { - const buildUrl = process.env["BUILDKITE_BUILD_URL"]; - const jobId = process.env["BUILDKITE_JOB_ID"]; - if (buildUrl) { - return jobId ? `${buildUrl}#${jobId}` : buildUrl; - } - } - if (isGitHubAction) { - const baseUrl = process.env["GITHUB_SERVER_URL"]; - const repository = process.env["GITHUB_REPOSITORY"]; - const runId = process.env["GITHUB_RUN_ID"]; - if (baseUrl && repository && runId) { - return `${baseUrl}/${repository}/actions/runs/${runId}`; - } - } -} - -/** - * @returns {string} - */ -function getBuildLabel() { - if (isBuildKite) { - const label = process.env["BUILDKITE_LABEL"] || process.env["BUILDKITE_GROUP_LABEL"]; - if (label) { - return label.replace("- test-bun", "").replace("- bun-test", "").trim(); - } - } - return `${getOsEmoji()} ${getArchText()}`; -} - -/** - * @param {string} file - * @param {number} [line] - * @returns {string | undefined} - */ -function getFileUrl(file, line) { - const filePath = file.replace(/\\/g, "/"); - - let url; - if (pullRequest) { - const fileMd5 = crypto.createHash("md5").update(filePath).digest("hex"); - url = `${baseUrl}/${repository}/pull/${pullRequest}/files#diff-${fileMd5}`; - if (line !== undefined) { - url += `L${line}`; - } - } else if (gitSha) { - url = `${baseUrl}/${repository}/blob/${gitSha}/${filePath}`; - if (line !== undefined) { - url += `#L${line}`; - } - } - - return url; -} - -/** - * @returns {string | undefined} - */ -function getCI() { - if (isBuildKite) { - return "BuildKite"; - } - if (isGitHubAction) { - return "GitHub Actions"; - } - if (isCI) { - return "CI"; - } -} - -/** - * @returns {string | undefined} - */ -function getCloud() { - if (isAWS) { - return "AWS"; - } -} - -/** - * @returns {string | undefined} - */ -function getHostname() { - if (isBuildKite) { - return process.env["BUILDKITE_AGENT_NAME"]; - } - try { - return hostname(); - } catch (error) { - console.warn(error); - } -} - -/** - * @returns {Promise} - */ -async function getPublicIp() { - const addressUrls = ["https://checkip.amazonaws.com", "https://ipinfo.io/ip"]; - if (isAWS) { - addressUrls.unshift("http://169.254.169.254/latest/meta-data/public-ipv4"); - } - for (const url of addressUrls) { - try { - const response = await 
fetch(url); - const { ok, status, statusText } = response; - if (!ok) { - throw new Error(`${status} ${statusText}: ${url}`); - } - const text = await response.text(); - const address = text.trim(); - if (isIP(address)) { - return address; - } else { - throw new Error(`Invalid IP address: ${address}`); - } - } catch (error) { - console.warn(error); - } - } -} - -/** - * @returns {string | undefined} - */ -function getTailscaleIp() { - try { - const { status, stdout } = spawnSync("tailscale", ["ip", "--1"], { - encoding: "utf-8", - timeout: spawnTimeout, - env: { - PATH: process.env.PATH, - }, - }); - if (status === 0) { - return stdout.trim(); - } - } catch { - // ... - } -} - /** * @param {...string} paths * @returns {string} @@ -1575,28 +1133,6 @@ function addPath(...paths) { return paths.join(":"); } -/** - * @param {string} title - * @param {function} fn - */ -async function runTask(title, fn) { - if (isGitHubAction) { - console.log(`::group::${stripAnsi(title)}`); - } else if (isBuildKite) { - console.log(`--- ${title}`); - } else { - console.log(title); - } - try { - return await fn(); - } finally { - if (isGitHubAction) { - console.log("::endgroup::"); - } - console.log(); - } -} - /** * @param {TestResult | TestResult[]} result * @param {boolean} concise @@ -1649,7 +1185,7 @@ function formatTestToMarkdown(result, concise) { markdown += "\n"; } else { markdown += "\n\n"; - if (isBuildKite) { + if (isBuildkite) { const preview = escapeCodeBlock(stdout); markdown += `\`\`\`terminal\n${preview}\n\`\`\`\n`; } else { @@ -1843,42 +1379,6 @@ function parseDuration(duration) { return parseFloat(value) * (unit === "ms" ? 1 : 1000); } -/** - * @param {string} status - * @returns {string} - */ -function getTestEmoji(status) { - switch (status) { - case "pass": - return "✅"; - case "fail": - return "❌"; - case "skip": - return "⏭"; - case "todo": - return "✏️"; - default: - return "🔮"; - } -} - -/** - * @param {string} status - * @returns {string} - */ -function getTestColor(status) { - switch (status) { - case "pass": - return getAnsi("green"); - case "fail": - return getAnsi("red"); - case "skip": - case "todo": - default: - return getAnsi("gray"); - } -} - /** * @param {string} execPath * @returns {boolean} @@ -1902,7 +1402,7 @@ function getExitCode(outcome) { if (outcome === "pass") { return 0; } - if (!isBuildKite) { + if (!isBuildkite) { return 1; } // On Buildkite, you can define a `soft_fail` property to differentiate @@ -1916,52 +1416,25 @@ function getExitCode(outcome) { return 1; } -/** - * @returns {Promise} - */ -async function getDoomsdayDate() { - try { - const response = await fetch("http://169.254.169.254/latest/meta-data/spot/instance-action"); - if (response.ok) { - const { time } = await response.json(); - return new Date(time); - } - } catch { - // Ignore - } -} - /** * @param {string} signal */ -async function beforeExit(signal) { - const endOfWorld = await getDoomsdayDate(); - if (endOfWorld) { - const timeMin = 10 * 1000; - const timeLeft = Math.max(0, date.getTime() - Date.now()); - if (timeLeft > timeMin) { - setTimeout(() => onExit(signal), timeLeft - timeMin); - return; - } - } - onExit(signal); -} - -/** - * @param {string} signal - */ -async function onExit(signal) { +function onExit(signal) { const label = `${getAnsi("red")}Received ${signal}, exiting...${getAnsi("reset")}`; - await runTask(label, () => { + startGroup(label, () => { process.exit(getExitCode("cancel")); }); } -for (const signal of ["SIGINT", "SIGTERM", "SIGHUP"]) { - process.on(signal, () => 
beforeExit(signal)); +export async function main() { + for (const signal of ["SIGINT", "SIGTERM", "SIGHUP"]) { + process.on(signal, () => onExit(signal)); + } + + printEnvironment(); + const results = await runTests(); + const ok = results.every(({ ok }) => ok); + process.exit(getExitCode(ok ? "pass" : "fail")); } -await runTask("Environment", printInfo); -const results = await runTests(); -const ok = results.every(({ ok }) => ok); -process.exit(getExitCode(ok ? "pass" : "fail")); +await main(); diff --git a/scripts/utils.mjs b/scripts/utils.mjs new file mode 100644 index 0000000000..185bebf7d7 --- /dev/null +++ b/scripts/utils.mjs @@ -0,0 +1,1609 @@ +// Contains utility functions for various scripts, including: +// CI, running tests, and code generation. + +import { spawn as nodeSpawn, spawnSync as nodeSpawnSync } from "node:child_process"; +import { createHash } from "node:crypto"; +import { appendFileSync, existsSync, mkdtempSync, readdirSync, readFileSync, writeFileSync } from "node:fs"; +import { writeFile } from "node:fs/promises"; +import { hostname, tmpdir as nodeTmpdir, userInfo } from "node:os"; +import { dirname, join, relative, resolve } from "node:path"; +import { normalize as normalizeWindows } from "node:path/win32"; + +export const isWindows = process.platform === "win32"; +export const isMacOS = process.platform === "darwin"; +export const isLinux = process.platform === "linux"; +export const isPosix = isMacOS || isLinux; + +/** + * @param {string} name + * @param {boolean} [required] + * @returns {string} + */ +export function getEnv(name, required = true) { + const value = process.env[name]; + + if (required && !value) { + throw new Error(`Environment variable is missing: ${name}`); + } + + return value; +} + +export const isBuildkite = getEnv("BUILDKITE", false) === "true"; +export const isGithubAction = getEnv("GITHUB_ACTIONS", false) === "true"; +export const isCI = getEnv("CI", false) === "true" || isBuildkite || isGithubAction; +export const isDebug = getEnv("DEBUG", false) === "1"; + +/** + * @param {string} name + * @param {object} [options] + * @param {boolean} [options.required] + * @param {boolean} [options.redact] + * @returns {string} + */ +export function getSecret(name, options = { required: true, redact: true }) { + const value = getEnv(name, false); + if (value) { + return value; + } + + if (isBuildkite) { + const command = ["buildkite-agent", "secret", "get", name]; + if (options["redact"] === false) { + command.push("--skip-redaction"); + } + + const { error, stdout: secret } = spawnSync(command); + if (error || !secret.trim()) { + const orgId = getEnv("BUILDKITE_ORGANIZATION_SLUG", false); + const clusterId = getEnv("BUILDKITE_CLUSTER_ID", false); + + let hint; + if (orgId && clusterId) { + hint = `https://buildkite.com/organizations/${orgId}/clusters/${clusterId}/secrets`; + } else { + hint = "https://buildkite.com/docs/pipelines/buildkite-secrets"; + } + + throw new Error(`Secret not found: ${name} (hint: go to ${hint} and create a secret)`, { cause: error }); + } + + setEnv(name, secret); + return secret; + } + + return getEnv(name, options["required"]); +} + +/** + * @param {...unknown} args + */ +export function debugLog(...args) { + if (isDebug) { + console.log(...args); + } +} + +/** + * @param {string} name + * @param {string | undefined} value + */ +export function setEnv(name, value) { + process.env[name] = value; + + if (isGithubAction && !/^GITHUB_/i.test(name)) { + const envFilePath = process.env["GITHUB_ENV"]; + if (envFilePath) { + const 
delimeter = Math.random().toString(36).substring(2, 15);
+      const content = `${name}<<${delimeter}\n${value}\n${delimeter}\n`;
+      appendFileSync(envFilePath, content);
+    }
+  }
+}
+
+/**
+ * @typedef {object} SpawnOptions
+ * @property {string} [cwd]
+ * @property {number} [timeout]
+ * @property {Record} [env]
+ * @property {string} [stdout]
+ * @property {string} [stderr]
+ */
+
+/**
+ * @typedef {object} SpawnResult
+ * @property {number} exitCode
+ * @property {number} [signalCode]
+ * @property {string} stdout
+ * @property {string} stderr
+ * @property {Error} [error]
+ */
+
+/**
+ * @param {string[]} command
+ * @param {SpawnOptions} options
+ * @returns {Promise<SpawnResult>}
+ */
+export async function spawn(command, options = {}) {
+  debugLog("$", ...command);
+
+  const [cmd, ...args] = command;
+  const spawnOptions = {
+    cwd: options["cwd"] ?? process.cwd(),
+    timeout: options["timeout"] ?? undefined,
+    env: options["env"] ?? undefined,
+    stdio: ["ignore", "pipe", "pipe"],
+    ...options,
+  };
+
+  let exitCode = 1;
+  let signalCode;
+  let stdout = "";
+  let stderr = "";
+  let error;
+
+  const result = new Promise((resolve, reject) => {
+    const subprocess = nodeSpawn(cmd, args, spawnOptions);
+
+    subprocess.stdout?.on("data", chunk => {
+      stdout += chunk;
+    });
+    subprocess.stderr?.on("data", chunk => {
+      stderr += chunk;
+    });
+
+    subprocess.on("error", error => reject(error));
+    subprocess.on("exit", (code, signal) => {
+      exitCode = code;
+      signalCode = signal;
+      resolve();
+    });
+  });
+
+  try {
+    await result;
+  } catch (cause) {
+    error = cause;
+  }
+
+  if (exitCode !== 0 && isWindows) {
+    const exitReason = getWindowsExitReason(exitCode);
+    if (exitReason) {
+      exitCode = exitReason;
+    }
+  }
+
+  if (error || signalCode || exitCode !== 0) {
+    const description = command.map(arg => (arg.includes(" ") ? `"${arg.replace(/"/g, '\\"')}"` : arg)).join(" ");
+    const cause = error || stderr.trim() || stdout.trim() || undefined;
+
+    if (signalCode) {
+      error = new Error(`Command killed with ${signalCode}: ${description}`, { cause });
+    } else {
+      error = new Error(`Command exited with code ${exitCode}: ${description}`, { cause });
+    }
+  }
+
+  return {
+    exitCode,
+    signalCode,
+    stdout,
+    stderr,
+    error,
+  };
+}
+
+/**
+ * @param {string[]} command
+ * @param {SpawnOptions} options
+ * @returns {Promise<SpawnResult>}
+ */
+export async function spawnSafe(command, options) {
+  const result = await spawn(command, options);
+
+  const { error } = result;
+  if (error) {
+    throw error;
+  }
+
+  return result;
+}
+
+/**
+ * @param {string[]} command
+ * @param {SpawnOptions} options
+ * @returns {SpawnResult}
+ */
+export function spawnSync(command, options = {}) {
+  debugLog("$", ...command);
+
+  const [cmd, ...args] = command;
+  const spawnOptions = {
+    cwd: options["cwd"] ?? process.cwd(),
+    timeout: options["timeout"] ?? undefined,
+    env: options["env"] ?? undefined,
+    stdio: ["ignore", "pipe", "pipe"],
+    ...options,
+  };
+
+  let exitCode = 1;
+  let signalCode;
+  let stdout = "";
+  let stderr = "";
+  let error;
+
+  let result;
+  try {
+    result = nodeSpawnSync(cmd, args, spawnOptions);
+  } catch (error) {
+    result = { error };
+  }
+
+  const { error: spawnError, status, signal, stdout: stdoutBuffer, stderr: stderrBuffer } = result;
+  if (spawnError) {
+    error = spawnError;
+  } else {
+    exitCode = status ?? 
1; + signalCode = signal || undefined; + stdout = stdoutBuffer.toString(); + stderr = stderrBuffer.toString(); + } + + if (exitCode !== 0 && isWindows) { + const exitReason = getWindowsExitReason(exitCode); + if (exitReason) { + exitCode = exitReason; + } + } + + if (error || signalCode || exitCode !== 0) { + const description = command.map(arg => (arg.includes(" ") ? `"${arg.replace(/"/g, '\\"')}"` : arg)).join(" "); + const cause = error || stderr.trim() || stdout.trim() || undefined; + + if (signalCode) { + error = new Error(`Command killed with ${signalCode}: ${description}`, { cause }); + } else { + error = new Error(`Command exited with code ${exitCode}: ${description}`, { cause }); + } + } + + return { + exitCode, + signalCode, + stdout, + stderr, + error, + }; +} + +/** + * @param {string[]} command + * @param {SpawnOptions} options + * @returns {SpawnResult} + */ +export function spawnSyncSafe(command, options) { + const result = spawnSync(command, options); + + const { error } = result; + if (error) { + throw error; + } + + return result; +} + +/** + * @param {number} exitCode + * @returns {string | undefined} + */ +export function getWindowsExitReason(exitCode) { + const ntStatusPath = "C:\\Program Files (x86)\\Windows Kits\\10\\Include\\10.0.22621.0\\shared\\ntstatus.h"; + const nthStatus = readFile(ntStatusPath, { cache: true }); + + const match = nthStatus.match(new RegExp(`(STATUS_\\w+).*0x${exitCode?.toString(16)}`, "i")); + if (match) { + const [, exitReason] = match; + return exitReason; + } +} + +/** + * @param {string} url + * @returns {URL} + */ +export function parseGitUrl(url) { + const string = typeof url === "string" ? url : url.toString(); + + const githubUrl = getEnv("GITHUB_SERVER_URL", false) || "https://github.com"; + if (/^git@github\.com:/.test(string)) { + return new URL(string.slice(15).replace(/\.git$/, ""), githubUrl); + } + if (/^https:\/\/github\.com\//.test(string)) { + return new URL(string.slice(19).replace(/\.git$/, ""), githubUrl); + } + + throw new Error(`Unsupported git url: ${string}`); +} + +/** + * @param {string} [cwd] + * @returns {URL | undefined} + */ +export function getRepositoryUrl(cwd) { + if (!cwd) { + if (isBuildkite) { + const repository = getEnv("BUILDKITE_PULL_REQUEST_REPO", false) || getEnv("BUILDKITE_REPO", false); + if (repository) { + return parseGitUrl(repository); + } + } + + if (isGithubAction) { + const serverUrl = getEnv("GITHUB_SERVER_URL", false) || "https://github.com"; + const repository = getEnv("GITHUB_REPOSITORY", false); + if (serverUrl && repository) { + return parseGitUrl(new URL(repository, serverUrl)); + } + } + } + + const { error, stdout } = spawnSync(["git", "remote", "get-url", "origin"], { cwd }); + if (!error) { + return parseGitUrl(stdout.trim()); + } +} + +/** + * @param {string} [cwd] + * @returns {string | undefined} + */ +export function getRepository(cwd) { + if (!cwd) { + if (isGithubAction) { + const repository = getEnv("GITHUB_REPOSITORY", false); + if (repository) { + return repository; + } + } + } + + const url = getRepositoryUrl(cwd); + if (url) { + const { hostname, pathname } = new URL(url); + if (hostname == "github.com") { + return pathname.slice(1); + } + } +} + +/** + * @param {string} [cwd] + * @returns {string | undefined} + */ +export function getCommit(cwd) { + if (!cwd) { + if (isBuildkite) { + const commit = getEnv("BUILDKITE_COMMIT", false); + if (commit) { + return commit; + } + } + + if (isGithubAction) { + const commit = getEnv("GITHUB_SHA", false); + if (commit) { + return 
commit; + } + } + } + + const { error, stdout } = spawnSync(["git", "rev-parse", "HEAD"], { cwd }); + if (!error) { + return stdout.trim(); + } +} + +/** + * @param {string} [cwd] + * @returns {string | undefined} + */ +export function getCommitMessage(cwd) { + if (!cwd) { + if (isBuildkite) { + const message = getEnv("BUILDKITE_MESSAGE", false); + if (message) { + return message; + } + } + } + + const { error, stdout } = spawnSync(["git", "log", "-1", "--pretty=%B"], { cwd }); + if (!error) { + return stdout.trim(); + } +} + +/** + * @param {string} [cwd] + * @returns {string | undefined} + */ +export function getBranch(cwd) { + if (!cwd) { + if (isBuildkite) { + const branch = getEnv("BUILDKITE_BRANCH", false); + if (branch) { + return branch; + } + } + + if (isGithubAction) { + const ref = getEnv("GITHUB_REF_NAME", false); + if (ref) { + return ref; + } + } + } + + const { error, stdout } = spawnSync(["git", "rev-parse", "--abbrev-ref", "HEAD"], { cwd }); + if (!error) { + return stdout.trim(); + } +} + +/** + * @param {string} [cwd] + * @returns {string} + */ +export function getMainBranch(cwd) { + if (!cwd) { + if (isBuildkite) { + const branch = getEnv("BUILDKITE_PIPELINE_DEFAULT_BRANCH", false); + if (branch) { + return branch; + } + } + + if (isGithubAction) { + const headRef = getEnv("GITHUB_HEAD_REF", false); + if (headRef) { + return headRef; + } + } + } + + const { error, stdout } = spawnSync(["git", "symbolic-ref", "refs/remotes/origin/HEAD"], { cwd }); + if (!error) { + return stdout.trim().replace("refs/remotes/origin/", ""); + } +} + +/** + * @param {string} [cwd] + * @returns {boolean} + */ +export function isMainBranch(cwd) { + return !isFork(cwd) && getBranch(cwd) === getMainBranch(cwd); +} + +/** + * @returns {boolean} + */ +export function isPullRequest() { + if (isBuildkite) { + return getEnv("BUILDKITE_PULL_REQUEST", false) === "true"; + } + + if (isGithubAction) { + return /pull_request|merge_group/.test(getEnv("GITHUB_EVENT_NAME", false)); + } + + return false; +} + +/** + * @returns {number | undefined} + */ +export function getPullRequest() { + if (isBuildkite) { + const pullRequest = getEnv("BUILDKITE_PULL_REQUEST", false); + if (pullRequest) { + return parseInt(pullRequest); + } + } + + if (isGithubAction) { + const eventPath = getEnv("GITHUB_EVENT_PATH", false); + if (eventPath && existsSync(eventPath)) { + const event = JSON.parse(readFile(eventPath, { cache: true })); + const pullRequest = event["pull_request"]; + if (pullRequest) { + return parseInt(pullRequest["number"]); + } + } + } +} + +/** + * @returns {string | undefined} + */ +export function getTargetBranch() { + if (isPullRequest()) { + if (isBuildkite) { + return getEnv("BUILDKITE_PULL_REQUEST_BASE_BRANCH", false); + } + + if (isGithubAction) { + return getEnv("GITHUB_BASE_REF", false); + } + } +} + +/** + * @returns {boolean} + */ +export function isFork() { + if (isBuildkite) { + const repository = getEnv("BUILDKITE_PULL_REQUEST_REPO", false); + return !!repository && repository !== getEnv("BUILDKITE_REPO", false); + } + + if (isGithubAction) { + const eventPath = getEnv("GITHUB_EVENT_PATH", false); + if (eventPath && existsSync(eventPath)) { + const event = JSON.parse(readFile(eventPath, { cache: true })); + const pullRequest = event["pull_request"]; + if (pullRequest) { + return !!pullRequest["head"]["repo"]["fork"]; + } + } + } + + return false; +} + +/** + * @param {string} [cwd] + * @returns {boolean} + */ +export function isMergeQueue(cwd) { + return 
/^gh-readonly-queue/.test(getBranch(cwd)); +} + +/** + * @returns {string | undefined} + */ +export function getGithubToken() { + const cachedToken = getSecret("GITHUB_TOKEN", { required: false }); + + if (typeof cachedToken === "string") { + return cachedToken || undefined; + } + + const { error, stdout } = spawnSync(["gh", "auth", "token"]); + const token = error ? "" : stdout.trim(); + + setEnv("GITHUB_TOKEN", token); + return token || undefined; +} + +/** + * @typedef {object} CurlOptions + * @property {string} [method] + * @property {string} [body] + * @property {Record} [headers] + * @property {number} [timeout] + * @property {number} [retries] + * @property {boolean} [json] + * @property {boolean} [arrayBuffer] + * @property {string} [filename] + */ + +/** + * @typedef {object} CurlResult + * @property {number} status + * @property {string} statusText + * @property {Error | undefined} error + * @property {any} body + */ + +/** + * @param {string} url + * @param {CurlOptions} [options] + * @returns {Promise} + */ +export async function curl(url, options = {}) { + let { hostname, href } = new URL(url); + let method = options["method"] || "GET"; + let input = options["body"]; + let headers = options["headers"] || {}; + let retries = options["retries"] || 3; + let json = options["json"]; + let arrayBuffer = options["arrayBuffer"]; + let filename = options["filename"]; + + if (typeof headers["Authorization"] === "undefined") { + if (hostname === "api.github.com" || hostname === "uploads.github.com") { + const githubToken = getGithubToken(); + if (githubToken) { + headers["Authorization"] = `Bearer ${githubToken}`; + } + } + } + + let status; + let statusText; + let body; + let error; + for (let i = 0; i < retries; i++) { + if (i > 0) { + await new Promise(resolve => setTimeout(resolve, 1000 * (i + 1))); + } + + let response; + try { + response = await fetch(href, { method, headers, body: input }); + } catch (cause) { + debugLog("$", "curl", href, "-> error"); + error = new Error(`Fetch failed: ${method} ${url}`, { cause }); + continue; + } + + status = response["status"]; + statusText = response["statusText"]; + debugLog("$", "curl", href, "->", status, statusText); + + const ok = response["ok"]; + try { + if (filename && ok) { + const buffer = await response.arrayBuffer(); + await writeFile(filename, new Uint8Array(buffer)); + } else if (arrayBuffer && ok) { + body = await response.arrayBuffer(); + } else if (json && ok) { + body = await response.json(); + } else { + body = await response.text(); + } + } catch (cause) { + error = new Error(`Fetch failed: ${method} ${url}`, { cause }); + continue; + } + + if (response["ok"]) { + break; + } + + error = new Error(`Fetch failed: ${method} ${url}: ${status} ${statusText}`, { cause: body }); + + if (status === 400 || status === 404 || status === 422) { + break; + } + } + + return { + status, + statusText, + error, + body, + }; +} + +/** + * @param {string} url + * @param {CurlOptions} options + * @returns {Promise} + */ +export async function curlSafe(url, options) { + const result = await curl(url, options); + + const { error, body } = result; + if (error) { + throw error; + } + + return body; +} + +let cachedFiles; + +/** + * @param {string} filename + * @param {object} [options] + * @param {boolean} [options.cache] + * @returns {string} + */ +export function readFile(filename, options = {}) { + const absolutePath = resolve(filename); + if (options["cache"]) { + if (cachedFiles?.[absolutePath]) { + return cachedFiles[absolutePath]; + } + } 
+ + const relativePath = relative(process.cwd(), absolutePath); + debugLog("cat", relativePath); + + let content; + try { + content = readFileSync(absolutePath, "utf-8"); + } catch (cause) { + throw new Error(`Read failed: ${relativePath}`, { cause }); + } + + if (options["cache"]) { + cachedFiles ||= {}; + cachedFiles[absolutePath] = content; + } + + return content; +} + +/** + * @param {string} [cwd] + * @param {string} [base] + * @param {string} [head] + * @returns {Promise} + */ +export async function getChangedFiles(cwd, base, head) { + const repository = getRepository(cwd); + base ||= getCommit(cwd); + head ||= `${base}^1`; + + const url = `https://api.github.com/repos/${repository}/compare/${head}...${base}`; + const { error, body } = await curl(url, { json: true }); + + if (error) { + console.warn("Failed to list changed files:", error); + return; + } + + const { files } = body; + return files.filter(({ status }) => !/removed|unchanged/i.test(status)).map(({ filename }) => filename); +} + +/** + * @param {string} filename + * @returns {boolean} + */ +export function isDocumentation(filename) { + if (/^(docs|bench|examples|misctools|\.vscode)/.test(filename)) { + return true; + } + + if (!/^(src|test|vendor)/.test(filename) && /\.(md|txt)$/.test(filename)) { + return true; + } + + return false; +} + +/** + * @returns {string | undefined} + */ +export function getBuildId() { + if (isBuildkite) { + return getEnv("BUILDKITE_BUILD_ID"); + } + + if (isGithubAction) { + return getEnv("GITHUB_RUN_ID"); + } +} + +/** + * @returns {number | undefined} + */ +export function getBuildNumber() { + if (isBuildkite) { + return parseInt(getEnv("BUILDKITE_BUILD_NUMBER")); + } + + if (isGithubAction) { + return parseInt(getEnv("GITHUB_RUN_ID")); + } +} + +/** + * @returns {URL | undefined} + */ +export function getBuildUrl() { + if (isBuildkite) { + const buildUrl = getEnv("BUILDKITE_BUILD_URL"); + const jobId = getEnv("BUILDKITE_JOB_ID"); + return new URL(`#${jobId}`, buildUrl); + } + + if (isGithubAction) { + const baseUrl = getEnv("GITHUB_SERVER_URL", false) || "https://github.com"; + const repository = getEnv("GITHUB_REPOSITORY"); + const runId = getEnv("GITHUB_RUN_ID"); + return new URL(`${repository}/actions/runs/${runId}`, baseUrl); + } +} + +/** + * @returns {string | undefined} + */ +export function getBuildLabel() { + if (isBuildkite) { + const label = getEnv("BUILDKITE_GROUP_LABEL", false) || getEnv("BUILDKITE_LABEL", false); + if (label) { + return label; + } + } + + if (isGithubAction) { + const label = getEnv("GITHUB_WORKFLOW", false); + if (label) { + return label; + } + } +} + +/** + * @typedef {object} BuildArtifact + * @property {string} [job] + * @property {string} filename + * @property {string} url + */ + +/** + * @returns {Promise} + */ +export async function getBuildArtifacts() { + const buildId = await getBuildkiteBuildNumber(); + if (buildId) { + return getBuildkiteArtifacts(buildId); + } +} + +/** + * @returns {Promise} + */ +export async function getBuildkiteBuildNumber() { + if (isBuildkite) { + const number = parseInt(getEnv("BUILDKITE_BUILD_NUMBER", false)); + if (!isNaN(number)) { + return number; + } + } + + const repository = getRepository(); + const commit = getCommit(); + if (!repository || !commit) { + return; + } + + const { status, error, body } = await curl(`https://api.github.com/repos/${repository}/commits/${commit}/statuses`, { + json: true, + }); + if (status === 404) { + return; + } + if (error) { + throw error; + } + + for (const { target_url: url } of 
body) { + const { hostname, pathname } = new URL(url); + if (hostname === "buildkite.com") { + const buildId = parseInt(pathname.split("/").pop()); + if (!isNaN(buildId)) { + return buildId; + } + } + } +} + +/** + * @param {string} buildId + * @returns {Promise} + */ +export async function getBuildkiteArtifacts(buildId) { + const orgId = getEnv("BUILDKITE_ORGANIZATION_SLUG", false) || "bun"; + const pipelineId = getEnv("BUILDKITE_PIPELINE_SLUG", false) || "bun"; + const { jobs } = await curlSafe(`https://buildkite.com/${orgId}/${pipelineId}/builds/${buildId}.json`, { + json: true, + }); + + const artifacts = await Promise.all( + jobs.map(async ({ id: jobId, step_key: jobKey }) => { + const artifacts = await curlSafe( + `https://buildkite.com/organizations/${orgId}/pipelines/${pipelineId}/builds/${buildId}/jobs/${jobId}/artifacts`, + { json: true }, + ); + + return artifacts.map(({ path, url }) => { + return { + job: jobKey, + filename: path, + url: new URL(url, "https://buildkite.com/").toString(), + }; + }); + }), + ); + + return artifacts.flat(); +} + +/** + * @param {string} [filename] + * @param {number} [line] + * @returns {URL | undefined} + */ +export function getFileUrl(filename, line) { + let cwd; + if (filename?.startsWith("vendor")) { + const parentPath = resolve(dirname(filename)); + const { error, stdout } = spawnSync(["git", "rev-parse", "--show-toplevel"], { cwd: parentPath }); + if (error) { + return; + } + cwd = stdout.trim(); + } + + const baseUrl = getRepositoryUrl(cwd); + if (!filename) { + return baseUrl; + } + + const filePath = (cwd ? relative(cwd, filename) : filename).replace(/\\/g, "/"); + const pullRequest = getPullRequest(); + + if (pullRequest) { + const fileMd5 = createHash("sha256").update(filePath).digest("hex"); + const url = new URL(`pull/${pullRequest}/files#diff-${fileMd5}`, `${baseUrl}/`); + if (typeof line !== "undefined") { + return new URL(`R${line}`, url); + } + return url; + } + + const commit = getCommit(cwd); + const url = new URL(`blob/${commit}/${filePath}`, `${baseUrl}/`).toString(); + if (typeof line !== "undefined") { + return new URL(`#L${line}`, url); + } + return url; +} + +/** + * @typedef {object} BuildkiteBuild + * @property {string} id + * @property {string} commit_id + * @property {string} branch_name + */ + +/** + * @returns {Promise} + */ +export async function getLastSuccessfulBuild() { + if (isBuildkite) { + let depth = 0; + let url = getBuildUrl(); + if (url) { + url.hash = ""; + } + + while (url) { + const { error, body } = await curl(`${url}.json`, { json: true }); + if (error) { + return; + } + + const { state, prev_branch_build: previousBuild, steps } = body; + if (depth++) { + if (state === "failed" || state === "passed" || state === "canceled") { + const buildSteps = steps.filter(({ label }) => label.endsWith("build-bun")); + if (buildSteps.length) { + if (buildSteps.every(({ outcome }) => outcome === "passed")) { + return body; + } + return; + } + } + } + + if (!previousBuild) { + return; + } + + url = new URL(previousBuild["url"], url); + } + } +} + +/** + * @param {string} string + * @returns {string} + */ +export function stripAnsi(string) { + return string.replace(/\u001b\[\d+m/g, ""); +} + +/** + * @param {string} string + * @returns {string} + */ +export function escapeGitHubAction(string) { + return string.replace(/%/g, "%25").replace(/\r/g, "%0D").replace(/\n/g, "%0A"); +} + +/** + * @param {string} string + * @returns {string} + */ +export function unescapeGitHubAction(string) { + return string.replace(/%25/g, 
"%").replace(/%0D/g, "\r").replace(/%0A/g, "\n"); +} + +/** + * @param {string} string + * @returns {string} + */ +export function escapeHtml(string) { + return string + .replace(/&/g, "&") + .replace(//g, ">") + .replace(/"/g, """) + .replace(/'/g, "'") + .replace(/`/g, "`"); +} + +/** + * @param {string} string + * @returns {string} + */ +export function escapeCodeBlock(string) { + return string.replace(/`/g, "\\`"); +} + +/** + * @returns {string} + */ +export function tmpdir() { + if (isWindows) { + for (const key of ["TMPDIR", "TEMP", "TEMPDIR", "TMP", "RUNNER_TEMP"]) { + const tmpdir = getEnv(key, false); + if (!tmpdir || /cygwin|cygdrive/i.test(tmpdir) || !/^[a-z]/i.test(tmpdir)) { + continue; + } + return normalizeWindows(tmpdir); + } + + const appData = process.env["LOCALAPPDATA"]; + if (appData) { + const appDataTemp = join(appData, "Temp"); + if (existsSync(appDataTemp)) { + return appDataTemp; + } + } + } + + if (isMacOS || isLinux) { + if (existsSync("/tmp")) { + return "/tmp"; + } + } + + return nodeTmpdir(); +} + +/** + * @param {string} string + * @returns {string} + */ +function escapePowershell(string) { + return string.replace(/'/g, "''").replace(/`/g, "``"); +} + +/** + * @param {string} filename + * @param {string} [output] + * @returns {Promise} + */ +export async function unzip(filename, output) { + const destination = output || mkdtempSync(join(tmpdir(), "unzip-")); + if (isWindows) { + const command = `Expand-Archive -Force -LiteralPath "${escapePowershell(filename)}" -DestinationPath "${escapePowershell(destination)}"`; + await spawnSafe(["powershell", "-Command", command]); + } else { + await spawnSafe(["unzip", "-o", filename, "-d", destination]); + } + return destination; +} + +/** + * @param {string} string + * @returns {"darwin" | "linux" | "windows"} + */ +export function parseOs(string) { + if (/darwin|apple|mac/i.test(string)) { + return "darwin"; + } + if (/linux/i.test(string)) { + return "linux"; + } + if (/win/i.test(string)) { + return "windows"; + } + throw new Error(`Unsupported operating system: ${string}`); +} + +/** + * @returns {"darwin" | "linux" | "windows"} + */ +export function getOs() { + return parseOs(process.platform); +} + +/** + * @param {string} string + * @returns {"x64" | "aarch64"} + */ +export function parseArch(string) { + if (/x64|amd64|x86_64/i.test(string)) { + return "x64"; + } + if (/arm64|aarch64/i.test(string)) { + return "aarch64"; + } + throw new Error(`Unsupported architecture: ${string}`); +} + +/** + * @returns {"x64" | "aarch64"} + */ +export function getArch() { + return parseArch(process.arch); +} + +/** + * @returns {"musl" | "gnu" | undefined} + */ +export function getAbi() { + if (isLinux) { + const arch = getArch() === "x64" ? "x86_64" : "aarch64"; + const muslLibPath = `/lib/ld-musl-${arch}.so.1`; + if (existsSync(muslLibPath)) { + return "musl"; + } + + const gnuLibPath = `/lib/ld-linux-${arch}.so.2`; + if (existsSync(gnuLibPath)) { + return "gnu"; + } + } +} + +/** + * @typedef {object} Target + * @property {"darwin" | "linux" | "windows"} os + * @property {"x64" | "aarch64"} arch + * @property {"musl"} [abi] + * @property {boolean} [baseline] + * @property {boolean} profile + * @property {string} label + */ + +/** + * @param {string} string + * @returns {Target} + */ +export function parseTarget(string) { + const os = parseOs(string); + const arch = parseArch(string); + const abi = os === "linux" && string.includes("-musl") ? "musl" : undefined; + const baseline = arch === "x64" ? 
string.includes("-baseline") : undefined; + const profile = string.includes("-profile"); + + let label = `${os}-${arch}`; + if (abi) { + label += `-${abi}`; + } + if (baseline) { + label += "-baseline"; + } + if (profile) { + label += "-profile"; + } + + return { label, os, arch, abi, baseline, profile }; +} + +/** + * @param {string} target + * @param {string} [release] + * @returns {Promise} + */ +export async function getTargetDownloadUrl(target, release) { + const { label, os, arch, abi, baseline } = parseTarget(target); + const baseUrl = "https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/"; + const filename = `bun-${label}.zip`; + + const exists = async url => { + const { status } = await curl(url, { method: "HEAD" }); + return status !== 404; + }; + + if (!release || /^(stable|latest|canary)$/i.test(release)) { + const tag = release === "canary" ? "canary" : "latest"; + const url = new URL(`${tag}/${filename}`, baseUrl); + if (await exists(url)) { + return url; + } + } + + if (/^(bun-v|v)?(\d+\.\d+\.\d+)$/i.test(release)) { + const [, major, minor, patch] = /(\d+)\.(\d+)\.(\d+)/i.exec(release); + const url = new URL(`bun-v${major}.${minor}.${patch}/${filename}`, baseUrl); + if (await exists(url)) { + return url; + } + } + + if (/^https?:\/\//i.test(release) && (await exists(release))) { + return new URL(release); + } + + if (release.length === 40 && /^[0-9a-f]{40}$/i.test(release)) { + const releaseUrl = new URL(`${release}/${filename}`, baseUrl); + if (await exists(releaseUrl)) { + return releaseUrl; + } + + const canaryUrl = new URL(`${release}-canary/${filename}`, baseUrl); + if (await exists(canaryUrl)) { + return canaryUrl; + } + + const statusUrl = new URL(`https://api.github.com/repos/oven-sh/bun/commits/${release}/status`).toString(); + const { error, body } = await curl(statusUrl, { json: true }); + if (error) { + throw new Error(`Failed to fetch commit status: ${release}`, { cause: error }); + } + + const { statuses } = body; + const buildUrls = new Set(); + for (const { target_url: url } of statuses) { + const { hostname, origin, pathname } = new URL(url); + if (hostname === "buildkite.com") { + buildUrls.add(`${origin}${pathname}.json`); + } + } + + const buildkiteUrl = new URL("https://buildkite.com/"); + for (const url of buildUrls) { + const { status, error, body } = await curl(url, { json: true }); + if (status === 404) { + continue; + } + if (error) { + throw new Error(`Failed to fetch build: ${url}`, { cause: error }); + } + + const { jobs } = body; + const job = jobs.find( + ({ step_key: key }) => + key && + key.includes("build-bun") && + key.includes(os) && + key.includes(arch) && + (!baseline || key.includes("baseline")) && + (!abi || key.includes(abi)), + ); + if (!job) { + continue; + } + + const { base_path: jobPath } = job; + const artifactsUrl = new URL(`${jobPath}/artifacts`, buildkiteUrl); + { + const { error, body } = await curl(artifactsUrl, { json: true }); + if (error) { + continue; + } + + for (const { url, file_name: name } of body) { + if (name === filename) { + return new URL(url, artifactsUrl); + } + } + } + } + } + + throw new Error(`Failed to find release: ${release}`); +} + +/** + * @param {string} target + * @param {string} [release] + * @returns {Promise} + */ +export async function downloadTarget(target, release) { + const url = await getTargetDownloadUrl(target, release); + const { error, body } = await curl(url, { arrayBuffer: true }); + if (error) { + throw new Error(`Failed to download target: ${target} at ${release}`, { 
cause: error }); + } + + const tmpPath = mkdtempSync(join(tmpdir(), "bun-download-")); + const zipPath = join(tmpPath, "bun.zip"); + + writeFileSync(zipPath, new Uint8Array(body)); + const unzipPath = await unzip(zipPath, tmpPath); + + for (const entry of readdirSync(unzipPath, { recursive: true, encoding: "utf-8" })) { + const exePath = join(unzipPath, entry); + if (/bun(?:\.exe)?$/i.test(entry)) { + return exePath; + } + } + + throw new Error(`Failed to find bun executable: ${unzipPath}`); +} + +/** + * @returns {string | undefined} + */ +export function getTailscaleIp() { + let tailscale = "tailscale"; + if (isMacOS) { + const tailscaleApp = "/Applications/Tailscale.app/Contents/MacOS/tailscale"; + if (existsSync(tailscaleApp)) { + tailscale = tailscaleApp; + } + } + + const { error, stdout } = spawnSync([tailscale, "ip", "--1"]); + if (!error) { + return stdout.trim(); + } +} + +/** + * @returns {string | undefined} + */ +export function getPublicIp() { + for (const url of ["https://checkip.amazonaws.com", "https://ipinfo.io/ip"]) { + const { error, stdout } = spawnSync(["curl", url]); + if (!error) { + return stdout.trim(); + } + } +} + +/** + * @returns {string} + */ +export function getHostname() { + if (isBuildkite) { + const agent = getEnv("BUILDKITE_AGENT_NAME", false); + if (agent) { + return agent; + } + } + + if (isGithubAction) { + const runner = getEnv("RUNNER_NAME", false); + if (runner) { + return runner; + } + } + + return hostname(); +} + +/** + * @returns {string} + */ +export function getUsername() { + const { username } = userInfo(); + return username; +} + +/** + * @returns {string} + */ +export function getDistro() { + if (isMacOS) { + return "macOS"; + } + + if (isLinux) { + const releasePath = "/etc/os-release"; + if (existsSync(releasePath)) { + const releaseFile = readFile(releasePath, { cache: true }); + const match = releaseFile.match(/ID=\"(.*)\"/); + if (match) { + return match[1]; + } + } + + const { error, stdout } = spawnSync(["lsb_release", "-is"]); + if (!error) { + return stdout.trim(); + } + + return "Linux"; + } + + if (isWindows) { + const { error, stdout } = spawnSync(["cmd", "/c", "ver"]); + if (!error) { + return stdout.trim(); + } + + return "Windows"; + } + + return `${process.platform} ${process.arch}`; +} + +/** + * @returns {string | undefined} + */ +export function getDistroRelease() { + if (isMacOS) { + const { error, stdout } = spawnSync(["sw_vers", "-productVersion"]); + if (!error) { + return stdout.trim(); + } + } + + if (isLinux) { + const releasePath = "/etc/os-release"; + if (existsSync(releasePath)) { + const releaseFile = readFile(releasePath, { cache: true }); + const match = releaseFile.match(/VERSION_ID=\"(.*)\"/); + if (match) { + return match[1]; + } + } + + const { error, stdout } = spawnSync(["lsb_release", "-rs"]); + if (!error) { + return stdout.trim(); + } + } + + if (isWindows) { + const { error, stdout } = spawnSync(["cmd", "/c", "ver"]); + if (!error) { + return stdout.trim(); + } + } +} + +/** + * @returns {Promise} + */ +export async function getCanaryRevision() { + const repository = getRepository() || "oven-sh/bun"; + const { error: releaseError, body: release } = await curl( + new URL(`repos/${repository}/releases/latest`, getGithubApiUrl()), + { json: true }, + ); + if (releaseError) { + return 1; + } + + const commit = getCommit(); + const { tag_name: latest } = release; + const { error: compareError, body: compare } = await curl( + new URL(`repos/${repository}/compare/${latest}...${commit}`, 
getGithubApiUrl()), + { json: true }, + ); + if (compareError) { + return 1; + } + + const { ahead_by: revision } = compare; + if (typeof revision === "number") { + return revision; + } + + return 1; +} + +/** + * @returns {URL} + */ +export function getGithubApiUrl() { + return new URL(getEnv("GITHUB_API_URL", false) || "https://api.github.com"); +} + +/** + * @returns {URL} + */ +export function getGithubUrl() { + return new URL(getEnv("GITHUB_SERVER_URL", false) || "https://github.com"); +} + +/** + * @param {string} title + * @param {function} [fn] + */ +export function startGroup(title, fn) { + if (isGithubAction) { + console.log(`::group::${stripAnsi(title)}`); + } else if (isBuildkite) { + console.log(`--- ${title}`); + } else { + console.group(title); + } + + if (typeof fn === "function") { + let result; + try { + result = fn(); + } finally { + if (result instanceof Promise) { + return result.finally(() => endGroup()); + } else { + endGroup(); + } + } + } +} + +export function endGroup() { + if (isGithubAction) { + console.log("::endgroup::"); + } else { + console.groupEnd(); + } +} + +export function printEnvironment() { + startGroup("Machine", () => { + console.log("Operating System:", getOs()); + console.log("Architecture:", getArch()); + if (isLinux) { + console.log("ABI:", getAbi()); + } + console.log("Distro:", getDistro()); + console.log("Release:", getDistroRelease()); + console.log("Hostname:", getHostname()); + if (isCI) { + console.log("Tailscale IP:", getTailscaleIp()); + console.log("Public IP:", getPublicIp()); + } + console.log("Username:", getUsername()); + console.log("Working Directory:", process.cwd()); + console.log("Temporary Directory:", tmpdir()); + }); + + if (isCI) { + startGroup("Environment", () => { + for (const [key, value] of Object.entries(process.env)) { + console.log(`${key}:`, value); + } + }); + } + + startGroup("Repository", () => { + console.log("Commit:", getCommit()); + console.log("Message:", getCommitMessage()); + console.log("Branch:", getBranch()); + console.log("Main Branch:", getMainBranch()); + console.log("Is Fork:", isFork()); + console.log("Is Merge Queue:", isMergeQueue()); + console.log("Is Main Branch:", isMainBranch()); + console.log("Is Pull Request:", isPullRequest()); + if (isPullRequest()) { + console.log("Pull Request:", getPullRequest()); + console.log("Target Branch:", getTargetBranch()); + } + }); + + if (isCI) { + startGroup("CI", () => { + console.log("Build ID:", getBuildId()); + console.log("Build Label:", getBuildLabel()); + console.log("Build URL:", `${getBuildUrl()}`); + }); + } +} diff --git a/src/bun.js/bindings/InternalModuleRegistry.cpp b/src/bun.js/bindings/InternalModuleRegistry.cpp index 21d26d06e7..fc3407c702 100644 --- a/src/bun.js/bindings/InternalModuleRegistry.cpp +++ b/src/bun.js/bindings/InternalModuleRegistry.cpp @@ -6,7 +6,7 @@ #include #include #include - +#include #include #include "InternalModuleRegistryConstants.h" @@ -54,6 +54,9 @@ JSC::JSValue generateModule(JSC::JSGlobalObject* globalObject, JSC::VM& vm, cons static_cast(globalObject)); RETURN_IF_EXCEPTION(throwScope, {}); + if (UNLIKELY(globalObject->hasDebugger() && globalObject->debugger()->isInteractivelyDebugging())) { + globalObject->debugger()->sourceParsed(globalObject, source.provider(), -1, ""_s); + } JSC::MarkedArgumentBuffer argList; JSValue result = JSC::profiledCall( diff --git a/src/bun.js/bindings/ZigGlobalObject.cpp b/src/bun.js/bindings/ZigGlobalObject.cpp index c27ab89c04..8a20bebea1 100644 --- 
a/src/bun.js/bindings/ZigGlobalObject.cpp
+++ b/src/bun.js/bindings/ZigGlobalObject.cpp
@@ -3443,6 +3443,42 @@ JSC_DEFINE_CUSTOM_SETTER(EventSource_setter,
     return true;
 }

+JSC_DEFINE_HOST_FUNCTION(jsFunctionToClass, (JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame))
+{
+    // Mimic the behavior of class Foo {} for a regular JSFunction.
+    auto& vm = globalObject->vm();
+    auto scope = DECLARE_THROW_SCOPE(vm);
+    auto target = callFrame->argument(0).toObject(globalObject);
+    auto name = callFrame->argument(1);
+    JSObject* base = callFrame->argument(2).getObject();
+    JSObject* prototypeBase = nullptr;
+    RETURN_IF_EXCEPTION(scope, encodedJSValue());
+
+    if (!base) {
+        base = globalObject->functionPrototype();
+    } else if (auto proto = base->getIfPropertyExists(globalObject, vm.propertyNames->prototype)) {
+        if (auto protoObject = proto.getObject()) {
+            prototypeBase = protoObject;
+        }
+    } else {
+        RETURN_IF_EXCEPTION(scope, encodedJSValue());
+        JSC::throwTypeError(globalObject, scope, "Base class must have a prototype property"_s);
+        return encodedJSValue();
+    }
+
+    JSObject* prototype = prototypeBase ? JSC::constructEmptyObject(globalObject, prototypeBase) : JSC::constructEmptyObject(globalObject);
+    RETURN_IF_EXCEPTION(scope, encodedJSValue());
+
+    prototype->structure()->setMayBePrototype(true);
+    prototype->putDirect(vm, vm.propertyNames->constructor, target, PropertyAttribute::DontEnum | 0);
+
+    target->setPrototypeDirect(vm, base);
+    target->putDirect(vm, vm.propertyNames->prototype, prototype, PropertyAttribute::DontEnum | 0);
+    target->putDirect(vm, vm.propertyNames->name, name, PropertyAttribute::DontEnum | 0);
+
+    return JSValue::encode(jsUndefined());
+}
+
 EncodedJSValue GlobalObject::assignToStream(JSValue stream, JSValue controller)
 {
     JSC::VM& vm = this->vm();
@@ -3544,6 +3580,7 @@ void GlobalObject::addBuiltinGlobals(JSC::VM& vm)
         GlobalPropertyInfo(builtinNames.requireMapPrivateName(), this->requireMap(), PropertyAttribute::DontDelete | PropertyAttribute::ReadOnly | 0),
         GlobalPropertyInfo(builtinNames.TextEncoderStreamEncoderPrivateName(), JSTextEncoderStreamEncoderConstructor(), PropertyAttribute::DontDelete | PropertyAttribute::ReadOnly | 0),
         GlobalPropertyInfo(builtinNames.makeErrorWithCodePrivateName(), JSFunction::create(vm, this, 2, String(), jsFunctionMakeErrorWithCode, ImplementationVisibility::Public), PropertyAttribute::DontDelete | PropertyAttribute::ReadOnly),
+        GlobalPropertyInfo(builtinNames.toClassPrivateName(), JSFunction::create(vm, this, 1, String(), jsFunctionToClass, ImplementationVisibility::Public), PropertyAttribute::DontDelete | PropertyAttribute::ReadOnly),
     };
     addStaticGlobals(staticGlobals, std::size(staticGlobals));

diff --git a/src/bun.js/node/node_os.zig b/src/bun.js/node/node_os.zig
index fbba75919b..c18a7b5860 100644
--- a/src/bun.js/node/node_os.zig
+++ b/src/bun.js/node/node_os.zig
@@ -14,22 +14,22 @@ pub const OS = struct {
     pub fn create(globalObject: *JSC.JSGlobalObject) JSC.JSValue {
         const module = JSC.JSValue.createEmptyObject(globalObject, 16);

-        module.put(globalObject, JSC.ZigString.static("cpus"), JSC.NewFunction(globalObject, JSC.ZigString.static("cpus"), 0, cpus, true));
-        module.put(globalObject, JSC.ZigString.static("freemem"), JSC.NewFunction(globalObject, JSC.ZigString.static("freemem"), 0, freemem, true));
-        module.put(globalObject, JSC.ZigString.static("getPriority"), JSC.NewFunction(globalObject, JSC.ZigString.static("getPriority"), 1, getPriority, true));
-        module.put(globalObject, JSC.ZigString.static("homedir"),
JSC.NewFunction(globalObject, JSC.ZigString.static("homedir"), 0, homedir, true)); - module.put(globalObject, JSC.ZigString.static("hostname"), JSC.NewFunction(globalObject, JSC.ZigString.static("hostname"), 0, hostname, true)); - module.put(globalObject, JSC.ZigString.static("loadavg"), JSC.NewFunction(globalObject, JSC.ZigString.static("loadavg"), 0, loadavg, true)); - module.put(globalObject, JSC.ZigString.static("machine"), JSC.NewFunction(globalObject, JSC.ZigString.static("machine"), 0, machine, true)); - module.put(globalObject, JSC.ZigString.static("networkInterfaces"), JSC.NewFunction(globalObject, JSC.ZigString.static("networkInterfaces"), 0, networkInterfaces, true)); - module.put(globalObject, JSC.ZigString.static("release"), JSC.NewFunction(globalObject, JSC.ZigString.static("release"), 0, release, true)); - module.put(globalObject, JSC.ZigString.static("setPriority"), JSC.NewFunction(globalObject, JSC.ZigString.static("setPriority"), 2, setPriority, true)); - module.put(globalObject, JSC.ZigString.static("totalmem"), JSC.NewFunction(globalObject, JSC.ZigString.static("totalmem"), 0, totalmem, true)); - module.put(globalObject, JSC.ZigString.static("type"), JSC.NewFunction(globalObject, JSC.ZigString.static("type"), 0, OS.type, true)); - module.put(globalObject, JSC.ZigString.static("uptime"), JSC.NewFunction(globalObject, JSC.ZigString.static("uptime"), 0, uptime, true)); - module.put(globalObject, JSC.ZigString.static("userInfo"), JSC.NewFunction(globalObject, JSC.ZigString.static("userInfo"), 0, userInfo, true)); - module.put(globalObject, JSC.ZigString.static("version"), JSC.NewFunction(globalObject, JSC.ZigString.static("version"), 0, version, true)); - module.put(globalObject, JSC.ZigString.static("machine"), JSC.NewFunction(globalObject, JSC.ZigString.static("machine"), 0, machine, true)); + module.put(globalObject, JSC.ZigString.static("cpus"), JSC.NewFunction(globalObject, JSC.ZigString.static("cpus"), 0, cpus, false)); + module.put(globalObject, JSC.ZigString.static("freemem"), JSC.NewFunction(globalObject, JSC.ZigString.static("freemem"), 0, freemem, false)); + module.put(globalObject, JSC.ZigString.static("getPriority"), JSC.NewFunction(globalObject, JSC.ZigString.static("getPriority"), 1, getPriority, false)); + module.put(globalObject, JSC.ZigString.static("homedir"), JSC.NewFunction(globalObject, JSC.ZigString.static("homedir"), 0, homedir, false)); + module.put(globalObject, JSC.ZigString.static("hostname"), JSC.NewFunction(globalObject, JSC.ZigString.static("hostname"), 0, hostname, false)); + module.put(globalObject, JSC.ZigString.static("loadavg"), JSC.NewFunction(globalObject, JSC.ZigString.static("loadavg"), 0, loadavg, false)); + module.put(globalObject, JSC.ZigString.static("machine"), JSC.NewFunction(globalObject, JSC.ZigString.static("machine"), 0, machine, false)); + module.put(globalObject, JSC.ZigString.static("networkInterfaces"), JSC.NewFunction(globalObject, JSC.ZigString.static("networkInterfaces"), 0, networkInterfaces, false)); + module.put(globalObject, JSC.ZigString.static("release"), JSC.NewFunction(globalObject, JSC.ZigString.static("release"), 0, release, false)); + module.put(globalObject, JSC.ZigString.static("setPriority"), JSC.NewFunction(globalObject, JSC.ZigString.static("setPriority"), 2, setPriority, false)); + module.put(globalObject, JSC.ZigString.static("totalmem"), JSC.NewFunction(globalObject, JSC.ZigString.static("totalmem"), 0, totalmem, false)); + module.put(globalObject, JSC.ZigString.static("type"), 
JSC.NewFunction(globalObject, JSC.ZigString.static("type"), 0, OS.type, false)); + module.put(globalObject, JSC.ZigString.static("uptime"), JSC.NewFunction(globalObject, JSC.ZigString.static("uptime"), 0, uptime, false)); + module.put(globalObject, JSC.ZigString.static("userInfo"), JSC.NewFunction(globalObject, JSC.ZigString.static("userInfo"), 0, userInfo, false)); + module.put(globalObject, JSC.ZigString.static("version"), JSC.NewFunction(globalObject, JSC.ZigString.static("version"), 0, version, false)); + module.put(globalObject, JSC.ZigString.static("machine"), JSC.NewFunction(globalObject, JSC.ZigString.static("machine"), 0, machine, false)); return module; } diff --git a/src/bundler.zig b/src/bundler.zig index e21e1b1f15..d45a508940 100644 --- a/src/bundler.zig +++ b/src/bundler.zig @@ -51,6 +51,7 @@ const Resolver = _resolver.Resolver; const TOML = @import("./toml/toml_parser.zig").TOML; const JSC = bun.JSC; const PackageManager = @import("./install/install.zig").PackageManager; +const DataURL = @import("./resolver/data_url.zig").DataURL; pub fn MacroJSValueType_() type { if (comptime JSC.is_bindgen) { @@ -1300,6 +1301,18 @@ pub const Bundler = struct { break :brk logger.Source.initPathString(path.text, ""); } + if (strings.startsWith(path.text, "data:")) { + const data_url = DataURL.parseWithoutCheck(path.text) catch |err| { + bundler.log.addErrorFmt(null, logger.Loc.Empty, bundler.allocator, "{s} parsing data url \"{s}\"", .{ @errorName(err), path.text }) catch {}; + return null; + }; + const body = data_url.decodeData(this_parse.allocator) catch |err| { + bundler.log.addErrorFmt(null, logger.Loc.Empty, bundler.allocator, "{s} decoding data \"{s}\"", .{ @errorName(err), path.text }) catch {}; + return null; + }; + break :brk logger.Source.initPathString(path.text, body); + } + const entry = bundler.resolver.caches.fs.readFileWithAllocator( if (use_shared_buffer) bun.fs_allocator else this_parse.allocator, bundler.fs, diff --git a/src/js/builtins.d.ts b/src/js/builtins.d.ts index 12d7d7e7f9..3e88689950 100644 --- a/src/js/builtins.d.ts +++ b/src/js/builtins.d.ts @@ -546,3 +546,16 @@ declare interface Error { */ declare function $ERR_INVALID_ARG_TYPE(argName: string, expectedType: string, actualValue: string): TypeError; declare function $ERR_INVALID_ARG_TYPE(argName: string, expectedTypes: any[], actualValue: string): TypeError; +/** + * Convert a function to a class-like object. + * + * This does: + * - Sets the name of the function to the given name + * - Sets .prototype to Object.create(base?.prototype, { constructor: { value: fn } }) + * - Calls Object.setPrototypeOf(fn, base ?? 
Function.prototype) + * + * @param fn - The function to convert to a class + * @param name - The name of the class + * @param base - The base class to inherit from + */ +declare function $toClass(fn: Function, name: string, base?: Function | undefined | null); diff --git a/src/js/builtins/BunBuiltinNames.h b/src/js/builtins/BunBuiltinNames.h index eabe9df617..12fd5d6299 100644 --- a/src/js/builtins/BunBuiltinNames.h +++ b/src/js/builtins/BunBuiltinNames.h @@ -81,7 +81,6 @@ using namespace JSC; macro(encoding) \ macro(end) \ macro(errno) \ - macro(makeErrorWithCode) \ macro(errorSteps) \ macro(evaluateCommonJSModule) \ macro(evaluated) \ @@ -134,6 +133,7 @@ using namespace JSC; macro(localStreams) \ macro(main) \ macro(makeDOMException) \ + macro(makeErrorWithCode) \ macro(makeGetterTypeError) \ macro(makeThisTypeError) \ macro(method) \ @@ -152,8 +152,8 @@ using namespace JSC; macro(password) \ macro(patch) \ macro(path) \ - macro(paths) \ macro(pathname) \ + macro(paths) \ macro(pause) \ macro(pendingAbortRequest) \ macro(pendingPullIntos) \ @@ -227,6 +227,7 @@ using namespace JSC; macro(textEncoderStreamEncoder) \ macro(TextEncoderStreamEncoder) \ macro(textEncoderStreamTransform) \ + macro(toClass) \ macro(toNamespacedPath) \ macro(trace) \ macro(transformAlgorithm) \ diff --git a/src/js/node/os.ts b/src/js/node/os.ts index 87682a72a4..f962ed31e2 100644 --- a/src/js/node/os.ts +++ b/src/js/node/os.ts @@ -87,31 +87,40 @@ function lazyCpus({ cpus }) { // all logic based on `process.platform` and `process.arch` is inlined at bundle time function bound(obj) { return { - availableParallelism: () => navigator.hardwareConcurrency, - arch: () => process.arch, + availableParallelism: function () { + return navigator.hardwareConcurrency; + }, + arch: function () { + return process.arch; + }, cpus: lazyCpus(obj), - endianness: () => (process.arch === "arm64" || process.arch === "x64" ? "LE" : $bundleError("TODO: endianness")), + endianness: function () { + return process.arch === "arm64" || process.arch === "x64" ? "LE" : $bundleError("TODO: endianness"); + }, freemem: obj.freemem.bind(obj), getPriority: obj.getPriority.bind(obj), homedir: obj.homedir.bind(obj), hostname: obj.hostname.bind(obj), loadavg: obj.loadavg.bind(obj), networkInterfaces: obj.networkInterfaces.bind(obj), - platform: () => process.platform, + platform: function () { + return process.platform; + }, release: obj.release.bind(obj), setPriority: obj.setPriority.bind(obj), get tmpdir() { return tmpdir; }, totalmem: obj.totalmem.bind(obj), - type: () => - process.platform === "win32" + type: function () { + return process.platform === "win32" ? "Windows_NT" : process.platform === "darwin" ? "Darwin" : process.platform === "linux" ? 
"Linux" - : $bundleError("TODO: type"), + : $bundleError("TODO: type"); + }, uptime: obj.uptime.bind(obj), userInfo: obj.userInfo.bind(obj), version: obj.version.bind(obj), diff --git a/src/js/node/zlib.ts b/src/js/node/zlib.ts index ef8f56317b..77651013eb 100644 --- a/src/js/node/zlib.ts +++ b/src/js/node/zlib.ts @@ -206,7 +206,7 @@ function ZlibBase(opts, mode, handle, { flush, finishFlush, fullFlush }) { this._info = opts && opts.info; this._maxOutputLength = maxOutputLength; } -ZlibBase.prototype = Object.create(Transform.prototype); +$toClass(ZlibBase, "ZlibBase", Transform); ObjectDefineProperty(ZlibBase.prototype, "_closed", { configurable: true, @@ -576,7 +576,7 @@ function Zlib(opts, mode) { this._level = level; this._strategy = strategy; } -Zlib.prototype = Object.create(ZlibBase.prototype); +$toClass(Zlib, "Zlib", ZlibBase); // This callback is used by `.params()` to wait until a full flush happened before adjusting the parameters. // In particular, the call to the native `params()` function should not happen while a write is currently in progress on the threadpool. @@ -605,58 +605,63 @@ function Deflate(opts) { if (!(this instanceof Deflate)) return new Deflate(opts); Zlib.$apply(this, [opts, DEFLATE]); } -Deflate.prototype = Object.create(Zlib.prototype); +$toClass(Deflate, "Deflate", Zlib); function Inflate(opts) { if (!(this instanceof Inflate)) return new Inflate(opts); Zlib.$apply(this, [opts, INFLATE]); } -Inflate.prototype = Object.create(Zlib.prototype); +$toClass(Inflate, "Inflate", Zlib); function Gzip(opts) { if (!(this instanceof Gzip)) return new Gzip(opts); Zlib.$apply(this, [opts, GZIP]); } -Gzip.prototype = Object.create(Zlib.prototype); +$toClass(Gzip, "Gzip", Zlib); function Gunzip(opts) { if (!(this instanceof Gunzip)) return new Gunzip(opts); Zlib.$apply(this, [opts, GUNZIP]); } -Gunzip.prototype = Object.create(Zlib.prototype); +$toClass(Gunzip, "Gunzip", Zlib); function DeflateRaw(opts) { if (opts && opts.windowBits === 8) opts.windowBits = 9; if (!(this instanceof DeflateRaw)) return new DeflateRaw(opts); Zlib.$apply(this, [opts, DEFLATERAW]); } -DeflateRaw.prototype = Object.create(Zlib.prototype); +$toClass(DeflateRaw, "DeflateRaw", Zlib); function InflateRaw(opts) { if (!(this instanceof InflateRaw)) return new InflateRaw(opts); Zlib.$apply(this, [opts, INFLATERAW]); } -InflateRaw.prototype = Object.create(Zlib.prototype); +$toClass(InflateRaw, "InflateRaw", Zlib); function Unzip(opts) { if (!(this instanceof Unzip)) return new Unzip(opts); Zlib.$apply(this, [opts, UNZIP]); } -Unzip.prototype = Object.create(Zlib.prototype); +$toClass(Unzip, "Unzip", Zlib); -function createConvenienceMethod(ctor, sync) { +function createConvenienceMethod(ctor, sync, methodName) { if (sync) { - return function syncBufferWrapper(buffer, opts) { + const fn = function (buffer, opts) { return zlibBufferSync(new ctor(opts), buffer); }; + ObjectDefineProperty(fn, "name", { value: methodName }); + return fn; + } else { + const fn = function (buffer, opts, callback) { + if (typeof opts === "function") { + callback = opts; + opts = {}; + } + return zlibBuffer(new ctor(opts), buffer, callback); + }; + ObjectDefineProperty(fn, "name", { value: methodName }); + return fn; } - return function asyncBufferWrapper(buffer, opts, callback) { - if (typeof opts === "function") { - callback = opts; - opts = {}; - } - return zlibBuffer(new ctor(opts), buffer, callback); - }; } const kMaxBrotliParam = 9; @@ -696,29 +701,19 @@ function Brotli(opts, mode) { ZlibBase.$apply(this, [opts, mode, 
handle, brotliDefaultOpts]); } -Brotli.prototype = Object.create(Zlib.prototype); +$toClass(Brotli, "Brotli", Zlib); function BrotliCompress(opts) { if (!(this instanceof BrotliCompress)) return new BrotliCompress(opts); Brotli.$apply(this, [opts, BROTLI_ENCODE]); } -BrotliCompress.prototype = Object.create(Brotli.prototype); +$toClass(BrotliCompress, "BrotliCompress", Brotli); function BrotliDecompress(opts) { if (!(this instanceof BrotliDecompress)) return new BrotliDecompress(opts); Brotli.$apply(this, [opts, BROTLI_DECODE]); } -BrotliDecompress.prototype = Object.create(Brotli.prototype); - -function createProperty(ctor) { - return { - configurable: true, - enumerable: true, - value: function (options) { - return new ctor(options); - }, - }; -} +$toClass(BrotliDecompress, "BrotliDecompress", Brotli); // Legacy alias on the C++ wrapper object. ObjectDefineProperty(NativeZlib.prototype, "jsref", { @@ -743,36 +738,55 @@ const zlib = { BrotliCompress, BrotliDecompress, - deflate: createConvenienceMethod(Deflate, false), - deflateSync: createConvenienceMethod(Deflate, true), - gzip: createConvenienceMethod(Gzip, false), - gzipSync: createConvenienceMethod(Gzip, true), - deflateRaw: createConvenienceMethod(DeflateRaw, false), - deflateRawSync: createConvenienceMethod(DeflateRaw, true), - unzip: createConvenienceMethod(Unzip, false), - unzipSync: createConvenienceMethod(Unzip, true), - inflate: createConvenienceMethod(Inflate, false), - inflateSync: createConvenienceMethod(Inflate, true), - gunzip: createConvenienceMethod(Gunzip, false), - gunzipSync: createConvenienceMethod(Gunzip, true), - inflateRaw: createConvenienceMethod(InflateRaw, false), - inflateRawSync: createConvenienceMethod(InflateRaw, true), - brotliCompress: createConvenienceMethod(BrotliCompress, false), - brotliCompressSync: createConvenienceMethod(BrotliCompress, true), - brotliDecompress: createConvenienceMethod(BrotliDecompress, false), - brotliDecompressSync: createConvenienceMethod(BrotliDecompress, true), + deflate: createConvenienceMethod(Deflate, false, "deflate"), + deflateSync: createConvenienceMethod(Deflate, true, "deflateSync"), + gzip: createConvenienceMethod(Gzip, false, "gzip"), + gzipSync: createConvenienceMethod(Gzip, true, "gzipSync"), + deflateRaw: createConvenienceMethod(DeflateRaw, false, "deflateRaw"), + deflateRawSync: createConvenienceMethod(DeflateRaw, true, "deflateRawSync"), + unzip: createConvenienceMethod(Unzip, false, "unzip"), + unzipSync: createConvenienceMethod(Unzip, true, "unzipSync"), + inflate: createConvenienceMethod(Inflate, false, "inflate"), + inflateSync: createConvenienceMethod(Inflate, true, "inflateSync"), + gunzip: createConvenienceMethod(Gunzip, false, "gunzip"), + gunzipSync: createConvenienceMethod(Gunzip, true, "gunzipSync"), + inflateRaw: createConvenienceMethod(InflateRaw, false, "inflateRaw"), + inflateRawSync: createConvenienceMethod(InflateRaw, true, "inflateRawSync"), + brotliCompress: createConvenienceMethod(BrotliCompress, false, "brotliCompress"), + brotliCompressSync: createConvenienceMethod(BrotliCompress, true, "brotliCompressSync"), + brotliDecompress: createConvenienceMethod(BrotliDecompress, false, "brotliDecompress"), + brotliDecompressSync: createConvenienceMethod(BrotliDecompress, true, "brotliDecompressSync"), + + createDeflate: function (options) { + return new Deflate(options); + }, + createInflate: function (options) { + return new Inflate(options); + }, + createDeflateRaw: function (options) { + return new DeflateRaw(options); + }, + createInflateRaw: 
function (options) { + return new InflateRaw(options); + }, + createGzip: function (options) { + return new Gzip(options); + }, + createGunzip: function (options) { + return new Gunzip(options); + }, + createUnzip: function (options) { + return new Unzip(options); + }, + createBrotliCompress: function (options) { + return new BrotliCompress(options); + }, + createBrotliDecompress: function (options) { + return new BrotliDecompress(options); + }, }; ObjectDefineProperties(zlib, { - createDeflate: createProperty(Deflate), - createInflate: createProperty(Inflate), - createDeflateRaw: createProperty(DeflateRaw), - createInflateRaw: createProperty(InflateRaw), - createGzip: createProperty(Gzip), - createGunzip: createProperty(Gunzip), - createUnzip: createProperty(Unzip), - createBrotliCompress: createProperty(BrotliCompress), - createBrotliDecompress: createProperty(BrotliDecompress), constants: { enumerable: true, value: ObjectFreeze(constants), diff --git a/src/js_lexer.zig b/src/js_lexer.zig index 9ada1a3890..aec6e3c8e6 100644 --- a/src/js_lexer.zig +++ b/src/js_lexer.zig @@ -29,7 +29,6 @@ pub const StrictModeReservedWords = tables.StrictModeReservedWords; pub const PropertyModifierKeyword = tables.PropertyModifierKeyword; pub const TypescriptStmtKeyword = tables.TypescriptStmtKeyword; pub const TypeScriptAccessibilityModifier = tables.TypeScriptAccessibilityModifier; -pub const ChildlessJSXTags = tables.ChildlessJSXTags; fn notimpl() noreturn { Output.panic("not implemented yet!", .{}); diff --git a/src/js_lexer_tables.zig b/src/js_lexer_tables.zig index bc87fb0e60..bb01d4112f 100644 --- a/src/js_lexer_tables.zig +++ b/src/js_lexer_tables.zig @@ -552,26 +552,6 @@ pub const TypescriptStmtKeyword = enum { }); }; -// Error: meta is a void element tag and must neither have `children` nor use `dangerouslySetInnerHTML`. -pub const ChildlessJSXTags = ComptimeStringMap(void, .{ - .{ "area", void }, - .{ "base", void }, - .{ "br", void }, - .{ "col", void }, - .{ "embed", void }, - .{ "hr", void }, - .{ "img", void }, - .{ "input", void }, - .{ "keygen", void }, - .{ "link", void }, - .{ "menuitem", void }, - .{ "meta", void }, - .{ "param", void }, - .{ "source", void }, - .{ "track", void }, - .{ "wbr", void }, -}); - // In a microbenchmark, this outperforms pub const jsxEntity = ComptimeStringMap(CodePoint, .{ .{ "Aacute", @as(CodePoint, 0x00C1) }, diff --git a/src/js_parser.zig b/src/js_parser.zig index 68153ae884..da72afcb55 100644 --- a/src/js_parser.zig +++ b/src/js_parser.zig @@ -16345,24 +16345,7 @@ fn NewParser_( const runtime = if (p.options.jsx.runtime == .automatic) options.JSX.Runtime.automatic else options.JSX.Runtime.classic; const is_key_after_spread = e_.flags.contains(.is_key_after_spread); - var children_count = e_.children.len; - - const is_childless_tag = FeatureFlags.react_specific_warnings and children_count > 0 and - tag.data == .e_string and tag.data.e_string.isUTF8() and js_lexer.ChildlessJSXTags.has(tag.data.e_string.slice(p.allocator)); - - children_count = if (is_childless_tag) 0 else children_count; - - if (children_count != e_.children.len) { - // Error: meta is a void element tag and must neither have `children` nor use `dangerouslySetInnerHTML`. 
- // ^ from react-dom - p.log.addWarningFmt( - p.source, - tag.loc, - p.allocator, - "\\<{s} /> is a void element and must not have \"children\"", - .{tag.data.e_string.slice(p.allocator)}, - ) catch {}; - } + const children_count = e_.children.len; // TODO: maybe we should split these into two different AST Nodes // That would reduce the amount of allocations a little diff --git a/test/js/node/string-module.test.js b/test/js/node/string-module.test.js new file mode 100644 index 0000000000..31d4777181 --- /dev/null +++ b/test/js/node/string-module.test.js @@ -0,0 +1,19 @@ +import { expect, test } from "bun:test"; + +test("should import and execute ES module from string", async () => { + const code = `export default function test(arg) { return arg + arg };`; + const mod = await import("data:text/javascript," + code).then(mod => mod.default); + const result = mod(1); + expect(result).toEqual(2); +}); + +test("should import and execute ES module from string (base64)", async () => { + const code = `export default function test(arg) { return arg + arg; }`; + const mod = await import("data:text/javascript;base64," + btoa(code)).then(mod => mod.default); + const result = mod(1); + expect(result).toEqual(2); +}); + +test("should throw when importing malformed string (base64)", async () => { + expect(() => import("data:text/javascript;base64,asdasdasd")).toThrowError("Base64DecodeError"); +}); diff --git a/test/js/node/zlib/zlib.test.js b/test/js/node/zlib/zlib.test.js index 7dfb652ee8..102bb91461 100644 --- a/test/js/node/zlib/zlib.test.js +++ b/test/js/node/zlib/zlib.test.js @@ -8,6 +8,51 @@ import * as stream from "node:stream"; import * as util from "node:util"; import * as zlib from "node:zlib"; +describe("prototype and name and constructor", () => { + for (let [name, Class] of [ + ["Gzip", zlib.Gzip], + ["Gunzip", zlib.Gunzip], + ["Deflate", zlib.Deflate], + ["Inflate", zlib.Inflate], + ["DeflateRaw", zlib.DeflateRaw], + ]) { + describe(`${name}`, () => { + it(`${name}.prototype should be instanceof ${name}.__proto__`, () => { + expect(Class.prototype).toBeInstanceOf(Class.__proto__); + }); + it(`${name}.prototype.constructor should be ${name}`, () => { + expect(Class.prototype.constructor).toBe(Class); + }); + it(`${name}.name should be ${name}`, () => { + expect(Class.name).toBe(name); + }); + it(`${name}.prototype.__proto__.constructor.name should be Zlib`, () => { + expect(Class.prototype.__proto__.constructor.name).toBe("Zlib"); + }); + }); + } + + for (let [name, Class] of [ + ["BrotliCompress", zlib.BrotliCompress], + ["BrotliDecompress", zlib.BrotliDecompress], + ]) { + describe(`${name}`, () => { + it(`${name}.prototype should be instanceof ${name}.__proto__`, () => { + expect(Class.prototype).toBeInstanceOf(Class.__proto__); + }); + it(`${name}.prototype.constructor should be ${name}`, () => { + expect(Class.prototype.constructor).toBe(Class); + }); + it(`${name}.name should be ${name}`, () => { + expect(Class.name).toBe(name); + }); + it(`${name}.prototype.__proto__.constructor.name should be Brotli`, () => { + expect(Class.prototype.__proto__.constructor.name).toBe("Brotli"); + }); + }); + } +}); + describe("zlib", () => { for (let library of ["zlib", "libdeflate"]) { for (let outputLibrary of ["zlib", "libdeflate"]) { diff --git a/test/regression/issue/14515.test.tsx b/test/regression/issue/14515.test.tsx new file mode 100644 index 0000000000..cdcc93ec64 --- /dev/null +++ b/test/regression/issue/14515.test.tsx @@ -0,0 +1,30 @@ +import { expect, test } from "bun:test"; + +export 
function Input(a: InlineInputAttrs, ch: DocumentFragment) { + const o_model = a.model + const nullable = (a.type||'').indexOf('null') > -1 + + return + {$on('input', (ev) => { + var v = ev.currentTarget.value + if (nullable && v === '') { + o_model.set(null!) + } else { + // @ts-ignore typescript is confused by the type of o_model, rightly so. + o_model.set(to_obs(v)) + } + })} + + + +} + +function _pad(n: number) { + return (n < 10 ? ('0' + n) : n) +} + +function _iso_date(d: Date) { + return `${d.getFullYear()}-${_pad(d.getMonth()+1)}-${_pad(d.getDate())}` +} + +test("runs without crashing", () => { }) diff --git a/test/runners/mocha.ts b/test/runners/mocha.ts new file mode 100644 index 0000000000..5c6a4881f9 --- /dev/null +++ b/test/runners/mocha.ts @@ -0,0 +1,15 @@ +import { describe, test, it } from "bun:test"; +import { beforeAll, beforeEach, afterAll, afterEach } from "bun:test"; + +function set(name: string, value: unknown): void { + // @ts-expect-error + globalThis[name] = value; +} + +set("describe", describe); +set("test", test); +set("it", it); +set("before", beforeAll); +set("beforeEach", beforeEach); +set("after", afterAll); +set("afterEach", afterEach); diff --git a/test/vendor.json b/test/vendor.json index e6eb7a4e67..bc704a7c45 100644 --- a/test/vendor.json +++ b/test/vendor.json @@ -3,13 +3,5 @@ "package": "elysia", "repository": "https://github.com/elysiajs/elysia", "tag": "1.1.24" - }, - { - "package": "uuid", - "repository": "https://github.com/uuidjs/uuid", - "tag": "v10.0.0", - "testRunner": "node", - "testPath": "src/test", - "skipTests": true } ]
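
Note on the `$toClass` refactor in src/js/node/zlib.ts above: the constructors previously used the pattern `Deflate.prototype = Object.create(Zlib.prototype)`, which leaves `prototype.constructor` and the function's own prototype chain untouched. Below is a rough JavaScript sketch of the semantics documented for `$toClass` in src/js/builtins.d.ts; the actual implementation is the C++ host function `jsFunctionToClass` added in ZigGlobalObject.cpp, and the helper name `toClassSketch` is purely illustrative, not part of the patch.

function toClassSketch(fn, name, base) {
  // Rename the function, mirroring `class <name> extends <base> {}`.
  Object.defineProperty(fn, "name", { value: name, configurable: true });
  // fn.prototype chains to base.prototype and points back at fn via .constructor.
  fn.prototype = Object.create(base ? base.prototype : Object.prototype, {
    constructor: { value: fn, writable: true, configurable: true },
  });
  // Static inheritance: the function itself chains to base (or Function.prototype).
  Object.setPrototypeOf(fn, base ?? Function.prototype);
  return fn;
}

// Mirrors the zlib.ts change `$toClass(Deflate, "Deflate", Zlib)` and the
// expectations in test/js/node/zlib/zlib.test.js:
function Zlib() {}
function Deflate(opts) {
  if (!(this instanceof Deflate)) return new Deflate(opts);
}
toClassSketch(Deflate, "Deflate", Zlib);
console.log(Deflate.name); // "Deflate"
console.log(Deflate.prototype.constructor === Deflate); // true
console.log(Object.getPrototypeOf(Deflate.prototype) === Zlib.prototype); // true
console.log(Object.getPrototypeOf(Deflate) === Zlib); // true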