diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs index 70d4e44c1d..e3adb70fb1 100755 --- a/.buildkite/ci.mjs +++ b/.buildkite/ci.mjs @@ -5,838 +5,1126 @@ * @link https://buildkite.com/docs/pipelines/defining-steps */ -import { writeFileSync } from "node:fs"; import { join } from "node:path"; import { getBootstrapVersion, + getBuildkiteEmoji, + getBuildMetadata, getBuildNumber, - getCanaryRevision, - getChangedFiles, - getCommit, getCommitMessage, + getEmoji, getEnv, getLastSuccessfulBuild, - getMainBranch, - getTargetBranch, isBuildkite, + isBuildManual, isFork, isMainBranch, isMergeQueue, - printEnvironment, + parseBoolean, spawnSafe, + startGroup, toYaml, uploadArtifact, + writeFile, } from "../scripts/utils.mjs"; /** - * @typedef PipelineOptions - * @property {string} [buildId] - * @property {boolean} [buildImages] - * @property {boolean} [publishImages] - * @property {boolean} [skipTests] + * @typedef {"linux" | "darwin" | "windows"} Os + * @typedef {"aarch64" | "x64"} Arch + * @typedef {"musl"} Abi + * @typedef {"debian" | "ubuntu" | "alpine" | "amazonlinux"} Distro + * @typedef {"latest" | "previous" | "oldest" | "eol"} Tier + * @typedef {"release" | "assert" | "debug"} Profile */ /** - * @param {PipelineOptions} options + * @typedef Target + * @property {Os} os + * @property {Arch} arch + * @property {Abi} [abi] + * @property {boolean} [baseline] + * @property {boolean} [canary] + * @property {Profile} [profile] */ -function getPipeline(options) { - const { buildId, buildImages, publishImages, skipTests } = options; - /** - * Helpers - */ +/** + * @param {Target} target + * @returns {string} + */ +function getTargetKey(target) { + const { os, arch, abi, baseline, profile } = target; + let key = `${os}-${arch}`; + if (abi) { + key += `-${abi}`; + } + if (baseline) { + key += "-baseline"; + } + if (profile && profile !== "release") { + key += `-${profile}`; + } + return key; +} - /** - * @param {string} text - * @returns {string} - * @link https://github.com/buildkite/emojis#emoji-reference - */ - const getEmoji = string => { - if (string === "amazonlinux") { - return ":aws:"; - } - return `:${string}:`; +/** + * @param {Target} target + * @returns {string} + */ +function getTargetLabel(target) { + const { os, arch, abi, baseline, profile } = target; + let label = `${getBuildkiteEmoji(os)} ${arch}`; + if (abi) { + label += `-${abi}`; + } + if (baseline) { + label += "-baseline"; + } + if (profile && profile !== "release") { + label += `-${profile}`; + } + return label; +} + +/** + * @typedef Platform + * @property {Os} os + * @property {Arch} arch + * @property {Abi} [abi] + * @property {boolean} [baseline] + * @property {boolean} [canary] + * @property {Profile} [profile] + * @property {Distro} [distro] + * @property {string} release + * @property {Tier} [tier] + */ + +/** + * @type {Platform[]} + */ +const buildPlatforms = [ + { os: "darwin", arch: "aarch64", release: "14" }, + { os: "darwin", arch: "x64", release: "14" }, + { os: "linux", arch: "aarch64", distro: "debian", release: "11" }, + { os: "linux", arch: "x64", distro: "debian", release: "11" }, + { os: "linux", arch: "x64", baseline: true, distro: "debian", release: "11" }, + { os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.20" }, + { os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.20" }, + { os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.20" }, + { os: "windows", arch: "x64", release: "2019" }, + { os: "windows", arch: "x64", baseline: 
true, release: "2019" }, +]; + +/** + * @type {Platform[]} + */ +const testPlatforms = [ + { os: "darwin", arch: "aarch64", release: "14", tier: "latest" }, + { os: "darwin", arch: "aarch64", release: "13", tier: "previous" }, + { os: "darwin", arch: "x64", release: "14", tier: "latest" }, + { os: "darwin", arch: "x64", release: "13", tier: "previous" }, + { os: "linux", arch: "aarch64", distro: "debian", release: "12", tier: "latest" }, + { os: "linux", arch: "aarch64", distro: "debian", release: "11", tier: "previous" }, + { os: "linux", arch: "x64", distro: "debian", release: "12", tier: "latest" }, + { os: "linux", arch: "x64", distro: "debian", release: "11", tier: "previous" }, + { os: "linux", arch: "x64", baseline: true, distro: "debian", release: "12", tier: "latest" }, + { os: "linux", arch: "x64", baseline: true, distro: "debian", release: "11", tier: "previous" }, + { os: "linux", arch: "aarch64", distro: "ubuntu", release: "24.04", tier: "latest" }, + { os: "linux", arch: "aarch64", distro: "ubuntu", release: "22.04", tier: "previous" }, + { os: "linux", arch: "aarch64", distro: "ubuntu", release: "20.04", tier: "oldest" }, + { os: "linux", arch: "x64", distro: "ubuntu", release: "24.04", tier: "latest" }, + { os: "linux", arch: "x64", distro: "ubuntu", release: "22.04", tier: "previous" }, + { os: "linux", arch: "x64", distro: "ubuntu", release: "20.04", tier: "oldest" }, + { os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "24.04", tier: "latest" }, + { os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "22.04", tier: "previous" }, + { os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "20.04", tier: "oldest" }, + { os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.20", tier: "latest" }, + { os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.20", tier: "latest" }, + { os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.20", tier: "latest" }, + { os: "windows", arch: "x64", release: "2025", tier: "latest" }, + { os: "windows", arch: "x64", release: "2022", tier: "previous" }, + { os: "windows", arch: "x64", release: "2019", tier: "oldest" }, + { os: "windows", arch: "x64", release: "2025", baseline: true, tier: "latest" }, + { os: "windows", arch: "x64", release: "2022", baseline: true, tier: "previous" }, + { os: "windows", arch: "x64", release: "2019", baseline: true, tier: "oldest" }, +]; + +/** + * @param {Platform} platform + * @returns {string} + */ +function getPlatformKey(platform) { + const { distro, release } = platform; + const target = getTargetKey(platform); + const version = release.replace(/\./g, ""); + if (distro) { + return `${target}-${distro}-${version}`; + } + return `${target}-${version}`; +} + +/** + * @param {Platform} platform + * @returns {string} + */ +function getPlatformLabel(platform) { + const { os, arch, baseline, profile, distro, release } = platform; + let label = `${getBuildkiteEmoji(distro || os)} ${release} ${arch}`; + if (baseline) { + label += "-baseline"; + } + if (profile && profile !== "release") { + label += `-${profile}`; + } + return label; +} + +/** + * @param {Platform} platform + * @returns {string} + */ +function getImageKey(platform) { + const { os, arch, distro, release } = platform; + const version = release.replace(/\./g, ""); + if (distro) { + return `${os}-${arch}-${distro}-${version}`; + } + return `${os}-${arch}-${version}`; +} + +/** + * @param {Platform} platform + * @returns {string} + */ 
+function getImageLabel(platform) { + const { os, arch, distro, release } = platform; + return `${getBuildkiteEmoji(distro || os)} ${release} ${arch}`; +} + +/** + * @param {Platform} platform + * @param {boolean} [dryRun] + * @returns {string} + */ +function getImageName(platform, dryRun) { + const { os, arch, distro, release } = platform; + const name = distro ? `${os}-${arch}-${distro}-${release}` : `${os}-${arch}-${release}`; + if (dryRun) { + return `${name}-build-${getBuildNumber()}`; + } + return `${name}-v${getBootstrapVersion(os)}`; +} + +/** + * @param {number} [limit] + * @link https://buildkite.com/docs/pipelines/command-step#retry-attributes + */ +function getRetry(limit = 0) { + return { + manual: { + permit_on_passed: true, + }, + automatic: [ + { exit_status: 1, limit }, + { exit_status: -1, limit: 3 }, + { exit_status: 255, limit: 3 }, + { signal_reason: "cancel", limit: 3 }, + { signal_reason: "agent_stop", limit: 3 }, + ], }; +} - /** - * @typedef {"linux" | "darwin" | "windows"} Os - * @typedef {"aarch64" | "x64"} Arch - * @typedef {"musl"} Abi - */ +/** + * @returns {number} + * @link https://buildkite.com/docs/pipelines/managing-priorities + */ +function getPriority() { + if (isFork()) { + return -1; + } + if (isMainBranch()) { + return 2; + } + if (isMergeQueue()) { + return 1; + } + return 0; +} - /** - * @typedef Target - * @property {Os} os - * @property {Arch} arch - * @property {Abi} [abi] - * @property {boolean} [baseline] - */ +/** + * Agents + */ - /** - * @param {Target} target - * @returns {string} - */ - const getTargetKey = target => { - const { os, arch, abi, baseline } = target; - let key = `${os}-${arch}`; - if (abi) { - key += `-${abi}`; - } - if (baseline) { - key += "-baseline"; - } - return key; +/** + * @typedef {Object} Ec2Options + * @property {string} instanceType + * @property {number} cpuCount + * @property {number} threadsPerCore + */ + +/** + * @param {Platform} platform + * @param {Ec2Options} options + * @returns {Agent} + */ +function getEc2Agent(platform, options) { + const { os, arch, abi, distro, release } = platform; + const { instanceType, cpuCount, threadsPerCore } = options; + return { + os, + arch, + abi, + distro, + release, + // The agent is created by robobun, see more details here: + // https://github.com/oven-sh/robobun/blob/d46c07e0ac5ac0f9ffe1012f0e98b59e1a0d387a/src/robobun.ts#L1707 + robobun: true, + robobun2: true, + "image-name": getImageName(platform), + "instance-type": instanceType, + "cpu-count": cpuCount, + "threads-per-core": threadsPerCore, + "preemptible": false, }; +} - /** - * @param {Target} target - * @returns {string} - */ - const getTargetLabel = target => { - const { os, arch, abi, baseline } = target; - let label = `${getEmoji(os)} ${arch}`; - if (abi) { - label += `-${abi}`; - } - if (baseline) { - label += "-baseline"; - } - return label; - }; +/** + * @param {Platform} platform + * @returns {string} + */ +function getCppAgent(platform) { + const { os, arch } = platform; - /** - * @typedef Platform - * @property {Os} os - * @property {Arch} arch - * @property {Abi} [abi] - * @property {boolean} [baseline] - * @property {string} [distro] - * @property {string} release - */ - - /** - * @param {Platform} platform - * @returns {string} - */ - const getPlatformKey = platform => { - const { os, arch, abi, baseline, distro, release } = platform; - const target = getTargetKey({ os, arch, abi, baseline }); - if (distro) { - return `${target}-${distro}-${release.replace(/\./g, "")}`; - } - return 
`${target}-${release.replace(/\./g, "")}`; - }; - - /** - * @param {Platform} platform - * @returns {string} - */ - const getPlatformLabel = platform => { - const { os, arch, baseline, distro, release } = platform; - let label = `${getEmoji(distro || os)} ${release} ${arch}`; - if (baseline) { - label += "-baseline"; - } - return label; - }; - - /** - * @param {Platform} platform - * @returns {string} - */ - const getImageKey = platform => { - const { os, arch, distro, release } = platform; - if (distro) { - return `${os}-${arch}-${distro}-${release.replace(/\./g, "")}`; - } - return `${os}-${arch}-${release.replace(/\./g, "")}`; - }; - - /** - * @param {Platform} platform - * @returns {string} - */ - const getImageLabel = platform => { - const { os, arch, distro, release } = platform; - return `${getEmoji(distro || os)} ${release} ${arch}`; - }; - - /** - * @param {number} [limit] - * @link https://buildkite.com/docs/pipelines/command-step#retry-attributes - */ - const getRetry = (limit = 0) => { - return { - automatic: [ - { exit_status: 1, limit }, - { exit_status: -1, limit: 3 }, - { exit_status: 255, limit: 3 }, - { signal_reason: "agent_stop", limit: 3 }, - ], - }; - }; - - /** - * @returns {number} - * @link https://buildkite.com/docs/pipelines/managing-priorities - */ - const getPriority = () => { - if (isFork()) { - return -1; - } - if (isMainBranch()) { - return 2; - } - if (isMergeQueue()) { - return 1; - } - return 0; - }; - - /** - * @param {Target} target - * @returns {Record} - */ - const getBuildEnv = target => { - const { baseline, abi } = target; - return { - ENABLE_BASELINE: baseline ? "ON" : "OFF", - ABI: abi === "musl" ? "musl" : undefined, - }; - }; - - /** - * @param {Target} target - * @returns {string} - */ - const getBuildToolchain = target => { - const { os, arch, abi, baseline } = target; - let key = `${os}-${arch}`; - if (abi) { - key += `-${abi}`; - } - if (baseline) { - key += "-baseline"; - } - return key; - }; - - /** - * Agents - */ - - /** - * @typedef {Record} Agent - */ - - /** - * @param {Platform} platform - * @returns {boolean} - */ - const isUsingNewAgent = platform => { - const { os } = platform; - if (os === "linux") { - return true; - } - return false; - }; - - /** - * @param {"v1" | "v2"} version - * @param {Platform} platform - * @param {string} [instanceType] - * @returns {Agent} - */ - const getEmphemeralAgent = (version, platform, instanceType) => { - const { os, arch, abi, distro, release } = platform; - if (version === "v1") { - return { - robobun: true, - os, - arch, - distro, - release, - }; - } - let image; - if (distro) { - image = `${os}-${arch}-${distro}-${release}`; - } else { - image = `${os}-${arch}-${release}`; - } - if (buildImages && !publishImages) { - image += `-build-${getBuildNumber()}`; - } else { - image += `-v${getBootstrapVersion()}`; - } - return { - robobun: true, - robobun2: true, - os, - arch, - abi, - distro, - release, - "image-name": image, - "instance-type": instanceType, - }; - }; - - /** - * @param {Target} target - * @returns {Agent} - */ - const getBuildAgent = target => { - const { os, arch, abi } = target; - if (isUsingNewAgent(target)) { - const instanceType = arch === "aarch64" ? "c8g.8xlarge" : "c7i.8xlarge"; - return getEmphemeralAgent("v2", target, instanceType); - } + if (os === "darwin") { return { queue: `build-${os}`, os, arch, - abi, }; + } + + return getEc2Agent(platform, { + instanceType: arch === "aarch64" ? 
"c8g.16xlarge" : "c7i.16xlarge", + cpuCount: 64, + threadsPerCore: 1, + }); +} + +/** + * @param {Platform} platform + * @returns {Agent} + */ +function getZigAgent(platform) { + const { arch } = platform; + + return { + queue: "build-zig", }; - /** - * @param {Target} target - * @returns {Agent} - */ - const getZigAgent = platform => { - const { arch } = platform; - const instanceType = arch === "aarch64" ? "c8g.2xlarge" : "c7i.2xlarge"; + // return getEc2Agent( + // { + // os: "linux", + // arch, + // distro: "debian", + // release: "11", + // }, + // { + // instanceType: arch === "aarch64" ? "c8g.2xlarge" : "c7i.2xlarge", + // cpuCount: 8, + // threadsPerCore: 1, + // }, + // ); +} + +/** + * @param {Platform} platform + * @returns {Agent} + */ +function getTestAgent(platform) { + const { os, arch } = platform; + + if (os === "darwin") { return { - robobun: true, - robobun2: true, - os: "linux", + queue: `test-${os}`, + os, arch, - distro: "debian", - release: "11", - "image-name": `linux-${arch}-debian-11-v5`, // v5 is not on main yet - "instance-type": instanceType, }; - // TODO: Temporarily disable due to configuration - // return { - // queue: "build-zig", - // }; + } + + // TODO: `dev-server-ssr-110.test.ts` and `next-build.test.ts` run out of memory + // at 8GB of memory, so use 16GB instead. + if (os === "windows") { + return getEc2Agent(platform, { + instanceType: "c7i.2xlarge", + cpuCount: 1, + threadsPerCore: 1, + }); + } + + if (arch === "aarch64") { + return getEc2Agent(platform, { + instanceType: "c8g.xlarge", + cpuCount: 1, + threadsPerCore: 1, + }); + } + + return getEc2Agent(platform, { + instanceType: "c7i.xlarge", + cpuCount: 1, + threadsPerCore: 1, + }); +} + +/** + * Steps + */ + +/** + * @param {Target} target + * @returns {Record} + */ +function getBuildEnv(target) { + const { profile, baseline, canary, abi } = target; + const release = !profile || profile === "release"; + + return { + CMAKE_BUILD_TYPE: release ? "Release" : profile === "debug" ? "Debug" : "RelWithDebInfo", + ENABLE_BASELINE: baseline ? "ON" : "OFF", + ENABLE_CANARY: canary ? "ON" : "OFF", + ENABLE_ASSERTIONS: release ? "OFF" : "ON", + ENABLE_LOGS: release ? "OFF" : "ON", + ABI: abi === "musl" ? "musl" : undefined, }; +} - /** - * @param {Platform} platform - * @returns {Agent} - */ - const getTestAgent = platform => { - const { os, arch, release } = platform; - if (isUsingNewAgent(platform)) { - const instanceType = arch === "aarch64" ? 
"t4g.large" : "t3.large"; - return getEmphemeralAgent("v2", platform, instanceType); - } - if (os === "darwin") { - return { - os, - arch, - release, - queue: "test-darwin", - }; - } - return getEmphemeralAgent("v1", platform); +/** + * @param {Platform} platform + * @returns {Step} + */ +function getBuildVendorStep(platform) { + return { + key: `${getTargetKey(platform)}-build-vendor`, + label: `${getTargetLabel(platform)} - build-vendor`, + agents: getCppAgent(platform), + retry: getRetry(), + cancel_on_build_failing: isMergeQueue(), + env: getBuildEnv(platform), + command: "bun run build:ci --target dependencies", }; +} - /** - * Steps - */ - - /** - * @typedef Step - * @property {string} key - * @property {string} [label] - * @property {Record} [agents] - * @property {Record} [env] - * @property {string} command - * @property {string[]} [depends_on] - * @property {Record} [retry] - * @property {boolean} [cancel_on_build_failing] - * @property {boolean} [soft_fail] - * @property {number} [parallelism] - * @property {number} [concurrency] - * @property {string} [concurrency_group] - * @property {number} [priority] - * @property {number} [timeout_in_minutes] - * @link https://buildkite.com/docs/pipelines/command-step - */ - - /** - * @param {Platform} platform - * @param {string} [step] - * @returns {string[]} - */ - const getDependsOn = (platform, step) => { - if (imagePlatforms.has(getImageKey(platform))) { - const key = `${getImageKey(platform)}-build-image`; - if (key !== step) { - return [key]; - } - } - return []; +/** + * @param {Platform} platform + * @returns {Step} + */ +function getBuildCppStep(platform) { + return { + key: `${getTargetKey(platform)}-build-cpp`, + label: `${getTargetLabel(platform)} - build-cpp`, + agents: getCppAgent(platform), + retry: getRetry(), + cancel_on_build_failing: isMergeQueue(), + env: { + BUN_CPP_ONLY: "ON", + ...getBuildEnv(platform), + }, + command: "bun run build:ci --target bun", }; +} - /** - * @param {Platform} platform - * @returns {Step} - */ - const getBuildImageStep = platform => { - const { os, arch, distro, release } = platform; - const action = publishImages ? 
"publish-image" : "create-image"; - return { - key: `${getImageKey(platform)}-build-image`, - label: `${getImageLabel(platform)} - build-image`, - agents: { - queue: "build-image", - }, - env: { - DEBUG: "1", - }, - retry: getRetry(), - command: `node ./scripts/machine.mjs ${action} --ci --cloud=aws --os=${os} --arch=${arch} --distro=${distro} --distro-version=${release}`, - }; +/** + * @param {Target} target + * @returns {string} + */ +function getBuildToolchain(target) { + const { os, arch, abi, baseline } = target; + let key = `${os}-${arch}`; + if (abi) { + key += `-${abi}`; + } + if (baseline) { + key += "-baseline"; + } + return key; +} + +/** + * @param {Platform} platform + * @returns {Step} + */ +function getBuildZigStep(platform) { + const toolchain = getBuildToolchain(platform); + return { + key: `${getTargetKey(platform)}-build-zig`, + label: `${getTargetLabel(platform)} - build-zig`, + agents: getZigAgent(platform), + retry: getRetry(), + cancel_on_build_failing: isMergeQueue(), + env: getBuildEnv(platform), + command: `bun run build:ci --target bun-zig --toolchain ${toolchain}`, }; +} - /** - * @param {Platform} platform - * @returns {Step} - */ - const getBuildVendorStep = platform => { - return { - key: `${getTargetKey(platform)}-build-vendor`, - label: `${getTargetLabel(platform)} - build-vendor`, - depends_on: getDependsOn(platform), - agents: getBuildAgent(platform), - retry: getRetry(), - cancel_on_build_failing: isMergeQueue(), - env: getBuildEnv(platform), - command: "bun run build:ci --target dependencies", - }; +/** + * @param {Platform} platform + * @returns {Step} + */ +function getLinkBunStep(platform) { + return { + key: `${getTargetKey(platform)}-build-bun`, + label: `${getTargetLabel(platform)} - build-bun`, + depends_on: [ + `${getTargetKey(platform)}-build-vendor`, + `${getTargetKey(platform)}-build-cpp`, + `${getTargetKey(platform)}-build-zig`, + ], + agents: getCppAgent(platform), + retry: getRetry(), + cancel_on_build_failing: isMergeQueue(), + env: { + BUN_LINK_ONLY: "ON", + ...getBuildEnv(platform), + }, + command: "bun run build:ci --target bun", }; +} - /** - * @param {Platform} platform - * @returns {Step} - */ - const getBuildCppStep = platform => { - return { - key: `${getTargetKey(platform)}-build-cpp`, - label: `${getTargetLabel(platform)} - build-cpp`, - depends_on: getDependsOn(platform), - agents: getBuildAgent(platform), - retry: getRetry(), - cancel_on_build_failing: isMergeQueue(), - env: { - BUN_CPP_ONLY: "ON", - ...getBuildEnv(platform), - }, - command: "bun run build:ci --target bun", - }; +/** + * @param {Platform} platform + * @returns {Step} + */ +function getBuildBunStep(platform) { + return { + key: `${getTargetKey(platform)}-build-bun`, + label: `${getTargetLabel(platform)} - build-bun`, + agents: getCppAgent(platform), + retry: getRetry(), + cancel_on_build_failing: isMergeQueue(), + env: getBuildEnv(platform), + command: "bun run build:ci", }; +} - /** - * @param {Platform} platform - * @returns {Step} - */ - const getBuildZigStep = platform => { - const toolchain = getBuildToolchain(platform); - return { - key: `${getTargetKey(platform)}-build-zig`, - label: `${getTargetLabel(platform)} - build-zig`, - depends_on: getDependsOn(platform), - agents: getZigAgent(platform), - retry: getRetry(), - cancel_on_build_failing: isMergeQueue(), - env: getBuildEnv(platform), - command: `bun run build:ci --target bun-zig --toolchain ${toolchain}`, - }; +/** + * @typedef {Object} TestOptions + * @property {string} [buildId] + * @property 
{boolean} [unifiedTests] + * @property {string[]} [testFiles] + */ + +/** + * @param {Platform} platform + * @param {TestOptions} [options] + * @returns {Step} + */ +function getTestBunStep(platform, options = {}) { + const { os } = platform; + const { buildId, unifiedTests, testFiles } = options; + + const args = [`--step=${getTargetKey(platform)}-build-bun`]; + if (buildId) { + args.push(`--build-id=${buildId}`); + } + if (testFiles) { + args.push(...testFiles.map(testFile => `--include=${testFile}`)); + } + + const depends = []; + if (!buildId) { + depends.push(`${getTargetKey(platform)}-build-bun`); + } + + return { + key: `${getPlatformKey(platform)}-test-bun`, + label: `${getPlatformLabel(platform)} - test-bun`, + depends_on: depends, + agents: getTestAgent(platform), + cancel_on_build_failing: isMergeQueue(), + retry: getRetry(), + soft_fail: isMainBranch() ? true : [{ exit_status: 2 }], + parallelism: unifiedTests ? undefined : os === "darwin" ? 2 : 10, + command: + os === "windows" + ? `node .\\scripts\\runner.node.mjs ${args.join(" ")}` + : `./scripts/runner.node.mjs ${args.join(" ")}`, }; +} - /** - * @param {Platform} platform - * @returns {Step} - */ - const getBuildBunStep = platform => { - return { - key: `${getTargetKey(platform)}-build-bun`, - label: `${getTargetLabel(platform)} - build-bun`, - depends_on: [ - `${getTargetKey(platform)}-build-vendor`, - `${getTargetKey(platform)}-build-cpp`, - `${getTargetKey(platform)}-build-zig`, - ], - agents: getBuildAgent(platform), - retry: getRetry(), - cancel_on_build_failing: isMergeQueue(), - env: { - BUN_LINK_ONLY: "ON", - ...getBuildEnv(platform), - }, - command: "bun run build:ci --target bun", - }; +/** + * @param {Platform} platform + * @param {boolean} [dryRun] + * @returns {Step} + */ +function getBuildImageStep(platform, dryRun) { + const { os, arch, distro, release } = platform; + const action = dryRun ? "create-image" : "publish-image"; + const command = [ + "node", + "./scripts/machine.mjs", + action, + `--os=${os}`, + `--arch=${arch}`, + distro && `--distro=${distro}`, + `--release=${release}`, + "--cloud=aws", + "--ci", + "--authorized-org=oven-sh", + ]; + return { + key: `${getImageKey(platform)}-build-image`, + label: `${getImageLabel(platform)} - build-image`, + agents: { + queue: "build-image", + }, + env: { + DEBUG: "1", + }, + retry: getRetry(), + command: command.filter(Boolean).join(" "), + timeout_in_minutes: 3 * 60, }; +} - /** - * @param {Platform} platform - * @returns {Step} - */ - const getTestBunStep = platform => { - const { os } = platform; - let command; - if (os === "windows") { - command = `node .\\scripts\\runner.node.mjs --step ${getTargetKey(platform)}-build-bun`; - } else { - command = `./scripts/runner.node.mjs --step ${getTargetKey(platform)}-build-bun`; - } - let parallelism; - if (os === "darwin") { - parallelism = 2; - } else { - parallelism = 10; - } - let env; - let depends = []; - if (buildId) { - env = { - BUILDKITE_ARTIFACT_BUILD_ID: buildId, - }; - } else { - depends = [`${getTargetKey(platform)}-build-bun`]; - } - let retry; - if (os !== "windows") { - // When the runner fails on Windows, Buildkite only detects an exit code of 1. - // Because of this, we don't know if the run was fatal, or soft-failed. 
- retry = getRetry(1); - } - let soft_fail; - if (isMainBranch()) { - soft_fail = true; - } else { - soft_fail = [{ exit_status: 2 }]; - } - return { - key: `${getPlatformKey(platform)}-test-bun`, - label: `${getPlatformLabel(platform)} - test-bun`, - depends_on: [...depends, ...getDependsOn(platform)], - agents: getTestAgent(platform), - retry, - cancel_on_build_failing: isMergeQueue(), - soft_fail, - parallelism, - command, - env, - }; +/** + * @param {Platform[]} [buildPlatforms] + * @returns {Step} + */ +function getReleaseStep(buildPlatforms) { + return { + key: "release", + label: getBuildkiteEmoji("rocket"), + agents: { + queue: "test-darwin", + }, + depends_on: buildPlatforms.map(platform => `${getTargetKey(platform)}-build-bun`), + command: ".buildkite/scripts/upload-release.sh", }; +} - /** - * Config - */ +/** + * @typedef {Object} Pipeline + * @property {Step[]} [steps] + * @property {number} [priority] + */ - /** - * @type {Platform[]} - */ - const buildPlatforms = [ - { os: "darwin", arch: "aarch64", release: "14" }, - { os: "darwin", arch: "x64", release: "14" }, - { os: "linux", arch: "aarch64", distro: "debian", release: "11" }, - { os: "linux", arch: "x64", distro: "debian", release: "11" }, - { os: "linux", arch: "x64", baseline: true, distro: "debian", release: "11" }, - { os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.20" }, - { os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.20" }, - { os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.20" }, - { os: "windows", arch: "x64", release: "2019" }, - { os: "windows", arch: "x64", baseline: true, release: "2019" }, +/** + * @typedef {Record} Agent + */ + +/** + * @typedef {GroupStep | CommandStep | BlockStep} Step + */ + +/** + * @typedef {Object} GroupStep + * @property {string} key + * @property {string} group + * @property {Step[]} steps + * @property {string[]} [depends_on] + */ + +/** + * @typedef {Object} CommandStep + * @property {string} key + * @property {string} [label] + * @property {Record} [agents] + * @property {Record} [env] + * @property {string} command + * @property {string[]} [depends_on] + * @property {Record} [retry] + * @property {boolean} [cancel_on_build_failing] + * @property {boolean} [soft_fail] + * @property {number} [parallelism] + * @property {number} [concurrency] + * @property {string} [concurrency_group] + * @property {number} [priority] + * @property {number} [timeout_in_minutes] + * @link https://buildkite.com/docs/pipelines/command-step + */ + +/** + * @typedef {Object} BlockStep + * @property {string} key + * @property {string} block + * @property {string} [prompt] + * @property {"passed" | "failed" | "running"} [blocked_state] + * @property {(SelectInput | TextInput)[]} [fields] + */ + +/** + * @typedef {Object} TextInput + * @property {string} key + * @property {string} text + * @property {string} [default] + * @property {boolean} [required] + * @property {string} [hint] + */ + +/** + * @typedef {Object} SelectInput + * @property {string} key + * @property {string} select + * @property {string | string[]} [default] + * @property {boolean} [required] + * @property {boolean} [multiple] + * @property {string} [hint] + * @property {SelectOption[]} [options] + */ + +/** + * @typedef {Object} SelectOption + * @property {string} label + * @property {string} value + */ + +/** + * @typedef {Object} PipelineOptions + * @property {string | boolean} [skipEverything] + * @property {string | boolean} [skipBuilds] + * 
@property {string | boolean} [skipTests] + * @property {string | boolean} [forceBuilds] + * @property {string | boolean} [forceTests] + * @property {string | boolean} [buildImages] + * @property {string | boolean} [publishImages] + * @property {boolean} [canary] + * @property {Profile[]} [buildProfiles] + * @property {Platform[]} [buildPlatforms] + * @property {Platform[]} [testPlatforms] + * @property {string[]} [testFiles] + * @property {boolean} [unifiedBuilds] + * @property {boolean} [unifiedTests] + */ + +/** + * @param {Step} step + * @param {(string | undefined)[]} dependsOn + * @returns {Step} + */ +function getStepWithDependsOn(step, ...dependsOn) { + const { depends_on: existingDependsOn = [] } = step; + return { + ...step, + depends_on: [...existingDependsOn, ...dependsOn.filter(Boolean)], + }; +} + +/** + * @returns {BlockStep} + */ +function getOptionsStep() { + const booleanOptions = [ + { + label: `${getEmoji("true")} Yes`, + value: "true", + }, + { + label: `${getEmoji("false")} No`, + value: "false", + }, ]; - /** - * @type {Platform[]} - */ - const testPlatforms = [ - { os: "darwin", arch: "aarch64", release: "14" }, - { os: "darwin", arch: "aarch64", release: "13" }, - { os: "darwin", arch: "x64", release: "14" }, - { os: "darwin", arch: "x64", release: "13" }, - { os: "linux", arch: "aarch64", distro: "debian", release: "12" }, - { os: "linux", arch: "aarch64", distro: "debian", release: "11" }, - { os: "linux", arch: "x64", distro: "debian", release: "12" }, - { os: "linux", arch: "x64", distro: "debian", release: "11" }, - { os: "linux", arch: "x64", baseline: true, distro: "debian", release: "12" }, - { os: "linux", arch: "x64", baseline: true, distro: "debian", release: "11" }, - { os: "linux", arch: "aarch64", distro: "ubuntu", release: "22.04" }, - { os: "linux", arch: "aarch64", distro: "ubuntu", release: "20.04" }, - { os: "linux", arch: "x64", distro: "ubuntu", release: "22.04" }, - { os: "linux", arch: "x64", distro: "ubuntu", release: "20.04" }, - { os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "22.04" }, - { os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "20.04" }, - { os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.20" }, - { os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.20" }, - { os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.20" }, - { os: "windows", arch: "x64", release: "2019" }, - { os: "windows", arch: "x64", baseline: true, release: "2019" }, - ]; + return { + key: "options", + block: getBuildkiteEmoji("clipboard"), + blocked_state: "running", + fields: [ + { + key: "canary", + select: "If building, is this a canary build?", + hint: "If you are building for a release, this should be false", + required: false, + default: "true", + options: booleanOptions, + }, + { + key: "skip-builds", + select: "Do you want to skip the build?", + hint: "If true, artifacts will be downloaded from the last successful build", + required: false, + default: "false", + options: booleanOptions, + }, + { + key: "skip-tests", + select: "Do you want to skip the tests?", + required: false, + default: "false", + options: booleanOptions, + }, + { + key: "force-builds", + select: "Do you want to force run the build?", + hint: "If true, the build will run even if no source files have changed", + required: false, + default: "false", + options: booleanOptions, + }, + { + key: "force-tests", + select: "Do you want to force run the tests?", + hint: "If 
true, the tests will run even if no test files have changed", + required: false, + default: "false", + options: booleanOptions, + }, + { + key: "build-profiles", + select: "If building, which profiles do you want to build?", + required: false, + multiple: true, + default: ["release"], + options: [ + { + label: `${getEmoji("release")} Release`, + value: "release", + }, + { + label: `${getEmoji("assert")} Release with Assertions`, + value: "assert", + }, + { + label: `${getEmoji("debug")} Debug`, + value: "debug", + }, + ], + }, + { + key: "build-platforms", + select: "If building, which platforms do you want to build?", + hint: "If this is left blank, all platforms are built", + required: false, + multiple: true, + default: [], + options: buildPlatforms.map(platform => { + const { os, arch, abi, baseline } = platform; + let label = `${getEmoji(os)} ${arch}`; + if (abi) { + label += `-${abi}`; + } + if (baseline) { + label += `-baseline`; + } + return { + label, + value: getTargetKey(platform), + }; + }), + }, + { + key: "test-platforms", + select: "If testing, which platforms do you want to test?", + hint: "If this is left blank, all platforms are tested", + required: false, + multiple: true, + default: [], + options: [...new Map(testPlatforms.map(platform => [getImageKey(platform), platform])).entries()].map( + ([key, platform]) => { + const { os, arch, abi, distro, release } = platform; + let label = `${getEmoji(os)} ${arch}`; + if (abi) { + label += `-${abi}`; + } + if (distro) { + label += ` ${distro}`; + } + if (release) { + label += ` ${release}`; + } + return { + label, + value: key, + }; + }, + ), + }, + { + key: "test-files", + text: "If testing, which files do you want to test?", + hint: "If specified, only run test paths that include the list of strings (e.g. 
'test/js', 'test/cli/hot/watch.ts')", + required: false, + }, + { + key: "build-images", + select: "Do you want to re-build the base images?", + hint: "This can take 2-3 hours to complete, only do so if you've tested locally", + required: false, + default: "false", + options: booleanOptions, + }, + { + key: "publish-images", + select: "Do you want to re-build and publish the base images?", + hint: "This can take 2-3 hours to complete, only do so if you've tested locally", + required: false, + default: "false", + options: booleanOptions, + }, + { + key: "unified-builds", + select: "Do you want to build each platform in a single step?", + hint: "If true, builds will not be split into separate steps (this will likely slow down the build)", + required: false, + default: "false", + options: booleanOptions, + }, + { + key: "unified-tests", + select: "Do you want to run tests in a single step?", + hint: "If true, tests will not be split into separate steps (this will be very slow)", + required: false, + default: "false", + options: booleanOptions, + }, + ], + }; +} +/** + * @returns {Step} + */ +function getOptionsApplyStep() { + const command = getEnv("BUILDKITE_COMMAND"); + return { + key: "options-apply", + label: getBuildkiteEmoji("gear"), + command: `${command} --apply`, + depends_on: ["options"], + agents: { + queue: getEnv("BUILDKITE_AGENT_META_DATA_QUEUE", false), + }, + }; +} + +/** + * @returns {Promise} + */ +async function getPipelineOptions() { + const isManual = isBuildManual(); + if (isManual && !process.argv.includes("--apply")) { + return; + } + + const buildPlatformsMap = new Map(buildPlatforms.map(platform => [getTargetKey(platform), platform])); + const testPlatformsMap = new Map(testPlatforms.map(platform => [getPlatformKey(platform), platform])); + + if (isManual) { + const { fields } = getOptionsStep(); + const keys = fields?.map(({ key }) => key) ?? []; + const values = await Promise.all(keys.map(getBuildMetadata)); + const options = Object.fromEntries(keys.map((key, index) => [key, values[index]])); + + /** + * @param {string} value + * @returns {string[] | undefined} + */ + const parseArray = value => + value + ?.split("\n") + ?.map(item => item.trim()) + ?.filter(Boolean); + + const buildPlatformKeys = parseArray(options["build-platforms"]); + const testPlatformKeys = parseArray(options["test-platforms"]); + return { + canary: parseBoolean(options["canary"]), + skipBuilds: parseBoolean(options["skip-builds"]), + forceBuilds: parseBoolean(options["force-builds"]), + skipTests: parseBoolean(options["skip-tests"]), + testFiles: parseArray(options["test-files"]), + buildImages: parseBoolean(options["build-images"]), + publishImages: parseBoolean(options["publish-images"]), + unifiedBuilds: parseBoolean(options["unified-builds"]), + unifiedTests: parseBoolean(options["unified-tests"]), + buildProfiles: parseArray(options["build-profiles"]), + buildPlatforms: buildPlatformKeys?.length + ? buildPlatformKeys.map(key => buildPlatformsMap.get(key)) + : Array.from(buildPlatformsMap.values()), + testPlatforms: testPlatformKeys?.length + ? 
testPlatformKeys.map(key => testPlatformsMap.get(key)) + : Array.from(testPlatformsMap.values()), + }; + } + + const commitMessage = getCommitMessage(); + + /** + * @param {RegExp} pattern + * @returns {string | boolean} + */ + const parseOption = pattern => { + const match = pattern.exec(commitMessage); + if (match) { + const [, value] = match; + return value; + } + return false; + }; + + return { + canary: + !parseBoolean(getEnv("RELEASE", false) || "false") && + !/\[(release|build release|release build)\]/i.test(commitMessage), + skipEverything: parseOption(/\[(skip ci|no ci)\]/i), + skipBuilds: parseOption(/\[(skip builds?|no builds?|only tests?)\]/i), + forceBuilds: parseOption(/\[(force builds?)\]/i), + skipTests: parseOption(/\[(skip tests?|no tests?|only builds?)\]/i), + buildPlatforms: Array.from(buildPlatformsMap.values()), + testPlatforms: Array.from(testPlatformsMap.values()), + buildProfiles: ["release"], + }; +} + +/** + * @param {PipelineOptions} [options] + * @returns {Promise} + */ +async function getPipeline(options = {}) { + const priority = getPriority(); + + if (isBuildManual() && !Object.keys(options).length) { + return { + priority, + steps: [getOptionsStep(), getOptionsApplyStep()], + }; + } + + const { skipEverything } = options; + if (skipEverything) { + return; + } + + const { buildProfiles = [], buildPlatforms = [], testPlatforms = [], buildImages, publishImages } = options; const imagePlatforms = new Map( - [...buildPlatforms, ...testPlatforms] - .filter(platform => buildImages && isUsingNewAgent(platform)) - .map(platform => [getImageKey(platform), platform]), + buildImages || publishImages + ? [...buildPlatforms, ...testPlatforms] + .filter(({ os }) => os === "linux" || os === "windows") + .map(platform => [getImageKey(platform), platform]) + : [], ); - /** - * @type {Step[]} - */ + /** @type {Step[]} */ const steps = []; if (imagePlatforms.size) { steps.push({ - group: ":docker:", - steps: [...imagePlatforms.values()].map(platform => getBuildImageStep(platform)), + key: "build-images", + group: getBuildkiteEmoji("aws"), + steps: [...imagePlatforms.values()].map(platform => getBuildImageStep(platform, !publishImages)), }); } - for (const platform of buildPlatforms) { - const { os, arch, abi, baseline } = platform; + const { skipBuilds, forceBuilds, unifiedBuilds } = options; - /** @type {Step[]} */ - const platformSteps = []; - - if (buildImages || !buildId) { - platformSteps.push( - getBuildVendorStep(platform), - getBuildCppStep(platform), - getBuildZigStep(platform), - getBuildBunStep(platform), - ); + /** @type {string | undefined} */ + let buildId; + if (skipBuilds && !forceBuilds) { + const lastBuild = await getLastSuccessfulBuild(); + if (lastBuild) { + const { id } = lastBuild; + buildId = id; + } else { + console.warn("No last successful build found, must force builds..."); } + } - if (!skipTests) { - platformSteps.push( - ...testPlatforms - .filter( - testPlatform => - testPlatform.os === os && - testPlatform.arch === arch && - testPlatform.abi === abi && - testPlatform.baseline === baseline, - ) - .map(testPlatform => getTestBunStep(testPlatform)), - ); - } + if (!buildId) { + steps.push( + ...buildPlatforms + .flatMap(platform => buildProfiles.map(profile => ({ ...platform, profile }))) + .map(target => { + const imageKey = getImageKey(target); + const imagePlatform = imagePlatforms.get(imageKey); - if (!platformSteps.length) { + return getStepWithDependsOn( + { + key: getTargetKey(target), + group: getTargetLabel(target), + steps: unifiedBuilds + 
? [getBuildBunStep(target)] + : [ + getBuildVendorStep(target), + getBuildCppStep(target), + getBuildZigStep(target), + getLinkBunStep(target), + ], + }, + imagePlatform ? `${imageKey}-build-image` : undefined, + ); + }), + ); + } + + const { skipTests, forceTests, unifiedTests, testFiles } = options; + if (!skipTests || forceTests) { + steps.push( + ...testPlatforms + .flatMap(platform => buildProfiles.map(profile => ({ ...platform, profile }))) + .map(target => ({ + key: getTargetKey(target), + group: getTargetLabel(target), + steps: [getTestBunStep(target, { unifiedTests, testFiles, buildId })], + })), + ); + } + + if (isMainBranch()) { + steps.push(getReleaseStep(buildPlatforms)); + } + + /** @type {Map} */ + const stepsByGroup = new Map(); + + for (let i = 0; i < steps.length; i++) { + const step = steps[i]; + if (!("group" in step)) { continue; } - steps.push({ - key: getTargetKey(platform), - group: getTargetLabel(platform), - steps: platformSteps, - }); - } + const { group, steps: groupSteps } = step; + if (stepsByGroup.has(group)) { + stepsByGroup.get(group).steps.push(...groupSteps); + } else { + stepsByGroup.set(group, step); + } - if (isMainBranch() && !isFork()) { - steps.push({ - label: ":github:", - agents: { - queue: "test-darwin", - }, - depends_on: buildPlatforms.map(platform => `${getTargetKey(platform)}-build-bun`), - command: ".buildkite/scripts/upload-release.sh", - }); + steps[i] = undefined; } return { - priority: getPriority(), - steps, + priority, + steps: [...steps.filter(step => typeof step !== "undefined"), ...Array.from(stepsByGroup.values())], }; } async function main() { - printEnvironment(); - - console.log("Checking last successful build..."); - const lastBuild = await getLastSuccessfulBuild(); - if (lastBuild) { - const { id, path, commit_id: commit } = lastBuild; - console.log(" - Build ID:", id); - console.log(" - Build URL:", new URL(path, "https://buildkite.com/").toString()); - console.log(" - Commit:", commit); - } else { - console.log(" - No build found"); + startGroup("Generating options..."); + const options = await getPipelineOptions(); + if (options) { + console.log("Generated options:", options); } - let changedFiles; - let changedFilesBranch; - if (!isFork() && !isMainBranch()) { - console.log("Checking changed files..."); - const targetRef = getTargetBranch(); - console.log(" - Target Ref:", targetRef); - const baseRef = lastBuild?.commit_id || targetRef || getMainBranch(); - console.log(" - Base Ref:", baseRef); - const headRef = getCommit(); - console.log(" - Head Ref:", headRef); - - changedFiles = await getChangedFiles(undefined, baseRef, headRef); - changedFilesBranch = await getChangedFiles(undefined, targetRef, headRef); - if (changedFiles) { - if (changedFiles.length) { - changedFiles.forEach(filename => console.log(` - ${filename}`)); - } else { - console.log(" - No changed files"); - } - } + startGroup("Generating pipeline..."); + const pipeline = await getPipeline(options); + if (!pipeline) { + console.log("Generated pipeline is empty, skipping..."); + return; } - const isDocumentationFile = filename => /^(\.vscode|\.github|bench|docs|examples)|\.(md)$/i.test(filename); - const isTestFile = filename => /^test/i.test(filename) || /runner\.node\.mjs$/i.test(filename); - - console.log("Checking if CI should be forced..."); - let forceBuild; - let ciFileChanged; - { - const message = getCommitMessage(); - const match = /\[(force ci|ci force|ci force build)\]/i.exec(message); - if (match) { - const [, reason] = match; - console.log(" - 
Yes, because commit message contains:", reason); - forceBuild = true; - } - for (const coref of [".buildkite/ci.mjs", "scripts/utils.mjs", "scripts/bootstrap.sh", "scripts/machine.mjs"]) { - if (changedFilesBranch && changedFilesBranch.includes(coref)) { - console.log(" - Yes, because the list of changed files contains:", coref); - forceBuild = true; - ciFileChanged = true; - } - } - } - - console.log("Checking if CI should be skipped..."); - if (!forceBuild) { - const message = getCommitMessage(); - const match = /\[(skip ci|no ci|ci skip|ci no)\]/i.exec(message); - if (match) { - const [, reason] = match; - console.log(" - Yes, because commit message contains:", reason); - return; - } - if (changedFiles && changedFiles.every(filename => isDocumentationFile(filename))) { - console.log(" - Yes, because all changed files are documentation"); - return; - } - } - - console.log("Checking if CI should re-build images..."); - let buildImages; - { - const message = getCommitMessage(); - const match = /\[(build images?|images? build)\]/i.exec(message); - if (match) { - const [, reason] = match; - console.log(" - Yes, because commit message contains:", reason); - buildImages = true; - } - if (ciFileChanged) { - console.log(" - Yes, because a core CI file changed"); - buildImages = true; - } - } - - console.log("Checking if CI should publish images..."); - let publishImages; - { - const message = getCommitMessage(); - const match = /\[(publish images?|images? publish)\]/i.exec(message); - if (match) { - const [, reason] = match; - console.log(" - Yes, because commit message contains:", reason); - publishImages = true; - buildImages = true; - } - if (ciFileChanged && isMainBranch()) { - console.log(" - Yes, because a core CI file changed and this is main branch"); - publishImages = true; - buildImages = true; - } - } - - console.log("Checking if build should be skipped..."); - let skipBuild; - if (!forceBuild) { - const message = getCommitMessage(); - const match = /\[(only tests?|tests? only|skip build|no build|build skip|build no)\]/i.exec(message); - if (match) { - const [, reason] = match; - console.log(" - Yes, because commit message contains:", reason); - skipBuild = true; - } - if (changedFiles && changedFiles.every(filename => isTestFile(filename) || isDocumentationFile(filename))) { - console.log(" - Yes, because all changed files are tests or documentation"); - skipBuild = true; - } - } - - console.log("Checking if tests should be skipped..."); - let skipTests; - { - const message = getCommitMessage(); - const match = /\[(skip tests?|tests? skip|no tests?|tests? no)\]/i.exec(message); - if (match) { - console.log(" - Yes, because commit message contains:", match[1]); - skipTests = true; - } - if (isMainBranch()) { - console.log(" - Yes, because we're on main branch"); - skipTests = true; - } - } - - console.log("Checking if build is a named release..."); - let buildRelease; - if (/^(1|true|on|yes)$/i.test(getEnv("RELEASE", false))) { - console.log(" - Yes, because RELEASE environment variable is set"); - buildRelease = true; - } else { - const message = getCommitMessage(); - const match = /\[(release|release build|build release)\]/i.exec(message); - if (match) { - const [, reason] = match; - console.log(" - Yes, because commit message contains:", reason); - buildRelease = true; - } - } - - console.log("Generating pipeline..."); - const pipeline = getPipeline({ - buildId: lastBuild && skipBuild && !forceBuild ? 
lastBuild.id : undefined, - buildImages, - publishImages, - skipTests, - }); - const content = toYaml(pipeline); const contentPath = join(process.cwd(), ".buildkite", "ci.yml"); - writeFileSync(contentPath, content); + writeFile(contentPath, content); console.log("Generated pipeline:"); console.log(" - Path:", contentPath); console.log(" - Size:", (content.length / 1024).toFixed(), "KB"); - if (isBuildkite) { - await uploadArtifact(contentPath); - } if (isBuildkite) { - console.log("Setting canary revision..."); - const canaryRevision = buildRelease ? 0 : await getCanaryRevision(); - await spawnSafe(["buildkite-agent", "meta-data", "set", "canary", `${canaryRevision}`], { stdio: "inherit" }); - - console.log("Uploading pipeline..."); - await spawnSafe(["buildkite-agent", "pipeline", "upload", contentPath], { stdio: "inherit" }); + startGroup("Uploading pipeline..."); + try { + await spawnSafe(["buildkite-agent", "pipeline", "upload", contentPath], { stdio: "inherit" }); + } finally { + await uploadArtifact(contentPath); + } } } diff --git a/.buildkite/scripts/prepare-build.sh b/.buildkite/scripts/prepare-build.sh index a76370fd7c..b0b3f9f37e 100755 --- a/.buildkite/scripts/prepare-build.sh +++ b/.buildkite/scripts/prepare-build.sh @@ -8,4 +8,4 @@ function run_command() { { set +x; } 2>/dev/null } -run_command node ".buildkite/ci.mjs" +run_command node ".buildkite/ci.mjs" "$@" diff --git a/cmake/Options.cmake b/cmake/Options.cmake index d6cc8582ea..201bf8c8e1 100644 --- a/cmake/Options.cmake +++ b/cmake/Options.cmake @@ -20,7 +20,7 @@ else() setx(RELEASE OFF) endif() -if(CMAKE_BUILD_TYPE MATCHES "Debug|RelWithDebInfo") +if(CMAKE_BUILD_TYPE MATCHES "Debug") setx(DEBUG ON) else() setx(DEBUG OFF) diff --git a/cmake/targets/BuildLolHtml.cmake b/cmake/targets/BuildLolHtml.cmake index 934f8d0be9..3b0d80a723 100644 --- a/cmake/targets/BuildLolHtml.cmake +++ b/cmake/targets/BuildLolHtml.cmake @@ -49,6 +49,8 @@ register_command( CARGO_TERM_VERBOSE=true CARGO_TERM_DIAGNOSTIC=true CARGO_ENCODED_RUSTFLAGS=${RUSTFLAGS} + CARGO_HOME=${CARGO_HOME} + RUSTUP_HOME=${RUSTUP_HOME} ) target_link_libraries(${bun} PRIVATE ${LOLHTML_LIBRARY}) diff --git a/cmake/tools/SetupRust.cmake b/cmake/tools/SetupRust.cmake index a83b28bc5f..8a45d243eb 100644 --- a/cmake/tools/SetupRust.cmake +++ b/cmake/tools/SetupRust.cmake @@ -1,15 +1,42 @@ +if(DEFINED ENV{CARGO_HOME}) + set(CARGO_HOME $ENV{CARGO_HOME}) +elseif(CMAKE_HOST_WIN32) + set(CARGO_HOME $ENV{USERPROFILE}/.cargo) + if(NOT EXISTS ${CARGO_HOME}) + set(CARGO_HOME $ENV{PROGRAMFILES}/Rust/cargo) + endif() +else() + set(CARGO_HOME $ENV{HOME}/.cargo) +endif() + +if(DEFINED ENV{RUSTUP_HOME}) + set(RUSTUP_HOME $ENV{RUSTUP_HOME}) +elseif(CMAKE_HOST_WIN32) + set(RUSTUP_HOME $ENV{USERPROFILE}/.rustup) + if(NOT EXISTS ${RUSTUP_HOME}) + set(RUSTUP_HOME $ENV{PROGRAMFILES}/Rust/rustup) + endif() +else() + set(RUSTUP_HOME $ENV{HOME}/.rustup) +endif() + find_command( VARIABLE CARGO_EXECUTABLE COMMAND cargo PATHS - $ENV{HOME}/.cargo/bin + ${CARGO_HOME}/bin REQUIRED OFF ) if(EXISTS ${CARGO_EXECUTABLE}) + if(CARGO_EXECUTABLE MATCHES "^${CARGO_HOME}") + setx(CARGO_HOME ${CARGO_HOME}) + setx(RUSTUP_HOME ${RUSTUP_HOME}) + endif() + return() endif() diff --git a/scripts/agent.mjs b/scripts/agent.mjs index e40b694f6e..e94f0658d0 100755 --- a/scripts/agent.mjs +++ b/scripts/agent.mjs @@ -20,6 +20,8 @@ import { getEnv, writeFile, spawnSafe, + spawn, + mkdir, } from "./utils.mjs"; import { parseArgs } from "node:util"; @@ -49,16 +51,19 @@ async function doBuildkiteAgent(action) { const args = 
[realpathSync(process.argv[1]), "start"]; if (isWindows) { - const serviceCommand = [ - "New-Service", - "-Name", - "buildkite-agent", - "-StartupType", - "Automatic", - "-BinaryPathName", - `${escape(command)} ${escape(args.map(escape).join(" "))}`, + mkdir(logsPath); + + const nssm = which("nssm", { required: true }); + const nssmCommands = [ + [nssm, "install", "buildkite-agent", command, ...args], + [nssm, "set", "buildkite-agent", "Start", "SERVICE_AUTO_START"], + [nssm, "set", "buildkite-agent", "AppDirectory", homePath], + [nssm, "set", "buildkite-agent", "AppStdout", agentLogPath], + [nssm, "set", "buildkite-agent", "AppStderr", agentLogPath], ]; - await spawnSafe(["powershell", "-Command", serviceCommand.join(" ")], { stdio: "inherit" }); + for (const command of nssmCommands) { + await spawnSafe(command, { stdio: "inherit" }); + } } if (isOpenRc()) { @@ -124,13 +129,21 @@ async function doBuildkiteAgent(action) { token = await getCloudMetadataTag("buildkite:token"); } + if (!token) { + throw new Error( + "Buildkite token not found: either set BUILDKITE_AGENT_TOKEN or add a buildkite:token label to the instance", + ); + } + let shell; if (isWindows) { - const pwsh = which(["pwsh", "powershell"], { required: true }); - shell = `${pwsh} -Command`; + // Command Prompt has a faster startup time than PowerShell. + // Also, it propagates the exit code of the command, which PowerShell does not. + const cmd = which("cmd", { required: true }); + shell = `"${cmd}" /S /C`; } else { - const sh = which(["bash", "sh"], { required: true }); - shell = `${sh} -c`; + const sh = which("sh", { required: true }); + shell = `${sh} -e -c`; } const flags = ["enable-job-log-tmpfile", "no-feature-reporting"]; diff --git a/scripts/bootstrap.ps1 b/scripts/bootstrap.ps1 index eda27d917a..e9a698c941 100755 --- a/scripts/bootstrap.ps1 +++ b/scripts/bootstrap.ps1 @@ -1,6 +1,6 @@ -# Version: 4 -# A powershell script that installs the dependencies needed to build and test Bun. -# This should work on Windows 10 or newer. +# Version: 7 +# A script that installs the dependencies needed to build and test Bun. +# This should work on Windows 10 or newer with PowerShell. # If this script does not work on your machine, please open an issue: # https://github.com/oven-sh/bun/issues @@ -16,6 +16,9 @@ param ( [switch]$Optimize = $CI ) +$ErrorActionPreference = "Stop" +Set-ExecutionPolicy -Scope Process -ExecutionPolicy Bypass -Force + function Execute-Command { $command = $args -join ' ' Write-Output "$ $command" @@ -43,6 +46,47 @@ function Which { } } +function Execute-Script { + param ( + [Parameter(Mandatory = $true, Position = 0)] + [string]$Path + ) + + $pwsh = Which pwsh powershell -Required + Execute-Command $pwsh $Path +} + +function Download-File { + param ( + [Parameter(Mandatory = $true, Position = 0)] + [string]$Url, + [Parameter(Mandatory = $false)] + [string]$Name, + [Parameter(Mandatory = $false)] + [string]$Path + ) + + if (-not $Name) { + $Name = [System.IO.Path]::ChangeExtension([System.IO.Path]::GetRandomFileName(), [System.IO.Path]::GetExtension($Url)) + } + + if (-not $Path) { + $Path = "$env:TEMP\$Name" + } + + $client = New-Object System.Net.WebClient + for ($i = 0; $i -lt 10 -and -not (Test-Path $Path); $i++) { + try { + $client.DownloadFile($Url, $Path) + } catch { + Write-Warning "Failed to download $Url, retry $i..." 
+ Start-Sleep -s $i + } + } + + return $Path +} + function Install-Chocolatey { if (Which choco) { return @@ -50,7 +94,8 @@ function Install-Chocolatey { Write-Output "Installing Chocolatey..." [System.Net.ServicePointManager]::SecurityProtocol = [System.Net.ServicePointManager]::SecurityProtocol -bor 3072 - iex -Command ((New-Object System.Net.WebClient).DownloadString('https://community.chocolatey.org/install.ps1')) + $installScript = Download-File "https://community.chocolatey.org/install.ps1" + Execute-Script $installScript Refresh-Path } @@ -96,10 +141,23 @@ function Add-To-Path { } Write-Output "Adding $absolutePath to PATH..." - [Environment]::SetEnvironmentVariable("Path", $newPath, "Machine") + [Environment]::SetEnvironmentVariable("Path", "$newPath", "Machine") Refresh-Path } +function Set-Env { + param ( + [Parameter(Mandatory = $true, Position = 0)] + [string]$Name, + [Parameter(Mandatory = $true, Position = 1)] + [string]$Value + ) + + Write-Output "Setting environment variable $Name=$Value..." + [System.Environment]::SetEnvironmentVariable("$Name", "$Value", "Machine") + [System.Environment]::SetEnvironmentVariable("$Name", "$Value", "Process") +} + function Install-Package { param ( [Parameter(Mandatory = $true, Position = 0)] @@ -137,7 +195,7 @@ function Install-Package { function Install-Packages { foreach ($package in $args) { - Install-Package -Name $package + Install-Package $package } } @@ -145,12 +203,13 @@ function Install-Common-Software { Install-Chocolatey Install-Pwsh Install-Git - Install-Packages curl 7zip + Install-Packages curl 7zip nssm Install-NodeJs Install-Bun Install-Cygwin if ($CI) { - Install-Tailscale + # FIXME: Installing tailscale causes the AWS metadata server to become unreachable + # Install-Tailscale Install-Buildkite } } @@ -204,12 +263,13 @@ function Install-Buildkite { Write-Output "Installing Buildkite agent..." 
$env:buildkiteAgentToken = "xxx" - iex ((New-Object System.Net.WebClient).DownloadString("https://raw.githubusercontent.com/buildkite/agent/main/install.ps1")) + $installScript = Download-File "https://raw.githubusercontent.com/buildkite/agent/main/install.ps1" + Execute-Script $installScript Refresh-Path } function Install-Build-Essentials { - # Install-Visual-Studio + Install-Visual-Studio Install-Packages ` cmake ` make ` @@ -219,41 +279,42 @@ function Install-Build-Essentials { golang ` nasm ` ruby ` + strawberryperl ` mingw Install-Rust Install-Llvm } function Install-Visual-Studio { - $components = @( - "Microsoft.VisualStudio.Workload.NativeDesktop", - "Microsoft.VisualStudio.Component.Windows10SDK.18362", - "Microsoft.VisualStudio.Component.Windows11SDK.22000", - "Microsoft.VisualStudio.Component.Windows11Sdk.WindowsPerformanceToolkit", - "Microsoft.VisualStudio.Component.VC.ASAN", # C++ AddressSanitizer - "Microsoft.VisualStudio.Component.VC.ATL", # C++ ATL for latest v143 build tools (x86 & x64) - "Microsoft.VisualStudio.Component.VC.DiagnosticTools", # C++ Diagnostic Tools - "Microsoft.VisualStudio.Component.VC.CLI.Support", # C++/CLI support for v143 build tools (Latest) - "Microsoft.VisualStudio.Component.VC.CoreIde", # C++ core features - "Microsoft.VisualStudio.Component.VC.Redist.14.Latest" # C++ 2022 Redistributable Update + param ( + [Parameter(Mandatory = $false)] + [string]$Edition = "community" ) - $arch = (Get-WmiObject Win32_Processor).Architecture - if ($arch -eq 9) { - $components += @( - "Microsoft.VisualStudio.Component.VC.Tools.x86.x64", # MSVC v143 build tools (x86 & x64) - "Microsoft.VisualStudio.Component.VC.Modules.x86.x64" # MSVC v143 C++ Modules for latest v143 build tools (x86 & x64) - ) - } elseif ($arch -eq 5) { - $components += @( - "Microsoft.VisualStudio.Component.VC.Tools.ARM64", # MSVC v143 build tools (ARM64) - "Microsoft.VisualStudio.Component.UWP.VC.ARM64" # C++ Universal Windows Platform support for v143 build tools (ARM64/ARM64EC) - ) - } + Write-Output "Downloading Visual Studio installer..." + $vsInstaller = Download-File "https://aka.ms/vs/17/release/vs_$Edition.exe" - $packageParameters = $components | ForEach-Object { "--add $_" } - Install-Package visualstudio2022community ` - -ExtraArgs "--package-parameters '--add Microsoft.VisualStudio.Workload.NativeDesktop --includeRecommended --includeOptional'" + Write-Output "Installing Visual Studio..." + $vsInstallArgs = @( + "--passive", + "--norestart", + "--wait", + "--force", + "--locale en-US", + "--add Microsoft.VisualStudio.Workload.NativeDesktop", + "--includeRecommended" + ) + $startInfo = New-Object System.Diagnostics.ProcessStartInfo + $startInfo.FileName = $vsInstaller + $startInfo.Arguments = $vsInstallArgs -join ' ' + $startInfo.CreateNoWindow = $true + $process = New-Object System.Diagnostics.Process + $process.StartInfo = $startInfo + $process.Start() + $process.WaitForExit() + if ($process.ExitCode -ne 0) { + throw "Failed to install Visual Studio: code $($process.ExitCode)" + } } function Install-Rust { @@ -261,18 +322,31 @@ function Install-Rust { return } + Write-Output "Installing Rustup..." + $rustupInit = Download-File "https://win.rustup.rs/" -Name "rustup-init.exe" + Write-Output "Installing Rust..." - $rustupInit = "$env:TEMP\rustup-init.exe" - (New-Object System.Net.WebClient).DownloadFile("https://win.rustup.rs/", $rustupInit) Execute-Command $rustupInit -y - Add-To-Path "$env:USERPROFILE\.cargo\bin" + + Write-Output "Moving Rust to $env:ProgramFiles..." 
+ $rustPath = Join-Path $env:ProgramFiles "Rust" + if (-not (Test-Path $rustPath)) { + New-Item -Path $rustPath -ItemType Directory + } + Move-Item "$env:UserProfile\.cargo" "$rustPath\cargo" -Force + Move-Item "$env:UserProfile\.rustup" "$rustPath\rustup" -Force + + Write-Output "Setting environment variables for Rust..." + Set-Env "CARGO_HOME" "$rustPath\cargo" + Set-Env "RUSTUP_HOME" "$rustPath\rustup" + Add-To-Path "$rustPath\cargo\bin" } function Install-Llvm { Install-Package llvm ` -Command clang-cl ` -Version "18.1.8" - Add-To-Path "C:\Program Files\LLVM\bin" + Add-To-Path "$env:ProgramFiles\LLVM\bin" } function Optimize-System { @@ -280,6 +354,9 @@ function Optimize-System { Disable-Windows-Threat-Protection Disable-Windows-Services Disable-Power-Management +} + +function Optimize-System-Needs-Reboot { Uninstall-Windows-Defender } @@ -319,7 +396,7 @@ function Disable-Windows-Services { } function Disable-Power-Management { - Write-Output "Disabling power management features..." + Write-Output "Disabling Power Management..." powercfg /setactive 8c5e7fda-e8bf-4a96-9a85-a6e23a8c635c # High performance powercfg /change monitor-timeout-ac 0 powercfg /change monitor-timeout-dc 0 @@ -329,7 +406,6 @@ function Disable-Power-Management { powercfg /change hibernate-timeout-dc 0 } -Set-ExecutionPolicy -Scope Process -ExecutionPolicy Bypass -Force if ($Optimize) { Optimize-System } @@ -337,3 +413,6 @@ if ($Optimize) { Install-Common-Software Install-Build-Essentials +if ($Optimize) { + Optimize-System-Needs-Reboot +} diff --git a/scripts/bootstrap.sh b/scripts/bootstrap.sh index 871340f1fc..18defcfe88 100755 --- a/scripts/bootstrap.sh +++ b/scripts/bootstrap.sh @@ -1,5 +1,5 @@ #!/bin/sh -# Version: 5 +# Version: 7 # A script that installs the dependencies needed to build and test Bun. # This should work on macOS and Linux with a POSIX shell. @@ -92,7 +92,7 @@ download_file() { execute chmod 755 "$tmp" path="$tmp/$filename" - fetch "$url" > "$path" + fetch "$url" >"$path" execute chmod 644 "$path" print "$path" @@ -112,14 +112,23 @@ append_to_file() { file="$1" content="$2" - if ! [ -f "$file" ]; then + file_needs_sudo="0" + if [ -f "$file" ]; then + if ! [ -r "$file" ] || ! [ -w "$file" ]; then + file_needs_sudo="1" + fi + else execute_as_user mkdir -p "$(dirname "$file")" execute_as_user touch "$file" fi echo "$content" | while read -r line; do if ! grep -q "$line" "$file"; then - echo "$line" >>"$file" + if [ "$file_needs_sudo" = "1" ]; then + execute_sudo sh -c "echo '$line' >> '$file'" + else + echo "$line" >>"$file" + fi fi done } @@ -135,7 +144,7 @@ append_to_file_sudo() { echo "$content" | while read -r line; do if ! grep -q "$line" "$file"; then - echo "$line" | execute_sudo tee "$file" > /dev/null + echo "$line" | execute_sudo tee "$file" >/dev/null fi done } @@ -161,18 +170,21 @@ append_to_path() { export PATH="$path:$PATH" } -link_to_bin() { - path="$1" - if ! [ -d "$path" ]; then - error "Could not find directory: \"$path\"" +move_to_bin() { + exe_path="$1" + if ! 
[ -f "$exe_path" ]; then + error "Could not find executable: \"$exe_path\"" fi - for file in "$path"/*; do - if [ -f "$file" ]; then - grant_to_user "$file" - execute_sudo ln -sf "$file" "/usr/bin/$(basename "$file")" + usr_paths="/usr/bin /usr/local/bin" + for usr_path in $usr_paths; do + if [ -d "$usr_path" ] && [ -w "$usr_path" ]; then + break fi done + + grant_to_user "$exe_path" + execute_sudo mv -f "$exe_path" "$usr_path/$(basename "$exe_path")" } check_features() { @@ -384,6 +396,74 @@ check_user() { fi } +check_ulimit() { + if ! [ "$ci" = "1" ]; then + return + fi + + print "Checking ulimits..." + systemd_conf="/etc/systemd/system.conf" + if [ -f "$systemd_conf" ]; then + limits_conf="/etc/security/limits.d/99-unlimited.conf" + if ! [ -f "$limits_conf" ]; then + execute_sudo mkdir -p "$(dirname "$limits_conf")" + execute_sudo touch "$limits_conf" + fi + fi + + limits="core data fsize memlock nofile rss stack cpu nproc as locks sigpending msgqueue" + for limit in $limits; do + limit_upper="$(echo "$limit" | tr '[:lower:]' '[:upper:]')" + + limit_value="unlimited" + case "$limit" in + nofile | nproc) + limit_value="1048576" + ;; + esac + + if [ -f "$limits_conf" ]; then + limit_users="root *" + for limit_user in $limit_users; do + append_to_file "$limits_conf" "$limit_user soft $limit $limit_value" + append_to_file "$limits_conf" "$limit_user hard $limit $limit_value" + done + fi + + if [ -f "$systemd_conf" ]; then + append_to_file "$systemd_conf" "DefaultLimit$limit_upper=$limit_value" + fi + done + + rc_conf="/etc/rc.conf" + if [ -f "$rc_conf" ]; then + rc_ulimit="" + limit_flags="c d e f i l m n q r s t u v x" + for limit_flag in $limit_flags; do + limit_value="unlimited" + case "$limit_flag" in + n | u) + limit_value="1048576" + ;; + esac + rc_ulimit="$rc_ulimit -$limit_flag $limit_value" + done + append_to_file "$rc_conf" "rc_ulimit=\"$rc_ulimit\"" + fi + + pam_confs="/etc/pam.d/common-session /etc/pam.d/common-session-noninteractive" + for pam_conf in $pam_confs; do + if [ -f "$pam_conf" ]; then + append_to_file "$pam_conf" "session optional pam_limits.so" + fi + done + + systemctl="$(which systemctl)" + if [ -f "$systemctl" ]; then + execute_sudo "$systemctl" daemon-reload + fi +} + package_manager() { case "$pm" in apt) @@ -602,6 +682,14 @@ install_nodejs_headers() { } install_bun() { + case "$pm" in + apk) + install_packages \ + libgcc \ + libstdc++ + ;; + esac + bash="$(require bash)" script=$(download_file "https://bun.sh/install") @@ -615,7 +703,10 @@ install_bun() { ;; esac - link_to_bin "$home/.bun/bin" + move_to_bin "$home/.bun/bin/bun" + bun_path="$(which bun)" + bunx_path="$(dirname "$bun_path")/bunx" + execute_sudo ln -sf "$bun_path" "$bunx_path" } install_cmake() { @@ -628,14 +719,14 @@ install_cmake() { cmake_version="3.30.5" case "$arch" in x64) - url="https://github.com/Kitware/CMake/releases/download/v$cmake_version/cmake-$cmake_version-linux-x86_64.sh" + cmake_url="https://github.com/Kitware/CMake/releases/download/v$cmake_version/cmake-$cmake_version-linux-x86_64.sh" ;; aarch64) - url="https://github.com/Kitware/CMake/releases/download/v$cmake_version/cmake-$cmake_version-linux-aarch64.sh" + cmake_url="https://github.com/Kitware/CMake/releases/download/v$cmake_version/cmake-$cmake_version-linux-aarch64.sh" ;; esac - script=$(download_file "$url") - execute_sudo "$sh" "$script" \ + cmake_script=$(download_file "$cmake_url") + execute_sudo "$sh" "$cmake_script" \ --skip-license \ --prefix=/usr ;; @@ -732,13 +823,13 @@ install_llvm() { case "$pm" in apt) 
bash="$(require bash)" - script="$(download_file "https://apt.llvm.org/llvm.sh")" + llvm_script="$(download_file "https://apt.llvm.org/llvm.sh")" case "$distro-$release" in ubuntu-24*) - execute_sudo "$bash" "$script" "$(llvm_version)" all -njammy + execute_sudo "$bash" "$llvm_script" "$(llvm_version)" all -njammy ;; *) - execute_sudo "$bash" "$script" "$(llvm_version)" all + execute_sudo "$bash" "$llvm_script" "$(llvm_version)" all ;; esac ;; @@ -779,11 +870,6 @@ install_rust() { execute_as_user "$sh" "$script" -y ;; esac - - # FIXME: This causes cargo to fail to build: - # > error: rustup could not choose a version of cargo to run, - # > because one wasn't specified explicitly, and no default is configured. - # link_to_bin "$home/.cargo/bin" } install_docker() { @@ -796,7 +882,7 @@ install_docker() { *) case "$distro-$release" in amzn-2 | amzn-1) - execute amazon-linux-extras install docker + execute_sudo amazon-linux-extras install docker ;; amzn-* | alpine-*) install_packages docker @@ -832,8 +918,8 @@ install_tailscale() { case "$os" in linux) sh="$(require sh)" - script=$(download_file "https://tailscale.com/install.sh") - execute "$sh" "$script" + tailscale_script=$(download_file "https://tailscale.com/install.sh") + execute "$sh" "$tailscale_script" ;; darwin) install_packages go @@ -862,24 +948,39 @@ create_buildkite_user() { esac if [ -z "$(getent passwd "$user")" ]; then - execute_sudo useradd "$user" \ - --system \ - --no-create-home \ - --home-dir "$home" + case "$distro" in + alpine) + execute_sudo addgroup \ + --system "$group" + execute_sudo adduser "$user" \ + --system \ + --ingroup "$group" \ + --shell "$(require sh)" \ + --home "$home" \ + --disabled-password + ;; + *) + execute_sudo useradd "$user" \ + --system \ + --shell "$(require sh)" \ + --no-create-home \ + --home-dir "$home" + ;; + esac fi if [ -n "$(getent group docker)" ]; then execute_sudo usermod -aG docker "$user" fi - paths="$home /var/cache/buildkite-agent /var/log/buildkite-agent /var/run/buildkite-agent /var/run/buildkite-agent/buildkite-agent.sock" - for path in $paths; do + buildkite_paths="$home /var/cache/buildkite-agent /var/log/buildkite-agent /var/run/buildkite-agent /var/run/buildkite-agent/buildkite-agent.sock" + for path in $buildkite_paths; do execute_sudo mkdir -p "$path" execute_sudo chown -R "$user:$group" "$path" done - files="/var/run/buildkite-agent/buildkite-agent.pid" - for file in $files; do + buildkite_files="/var/run/buildkite-agent/buildkite-agent.pid" + for file in $buildkite_files; do execute_sudo touch "$file" execute_sudo chown "$user:$group" "$file" done @@ -890,19 +991,42 @@ install_buildkite() { return fi - bash="$(require bash)" - script="$(download_file "https://raw.githubusercontent.com/buildkite/agent/main/install.sh")" - tmp_dir="$(execute dirname "$script")" - HOME="$tmp_dir" execute "$bash" "$script" + buildkite_version="3.87.0" + case "$os-$arch" in + linux-aarch64) + buildkite_filename="buildkite-agent-linux-arm64-$buildkite_version.tar.gz" + ;; + linux-x64) + buildkite_filename="buildkite-agent-linux-amd64-$buildkite_version.tar.gz" + ;; + darwin-aarch64) + buildkite_filename="buildkite-agent-darwin-arm64-$buildkite_version.tar.gz" + ;; + darwin-x64) + buildkite_filename="buildkite-agent-darwin-amd64-$buildkite_version.tar.gz" + ;; + esac + buildkite_url="https://github.com/buildkite/agent/releases/download/v$buildkite_version/$buildkite_filename" + buildkite_filepath="$(download_file "$buildkite_url" "$buildkite_filename")" + buildkite_tmpdir="$(dirname 
"$buildkite_filepath")" - out_dir="$tmp_dir/.buildkite-agent" - execute_sudo mv -f "$out_dir/bin/buildkite-agent" "/usr/bin/buildkite-agent" + execute tar -xzf "$buildkite_filepath" -C "$buildkite_tmpdir" + move_to_bin "$buildkite_tmpdir/buildkite-agent" + execute rm -rf "$buildkite_tmpdir" } -install_chrome_dependencies() { +install_chromium() { # https://github.com/puppeteer/puppeteer/blob/main/docs/troubleshooting.md#chrome-doesnt-launch-on-linux # https://github.com/puppeteer/puppeteer/blob/main/docs/troubleshooting.md#running-puppeteer-in-the-cloud case "$pm" in + apk) + install_packages \ + chromium \ + nss \ + freetype \ + harfbuzz \ + ttf-freefont + ;; apt) install_packages \ fonts-liberation \ @@ -979,22 +1103,17 @@ install_chrome_dependencies() { esac } -raise_file_descriptor_limit() { - append_to_file_sudo /etc/security/limits.conf '* soft nofile 262144' - append_to_file_sudo /etc/security/limits.conf '* hard nofile 262144' -} - main() { check_features "$@" check_operating_system check_inside_docker check_user + check_ulimit check_package_manager create_buildkite_user install_common_software install_build_essentials - install_chrome_dependencies - raise_file_descriptor_limit # XXX: temporary + install_chromium } main "$@" diff --git a/scripts/build.mjs b/scripts/build.mjs index a35c21eac3..2fab14a959 100755 --- a/scripts/build.mjs +++ b/scripts/build.mjs @@ -3,6 +3,7 @@ import { spawn as nodeSpawn } from "node:child_process"; import { existsSync, readFileSync, mkdirSync, cpSync, chmodSync } from "node:fs"; import { basename, join, resolve } from "node:path"; +import { isCI, printEnvironment, startGroup } from "./utils.mjs"; // https://cmake.org/cmake/help/latest/manual/cmake.1.html#generate-a-project-buildsystem const generateFlags = [ @@ -37,6 +38,10 @@ async function build(args) { return spawn("pwsh", ["-NoProfile", "-NoLogo", "-File", shellPath, process.argv0, scriptPath, ...args]); } + if (isCI) { + printEnvironment(); + } + const env = { ...process.env, FORCE_COLOR: "1", @@ -102,7 +107,8 @@ async function build(args) { const generateArgs = Object.entries(generateOptions).flatMap(([flag, value]) => flag.startsWith("-D") ? [`${flag}=${value}`] : [flag, value], ); - await spawn("cmake", generateArgs, { env }, "configuration"); + + await startGroup("CMake Configure", () => spawn("cmake", generateArgs, { env })); const envPath = resolve(buildPath, ".env"); if (existsSync(envPath)) { @@ -116,7 +122,8 @@ async function build(args) { const buildArgs = Object.entries(buildOptions) .sort(([a], [b]) => (a === "--build" ? 
-1 : a.localeCompare(b))) .flatMap(([flag, value]) => [flag, value]); - await spawn("cmake", buildArgs, { env }, "compilation"); + + await startGroup("CMake Build", () => spawn("cmake", buildArgs, { env })); printDuration("total", Date.now() - startTime); } diff --git a/scripts/machine.mjs b/scripts/machine.mjs index 3ddfd6ac3a..479dbb4cfd 100755 --- a/scripts/machine.mjs +++ b/scripts/machine.mjs @@ -18,45 +18,740 @@ import { waitForPort, which, escapePowershell, + getGithubUrl, + getGithubApiUrl, + curlSafe, + mkdtemp, + writeFile, + copyFile, + isMacOS, + mkdir, + rm, + homedir, + isWindows, + sha256, + isPrivileged, } from "./utils.mjs"; -import { join, relative, resolve } from "node:path"; -import { homedir } from "node:os"; -import { existsSync, mkdirSync, mkdtempSync, readdirSync } from "node:fs"; +import { basename, extname, join, relative, resolve } from "node:path"; +import { existsSync, mkdtempSync, readdirSync } from "node:fs"; import { fileURLToPath } from "node:url"; +/** + * @link https://tart.run/ + * @link https://github.com/cirruslabs/tart + */ +const tart = { + get name() { + return "tart"; + }, + + /** + * @param {string[]} args + * @param {import("./utils.mjs").SpawnOptions} options + * @returns {Promise} + */ + async spawn(args, options) { + const tart = which("tart", { required: true }); + const { json } = options || {}; + const command = json ? [tart, ...args, "--format=json"] : [tart, ...args]; + + const { stdout } = await spawnSafe(command, options); + if (!json) { + return stdout; + } + + try { + return JSON.parse(stdout); + } catch { + return; + } + }, + + /** + * @typedef {"sequoia" | "sonoma" | "ventura" | "monterey"} TartDistro + * @typedef {`ghcr.io/cirruslabs/macos-${TartDistro}-xcode`} TartImage + * @link https://github.com/orgs/cirruslabs/packages?repo_name=macos-image-templates + */ + + /** + * @param {Platform} platform + * @returns {TartImage} + */ + getImage(platform) { + const { os, arch, release } = platform; + if (os !== "darwin" || arch !== "aarch64") { + throw new Error(`Unsupported platform: ${inspect(platform)}`); + } + const distros = { + "15": "sequoia", + "14": "sonoma", + "13": "ventura", + "12": "monterey", + }; + const distro = distros[release]; + if (!distro) { + throw new Error(`Unsupported macOS release: ${distro}`); + } + return `ghcr.io/cirruslabs/macos-${distro}-xcode`; + }, + + /** + * @typedef {Object} TartVm + * @property {string} Name + * @property {"running" | "stopped"} State + * @property {"local"} Source + * @property {number} Size + * @property {number} Disk + * @property {number} [CPU] + * @property {number} [Memory] + */ + + /** + * @returns {Promise} + */ + async listVms() { + return this.spawn(["list"], { json: true }); + }, + + /** + * @param {string} name + * @returns {Promise} + */ + async getVm(name) { + const result = await this.spawn(["get", name], { + json: true, + throwOnError: error => !/does not exist/i.test(inspect(error)), + }); + return { + Name: name, + ...result, + }; + }, + + /** + * @param {string} name + * @returns {Promise} + */ + async stopVm(name) { + await this.spawn(["stop", name, "--timeout=0"], { + throwOnError: error => !/does not exist|is not running/i.test(inspect(error)), + }); + }, + + /** + * @param {string} name + * @returns {Promise} + */ + async deleteVm(name) { + await this.stopVm(name); + await this.spawn(["delete", name], { + throwOnError: error => !/does not exist/i.test(inspect(error)), + }); + }, + + /** + * @param {string} name + * @param {TartImage} image + * @returns {Promise} 
+ */ + async cloneVm(name, image) { + const localName = image.split("/").pop(); + const localVm = await this.getVm(localName); + if (localVm) { + const { Name } = localVm; + await this.spawn(["clone", Name, name]); + return; + } + + console.log(`Cloning macOS image: ${image} (this will take a long time)`); + await this.spawn(["clone", image, localName]); + await this.spawn(["clone", localName, name]); + }, + + /** + * @typedef {Object} TartMount + * @property {boolean} [readOnly] + * @property {string} source + * @property {string} destination + */ + + /** + * @typedef {Object} TartVmOptions + * @property {number} [cpuCount] + * @property {number} [memoryGb] + * @property {number} [diskSizeGb] + * @property {boolean} [no-graphics] + * @property {boolean} [no-audio] + * @property {boolean} [no-clipboard] + * @property {boolean} [recovery] + * @property {boolean} [vnc] + * @property {boolean} [vnc-experimental] + * @property {boolean} [net-softnet] + * @property {TartMount[]} [dir] + */ + + /** + * @param {string} name + * @param {TartVmOptions} options + * @returns {Promise} + */ + async runVm(name, options = {}) { + const { cpuCount, memoryGb, diskSizeGb, dir, ...vmOptions } = options; + + const setArgs = ["--random-mac", "--random-serial"]; + if (cpuCount) { + setArgs.push(`--cpu=${cpuCount}`); + } + if (memoryGb) { + setArgs.push(`--memory=${memoryGb}`); + } + if (diskSizeGb) { + setArgs.push(`--disk-size=${diskSizeGb}`); + } + await this.spawn(["set", name, ...setArgs]); + + const args = Object.entries(vmOptions) + .filter(([, value]) => value !== undefined) + .flatMap(([key, value]) => (typeof value === "boolean" ? (value ? [`--${key}`] : []) : [`--${key}=${value}`])); + if (dir?.length) { + args.push( + ...dir.map(({ source, destination, readOnly }) => `--dir=${source}:${destination}${readOnly ? 
":ro" : ""}`), + ); + } + + // This command is blocking, so it needs to be detached and not awaited + this.spawn(["run", name, ...args], { detached: true }); + }, + + /** + * @param {string} name + * @returns {Promise} + */ + async getVmIp(name) { + const stdout = await this.spawn(["ip", name], { + retryOnError: error => /no IP address found/i.test(inspect(error)), + throwOnError: error => !/does not exist/i.test(inspect(error)), + }); + return stdout?.trim(); + }, + + /** + * @param {MachineOptions} options + * @returns {Promise} + */ + async createMachine(options) { + const { name, imageName, cpuCount, memoryGb, diskSizeGb, rdp } = options; + + const image = imageName || this.getImage(options); + const machineId = name || `i-${Math.random().toString(36).slice(2, 11)}`; + await this.cloneVm(machineId, image); + + await this.runVm(machineId, { + cpuCount, + memoryGb, + diskSizeGb, + "net-softnet": isPrivileged(), + "no-audio": true, + "no-clipboard": true, + "no-graphics": true, + "vnc-experimental": rdp, + }); + + return this.toMachine(machineId); + }, + + /** + * @param {string} name + * @returns {Machine} + */ + toMachine(name) { + const connect = async () => { + const hostname = await this.getVmIp(name); + return { + hostname, + // hardcoded by base images + username: "admin", + password: "admin", + }; + }; + + const exec = async (command, options) => { + const connectOptions = await connect(); + return spawnSsh({ ...connectOptions, command }, options); + }; + + const execSafe = async (command, options) => { + const connectOptions = await connect(); + return spawnSshSafe({ ...connectOptions, command }, options); + }; + + const attach = async () => { + const connectOptions = await connect(); + await spawnSshSafe({ ...connectOptions }); + }; + + const upload = async (source, destination) => { + const connectOptions = await connect(); + await spawnScp({ ...connectOptions, source, destination }); + }; + + const rdp = async () => { + const connectOptions = await connect(); + await spawnRdp({ ...connectOptions }); + }; + + const close = async () => { + await this.deleteVm(name); + }; + + return { + cloud: "tart", + id: name, + spawn: exec, + spawnSafe: execSafe, + attach, + upload, + close, + [Symbol.asyncDispose]: close, + }; + }, +}; + +/** + * @link https://docs.orbstack.dev/ + */ +const orbstack = { + get name() { + return "orbstack"; + }, + + /** + * @typedef {Object} OrbstackImage + * @property {string} distro + * @property {string} version + * @property {string} arch + */ + + /** + * @param {Platform} platform + * @returns {OrbstackImage} + */ + getImage(platform) { + const { os, arch, distro, release } = platform; + if (os !== "linux" || !/^debian|ubuntu|alpine|fedora|centos$/.test(distro)) { + throw new Error(`Unsupported platform: ${inspect(platform)}`); + } + + return { + distro, + version: release, + arch: arch === "aarch64" ? 
"arm64" : "amd64", + }; + }, + + /** + * @typedef {Object} OrbstackVm + * @property {string} id + * @property {string} name + * @property {"running"} state + * @property {OrbstackImage} image + * @property {OrbstackConfig} config + */ + + /** + * @typedef {Object} OrbstackConfig + * @property {string} default_username + * @property {boolean} isolated + */ + + /** + * @typedef {Object} OrbstackVmOptions + * @property {string} [name] + * @property {OrbstackImage} image + * @property {string} [username] + * @property {string} [password] + * @property {string} [userData] + */ + + /** + * @param {OrbstackVmOptions} options + * @returns {Promise} + */ + async createVm(options) { + const { name, image, username, password, userData } = options; + const { distro, version, arch } = image; + const uniqueId = name || `linux-${distro}-${version}-${arch}-${Math.random().toString(36).slice(2, 11)}`; + + const args = [`--arch=${arch}`, `${distro}:${version}`, uniqueId]; + if (username) { + args.push(`--user=${username}`); + } + if (password) { + args.push(`--set-password=${password}`); + } + + let userDataPath; + if (userData) { + userDataPath = mkdtemp("orbstack-user-data-", "user-data.txt"); + writeFile(userDataPath, userData); + args.push(`--user-data=${userDataPath}`); + } + + try { + await spawnSafe($`orbctl create ${args}`); + } finally { + if (userDataPath) { + rm(userDataPath); + } + } + + return this.inspectVm(uniqueId); + }, + + /** + * @param {string} name + */ + async deleteVm(name) { + await spawnSafe($`orbctl delete ${name}`, { + throwOnError: error => !/machine not found/i.test(inspect(error)), + }); + }, + + /** + * @param {string} name + * @returns {Promise} + */ + async inspectVm(name) { + const { exitCode, stdout } = await spawnSafe($`orbctl info ${name} --format=json`, { + throwOnError: error => !/machine not found/i.test(inspect(error)), + }); + if (exitCode === 0) { + return JSON.parse(stdout); + } + }, + + /** + * @returns {Promise} + */ + async listVms() { + const { stdout } = await spawnSafe($`orbctl list --format=json`); + return JSON.parse(stdout); + }, + + /** + * @param {MachineOptions} options + * @returns {Promise} + */ + async createMachine(options) { + const { distro } = options; + const username = getUsername(distro); + const userData = getUserData({ ...options, username }); + + const image = this.getImage(options); + const vm = await this.createVm({ + image, + username, + userData, + }); + + return this.toMachine(vm, options); + }, + + /** + * @param {OrbstackVm} vm + * @returns {Machine} + */ + toMachine(vm) { + const { id, name, config } = vm; + + const { default_username: username } = config; + const connectOptions = { + username, + hostname: `${name}@orb`, + }; + + const exec = async (command, options) => { + return spawnSsh({ ...connectOptions, command }, options); + }; + + const execSafe = async (command, options) => { + return spawnSshSafe({ ...connectOptions, command }, options); + }; + + const attach = async () => { + await spawnSshSafe({ ...connectOptions }); + }; + + const upload = async (source, destination) => { + await spawnSafe(["orbctl", "push", `--machine=${name}`, source, destination]); + }; + + const close = async () => { + await this.deleteVm(name); + }; + + return { + cloud: "orbstack", + id, + name, + spawn: exec, + spawnSafe: execSafe, + upload, + attach, + close, + [Symbol.asyncDispose]: close, + }; + }, +}; + const docker = { + get name() { + return "docker"; + }, + + /** + * @typedef {"linux" | "darwin" | "windows"} DockerOs + * @typedef 
{"amd64" | "arm64"} DockerArch + * @typedef {`${DockerOs}/${DockerArch}`} DockerPlatform + */ + + /** + * @param {Platform} platform + * @returns {DockerPlatform} + */ getPlatform(platform) { const { os, arch } = platform; + if (arch === "aarch64") { + return `${os}/arm64`; + } else if (arch === "x64") { + return `${os}/amd64`; + } + throw new Error(`Unsupported platform: ${inspect(platform)}`); + }, - if (os === "linux" || os === "windows") { - if (arch === "aarch64") { - return `${os}/arm64`; - } else if (arch === "x64") { - return `${os}/amd64`; + /** + * @typedef DockerSpawnOptions + * @property {DockerPlatform} [platform] + * @property {boolean} [json] + */ + + /** + * @param {string[]} args + * @param {DockerSpawnOptions & import("./utils.mjs").SpawnOptions} [options] + * @returns {Promise} + */ + async spawn(args, options = {}) { + const docker = which("docker", { required: true }); + + let env = { ...process.env }; + if (isCI) { + env["BUILDKIT_PROGRESS"] = "plain"; + } + + const { json, platform } = options; + if (json) { + args.push("--format=json"); + } + if (platform) { + args.push(`--platform=${platform}`); + } + + const { error, stdout } = await spawnSafe($`${docker} ${args}`, { env, ...options }); + if (error) { + return; + } + if (!json) { + return stdout; + } + + try { + return JSON.parse(stdout); + } catch { + return; + } + }, + + /** + * @typedef {Object} DockerImage + * @property {string} Id + * @property {string[]} RepoTags + * @property {string[]} RepoDigests + * @property {string} Created + * @property {DockerOs} Os + * @property {DockerArch} Architecture + * @property {number} Size + */ + + /** + * @param {string} url + * @param {DockerPlatform} [platform] + * @returns {Promise} + */ + async pullImage(url, platform) { + const done = await this.spawn($`pull ${url}`, { + platform, + throwOnError: error => !/No such image|manifest unknown/i.test(inspect(error)), + }); + return !!done; + }, + + /** + * @param {string} url + * @param {DockerPlatform} [platform] + * @returns {Promise} + */ + async inspectImage(url, platform) { + /** @type {DockerImage[]} */ + const images = await this.spawn($`image inspect ${url}`, { + json: true, + throwOnError: error => !/No such image/i.test(inspect(error)), + }); + + if (!images) { + const pulled = await this.pullImage(url, platform); + if (pulled) { + return this.inspectImage(url, platform); + } + } + + const { os, arch } = platform || {}; + return images + ?.filter(({ Os, Architecture }) => !os || !arch || (Os === os && Architecture === arch)) + ?.find((a, b) => (a.Created < b.Created ? 
1 : -1)); + }, + + /** + * @typedef {Object} DockerContainer + * @property {string} Id + * @property {string} Name + * @property {string} Image + * @property {string} Created + * @property {DockerContainerState} State + * @property {DockerContainerNetworkSettings} NetworkSettings + */ + + /** + * @typedef {Object} DockerContainerState + * @property {"exited" | "running"} Status + * @property {number} [Pid] + * @property {number} ExitCode + * @property {string} [Error] + * @property {string} StartedAt + * @property {string} FinishedAt + */ + + /** + * @typedef {Object} DockerContainerNetworkSettings + * @property {string} [IPAddress] + */ + + /** + * @param {string} containerId + * @returns {Promise} + */ + async inspectContainer(containerId) { + const containers = await this.spawn($`container inspect ${containerId}`, { json: true }); + return containers?.find(a => a.Id === containerId); + }, + + /** + * @returns {Promise} + */ + async listContainers() { + const containers = await this.spawn($`container ls --all`, { json: true }); + return containers || []; + }, + + /** + * @typedef {Object} DockerRunOptions + * @property {string[]} [command] + * @property {DockerPlatform} [platform] + * @property {string} [name] + * @property {boolean} [detach] + * @property {"always" | "never"} [pull] + * @property {boolean} [rm] + * @property {"no" | "on-failure" | "always"} [restart] + */ + + /** + * @param {string} url + * @param {DockerRunOptions} [options] + * @returns {Promise} + */ + async runContainer(url, options = {}) { + const { detach, command = [], ...containerOptions } = options; + const args = Object.entries(containerOptions) + .filter(([_, value]) => typeof value !== "undefined") + .map(([key, value]) => (typeof value === "boolean" ? `--${key}` : `--${key}=${value}`)); + if (detach) { + args.push("--detach"); + } else { + args.push("--tty", "--interactive"); + } + + const stdio = detach ? 
"pipe" : "inherit"; + const result = await this.spawn($`run ${args} ${url} ${command}`, { stdio }); + if (!detach) { + return; + } + + const containerId = result.trim(); + const container = await this.inspectContainer(containerId); + if (!container) { + throw new Error(`Failed to run container: ${inspect(result)}`); + } + return container; + }, + + /** + * @param {Platform} platform + * @returns {Promise} + */ + async getBaseImage(platform) { + const { os, distro, release } = platform; + const dockerPlatform = this.getPlatform(platform); + + let url; + if (os === "linux") { + if (distro === "debian" || distro === "ubuntu" || distro === "alpine") { + url = `docker.io/library/${distro}:${release}`; + } else if (distro === "amazonlinux") { + url = `public.ecr.aws/amazonlinux/amazonlinux:${release}`; + } + } + + if (url) { + const image = await this.inspectImage(url, dockerPlatform); + if (image) { + return image; } } throw new Error(`Unsupported platform: ${inspect(platform)}`); }, - async createMachine(platform) { - const { id } = await docker.getImage(platform); - const platformString = docker.getPlatform(platform); + /** + * @param {DockerContainer} container + * @param {MachineOptions} [options] + * @returns {Machine} + */ + toMachine(container, options = {}) { + const { Id: containerId } = container; - const command = ["sleep", "1d"]; - const { stdout } = await spawnSafe(["docker", "run", "--rm", "--platform", platformString, "-d", id, ...command]); - const containerId = stdout.trim(); - - const spawn = async command => { - return spawn(["docker", "exec", containerId, ...command]); + const exec = (command, options) => { + return spawn(["docker", "exec", containerId, ...command], options); }; - const spawnSafe = async command => { - return spawnSafe(["docker", "exec", containerId, ...command]); + const execSafe = (command, options) => { + return spawnSafe(["docker", "exec", containerId, ...command], options); + }; + + const upload = async (source, destination) => { + await spawn(["docker", "cp", source, `${containerId}:${destination}`]); }; const attach = async () => { - const { exitCode, spawnError } = await spawn(["docker", "exec", "-it", containerId, "bash"], { + const { exitCode, error } = await spawn(["docker", "exec", "-it", containerId, "sh"], { stdio: "inherit", }); @@ -64,69 +759,60 @@ const docker = { return; } - throw spawnError; + throw error; + }; + + const snapshot = async name => { + await spawn(["docker", "commit", containerId]); }; const kill = async () => { - await spawnSafe(["docker", "kill", containerId]); + await spawn(["docker", "kill", containerId]); }; return { - spawn, - spawnSafe, + cloud: "docker", + id: containerId, + spawn: exec, + spawnSafe: execSafe, + upload, attach, close: kill, [Symbol.asyncDispose]: kill, }; }, - async getImage(platform) { - const os = platform["os"]; - const distro = platform["distro"]; - const release = platform["release"] || "latest"; + /** + * @param {MachineOptions} options + * @returns {Promise} + */ + async createMachine(options) { + const { Id: imageId, Os, Architecture } = await docker.getBaseImage(options); - let url; - if (os === "linux") { - if (distro === "debian") { - url = `docker.io/library/debian:${release}`; - } else if (distro === "ubuntu") { - url = `docker.io/library/ubuntu:${release}`; - } else if (distro === "amazonlinux") { - url = `public.ecr.aws/amazonlinux/amazonlinux:${release}`; - } else if (distro === "alpine") { - url = `docker.io/library/alpine:${release}`; - } - } + const container = await 
docker.runContainer(imageId, { + platform: `${Os}/${Architecture}`, + command: ["sleep", "1d"], + detach: true, + rm: true, + restart: "no", + }); - if (url) { - await spawnSafe(["docker", "pull", "--platform", docker.getPlatform(platform), url]); - const { stdout } = await spawnSafe(["docker", "image", "inspect", url, "--format", "json"]); - const [{ Id }] = JSON.parse(stdout); - return { - id: Id, - name: url, - username: "root", - }; - } - - throw new Error(`Unsupported platform: ${inspect(platform)}`); + return this.toMachine(container, options); }, }; -export const aws = { +const aws = { get name() { return "aws"; }, /** * @param {string[]} args + * @param {import("./utils.mjs").SpawnOptions} [options] * @returns {Promise} */ - async spawn(args) { - const aws = which("aws"); - if (!aws) { - throw new Error("AWS CLI is not installed, please install it"); - } + async spawn(args, options = {}) { + const aws = which("aws", { required: true }); let env; if (isCI) { @@ -137,14 +823,7 @@ export const aws = { }; } - const { error, stdout } = await spawn($`${aws} ${args} --output json`, { env }); - if (error) { - if (/max attempts exceeded/i.test(inspect(error))) { - return this.spawn(args); - } - throw error; - } - + const { stdout } = await spawnSafe($`${aws} ${args} --output json`, { env, ...options }); try { return JSON.parse(stdout); } catch { @@ -202,9 +881,28 @@ export const aws = { * @link https://awscli.amazonaws.com/v2/documentation/api/latest/reference/ec2/run-instances.html */ async runInstances(options) { - const flags = aws.getFlags(options); - const { Instances } = await aws.spawn($`ec2 run-instances ${flags}`); - return Instances.sort((a, b) => (a.LaunchTime < b.LaunchTime ? 1 : -1)); + for (let i = 0; i < 3; i++) { + const flags = aws.getFlags(options); + const result = await aws.spawn($`ec2 run-instances ${flags}`, { + throwOnError: error => { + if (options["instance-market-options"] && /InsufficientInstanceCapacity/i.test(inspect(error))) { + delete options["instance-market-options"]; + const instanceType = options["instance-type"] || "default"; + console.warn(`There is not enough capacity for ${instanceType} spot instances, retrying with on-demand...`); + return false; + } + return true; + }, + }); + if (result) { + const { Instances } = result; + if (Instances.length) { + return Instances.sort((a, b) => (a.LaunchTime < b.LaunchTime ? 
1 : -1)); + } + } + await new Promise(resolve => setTimeout(resolve, i * Math.random() * 15_000)); + } + throw new Error(`Failed to run instances: ${inspect(options)}`); }, /** @@ -220,7 +918,9 @@ export const aws = { * @link https://awscli.amazonaws.com/v2/documentation/api/latest/reference/ec2/terminate-instances.html */ async terminateInstances(...instanceIds) { - await aws.spawn($`ec2 terminate-instances --instance-ids ${instanceIds}`); + await aws.spawn($`ec2 terminate-instances --instance-ids ${instanceIds}`, { + throwOnError: error => !/InvalidInstanceID\.NotFound/i.test(inspect(error)), + }); }, /** @@ -229,7 +929,29 @@ export const aws = { * @link https://awscli.amazonaws.com/v2/documentation/api/latest/reference/ec2/wait.html */ async waitInstances(action, ...instanceIds) { - await aws.spawn($`ec2 wait ${action} --instance-ids ${instanceIds}`); + await aws.spawn($`ec2 wait ${action} --instance-ids ${instanceIds}`, { + retryOnError: error => /max attempts exceeded/i.test(inspect(error)), + }); + }, + + /** + * @param {string} instanceId + * @param {string} privateKeyPath + * @param {object} [passwordOptions] + * @param {boolean} [passwordOptions.wait] + * @returns {Promise} + * @link https://awscli.amazonaws.com/v2/documentation/api/latest/reference/ec2/get-password-data.html + */ + async getPasswordData(instanceId, privateKeyPath, passwordOptions = {}) { + const attempts = passwordOptions.wait ? 15 : 1; + for (let i = 0; i < attempts; i++) { + const { PasswordData } = await aws.spawn($`ec2 get-password-data --instance-id ${instanceId}`); + if (PasswordData) { + return decryptPassword(PasswordData, privateKeyPath); + } + await new Promise(resolve => setTimeout(resolve, 60000 * i)); + } + throw new Error(`Failed to get password data for instance: ${instanceId}`); }, /** @@ -262,19 +984,31 @@ export const aws = { */ async createImage(options) { const flags = aws.getFlags(options); - try { - const { ImageId } = await aws.spawn($`ec2 create-image ${flags}`); - return ImageId; - } catch (error) { - const match = /already in use by AMI (ami-[a-z0-9]+)/i.exec(inspect(error)); - if (!match) { - throw error; - } - const [, existingImageId] = match; - await aws.spawn($`ec2 deregister-image --image-id ${existingImageId}`); - const { ImageId } = await aws.spawn($`ec2 create-image ${flags}`); + + /** @type {string | undefined} */ + let existingImageId; + + /** @type {AwsImage | undefined} */ + const image = await aws.spawn($`ec2 create-image ${flags}`, { + throwOnError: error => { + const match = /already in use by AMI (ami-[a-z0-9]+)/i.exec(inspect(error)); + if (!match) { + return true; + } + const [, imageId] = match; + existingImageId = imageId; + return false; + }, + }); + + if (!existingImageId) { + const { ImageId } = image; return ImageId; } + + await aws.spawn($`ec2 deregister-image --image-id ${existingImageId}`); + const { ImageId } = await aws.spawn($`ec2 create-image ${flags}`); + return ImageId; }, /** @@ -294,7 +1028,60 @@ export const aws = { * @link https://awscli.amazonaws.com/v2/documentation/api/latest/reference/ec2/wait/image-available.html */ async waitImage(action, ...imageIds) { - await aws.spawn($`ec2 wait ${action} --image-ids ${imageIds}`); + await aws.spawn($`ec2 wait ${action} --image-ids ${imageIds}`, { + retryOnError: error => /max attempts exceeded/i.test(inspect(error)), + }); + }, + + /** + * @typedef {Object} AwsKeyPair + * @property {string} KeyPairId + * @property {string} KeyName + * @property {string} KeyFingerprint + * @property {string}
[PublicKeyMaterial] + */ + + /** + * @param {string[]} [names] + * @returns {Promise} + * @link https://awscli.amazonaws.com/v2/documentation/api/latest/reference/ec2/describe-key-pairs.html + */ + async describeKeyPairs(names) { + const command = names + ? $`ec2 describe-key-pairs --include-public-key --key-names ${names}` + : $`ec2 describe-key-pairs --include-public-key`; + const { KeyPairs } = await aws.spawn(command); + return KeyPairs; + }, + + /** + * @param {string | Buffer} publicKey + * @param {string} [name] + * @returns {Promise} + * @link https://awscli.amazonaws.com/v2/documentation/api/latest/reference/ec2/import-key-pair.html + */ + async importKeyPair(publicKey, name) { + const keyName = name || `key-pair-${sha256(publicKey)}`; + const publicKeyBase64 = Buffer.from(publicKey).toString("base64"); + + /** @type {AwsKeyPair | undefined} */ + const keyPair = await aws.spawn( + $`ec2 import-key-pair --key-name ${keyName} --public-key-material ${publicKeyBase64}`, + { + throwOnError: error => !/InvalidKeyPair\.Duplicate/i.test(inspect(error)), + }, + ); + + if (keyPair) { + return keyPair; + } + + const keyPairs = await aws.describeKeyPairs(keyName); + if (keyPairs.length) { + return keyPairs[0]; + } + + throw new Error(`Failed to import key pair: ${keyName}`); }, /** @@ -329,36 +1116,36 @@ export const aws = { * @returns {Promise} */ async getBaseImage(options) { - const { os, arch, distro, distroVersion } = options; + const { os, arch, distro, release } = options; let name, owner; if (os === "linux") { if (!distro || distro === "debian") { owner = "amazon"; - name = `debian-${distroVersion || "*"}-${arch === "aarch64" ? "arm64" : "amd64"}-*`; + name = `debian-${release || "*"}-${arch === "aarch64" ? "arm64" : "amd64"}-*`; } else if (distro === "ubuntu") { owner = "099720109477"; - name = `ubuntu/images/hvm-ssd*/ubuntu-*-${distroVersion || "*"}-${arch === "aarch64" ? "arm64" : "amd64"}-server-*`; + name = `ubuntu/images/hvm-ssd*/ubuntu-*-${release || "*"}-${arch === "aarch64" ? "arm64" : "amd64"}-server-*`; } else if (distro === "amazonlinux") { owner = "amazon"; - if (distroVersion === "1") { - // EOL - } else if (distroVersion === "2") { + if (release === "1" && arch === "x64") { + name = `amzn-ami-2018.03.*`; + } else if (release === "2") { name = `amzn2-ami-hvm-*-${arch === "aarch64" ? "arm64" : "x86_64"}-gp2`; } else { - name = `al${distroVersion || "*"}-ami-*-${arch === "aarch64" ? "arm64" : "x86_64"}`; + name = `al${release || "*"}-ami-*-${arch === "aarch64" ? "arm64" : "x86_64"}`; } } else if (distro === "alpine") { owner = "538276064493"; - name = `alpine-${distroVersion || "*"}.*-${arch === "aarch64" ? "aarch64" : "x86_64"}-uefi-cloudinit-*`; + name = `alpine-${release || "*"}.*-${arch === "aarch64" ? "aarch64" : "x86_64"}-uefi-cloudinit-*`; } else if (distro === "centos") { owner = "aws-marketplace"; - name = `CentOS-Stream-ec2-${distroVersion || "*"}-*.${arch === "aarch64" ? "aarch64" : "x86_64"}-*`; + name = `CentOS-Stream-ec2-${release || "*"}-*.${arch === "aarch64" ? 
"aarch64" : "x86_64"}-*`; } } else if (os === "windows") { if (!distro || distro === "server") { owner = "amazon"; - name = `Windows_Server-${distroVersion || "*"}-English-Full-Base-*`; + name = `Windows_Server-${release || "*"}-English-Full-Base-*`; } } @@ -385,7 +1172,7 @@ export const aws = { * @returns {Promise} */ async createMachine(options) { - const { os, arch, imageId, instanceType, tags } = options; + const { os, arch, imageId, instanceType, tags, sshKeys, preemptible } = options; /** @type {AwsImage} */ let image; @@ -413,7 +1200,7 @@ export const aws = { let userData = getUserData({ ...options, username }); if (os === "windows") { - userData = `${userData}-ExecutionPolicy Unrestricted -NoProfile -NonInteractivefalse`; + userData = `${userData}-ExecutionPolicy Unrestricted -NoProfile -NonInteractivetrue`; } let tagSpecification = []; @@ -426,6 +1213,29 @@ export const aws = { }); } + /** @type {string | undefined} */ + let keyName, keyPath; + if (os === "windows") { + const sshKey = sshKeys.find(({ privatePath }) => existsSync(privatePath)); + if (sshKey) { + const { publicKey, privatePath } = sshKey; + const { KeyName } = await aws.importKeyPair(publicKey); + keyName = KeyName; + keyPath = privatePath; + } + } + + let marketOptions; + if (preemptible) { + marketOptions = JSON.stringify({ + MarketType: "spot", + SpotOptions: { + InstanceInterruptionBehavior: "terminate", + SpotInstanceType: "one-time", + }, + }); + } + const [instance] = await aws.runInstances({ ["image-id"]: ImageId, ["instance-type"]: instanceType || (arch === "aarch64" ? "t4g.large" : "t3.large"), @@ -438,10 +1248,11 @@ export const aws = { "InstanceMetadataTags": "enabled", }), ["tag-specifications"]: JSON.stringify(tagSpecification), - ["key-name"]: "ashcon-bun", + ["key-name"]: keyName, + ["instance-market-options"]: marketOptions, }); - return aws.toMachine(instance, { ...options, username }); + return aws.toMachine(instance, { ...options, username, keyPath }); }, /** @@ -479,6 +1290,13 @@ export const aws = { return spawnSshSafe({ ...connectOptions, command }, options); }; + const rdp = async () => { + const { keyPath } = options; + const { hostname, username } = await connect(); + const password = await aws.getPasswordData(InstanceId, keyPath, { wait: true }); + return { hostname, username, password }; + }; + const attach = async () => { const connectOptions = await connect(); await spawnSshSafe({ ...connectOptions }); @@ -517,6 +1335,7 @@ export const aws = { spawnSafe, upload, attach, + rdp, snapshot, close: terminate, [Symbol.asyncDispose]: terminate, @@ -525,70 +1344,478 @@ export const aws = { }; const google = { - async createMachine(platform) { - const image = await google.getImage(platform); - const { id: imageId, username } = image; + get cloud() { + return "google"; + }, - const authorizedKeys = await getAuthorizedKeys(); - const sshKeys = authorizedKeys?.map(key => `${username}:${key}`).join("\n") ?? 
""; + /** + * @param {string[]} args + * @param {import("./utils.mjs").SpawnOptions} [options] + * @returns {Promise} + */ + async spawn(args, options = {}) { + const gcloud = which("gcloud", { required: true }); - const { os, ["instance-type"]: type } = platform; - const instanceType = type || "e2-standard-4"; + let env = { ...process.env }; + // if (isCI) { + // env; // TODO: Add Google Cloud credentials + // } else { + // env["TERM"] = "dumb"; + // } - let metadata = `ssh-keys=${sshKeys}`; - if (os === "windows") { - metadata += `,sysprep-specialize-script-cmd=googet -noconfirm=true install google-compute-engine-ssh,enable-windows-ssh=TRUE`; - } - - const [{ id, networkInterfaces }] = await google.createInstances({ - ["zone"]: "us-central1-a", - ["image"]: imageId, - ["machine-type"]: instanceType, - ["boot-disk-auto-delete"]: true, - // ["boot-disk-size"]: "10GB", - // ["boot-disk-type"]: "pd-standard", - ["metadata"]: metadata, + const { stdout } = await spawnSafe($`${gcloud} ${args} --format json`, { + env, + ...options, }); + try { + return JSON.parse(stdout); + } catch { + return; + } + }, - const publicIp = () => { - for (const { accessConfigs } of networkInterfaces) { - for (const { natIP } of accessConfigs) { - return natIP; + /** + * @param {Record} [options] + * @returns {string[]} + */ + getFilters(options = {}) { + const filter = Object.entries(options) + .filter(([, value]) => value !== undefined) + .map(([key, value]) => [value.includes("*") ? `${key}~${value}` : `${key}=${value}`]) + .join(" AND "); + return filter ? ["--filter", filter] : []; + }, + + /** + * @param {Record} options + * @returns {string[]} + */ + getFlags(options) { + return Object.entries(options) + .filter(([, value]) => value !== undefined) + .flatMap(([key, value]) => { + if (typeof value === "boolean") { + return value ? [`--${key}`] : []; + } + return [`--${key}=${value}`]; + }); + }, + + /** + * @param {Record} options + * @returns {string} + * @link https://cloud.google.com/sdk/gcloud/reference/topic/escaping + */ + getMetadata(options) { + const delimiter = Math.random().toString(36).substring(2, 15); + const entries = Object.entries(options) + .map(([key, value]) => `${key}=${value}`) + .join(delimiter); + return `^${delimiter}^${entries}`; + }, + + /** + * @param {string} name + * @returns {string} + */ + getLabel(name) { + return name.replace(/[^a-z0-9_-]/g, "-").toLowerCase(); + }, + + /** + * @typedef {Object} GoogleImage + * @property {string} id + * @property {string} name + * @property {string} family + * @property {"X86_64" | "ARM64"} architecture + * @property {string} diskSizeGb + * @property {string} selfLink + * @property {"READY"} status + * @property {string} creationTimestamp + */ + + /** + * @param {Partial} [options] + * @returns {Promise} + * @link https://cloud.google.com/sdk/gcloud/reference/compute/images/list + */ + async listImages(options) { + const filters = google.getFilters(options); + const images = await google.spawn($`compute images list ${filters} --preview-images --show-deprecated`); + return images.sort((a, b) => (a.creationTimestamp < b.creationTimestamp ? 
1 : -1)); + }, + + /** + * @param {Record} options + * @returns {Promise} + * @link https://cloud.google.com/sdk/gcloud/reference/compute/images/create + */ + async createImage(options) { + const { name, ...otherOptions } = options; + const flags = this.getFlags(otherOptions); + const imageId = name || "i-" + Math.random().toString(36).substring(2, 15); + return this.spawn($`compute images create ${imageId} ${flags}`); + }, + + /** + * @typedef {Object} GoogleInstance + * @property {string} id + * @property {string} name + * @property {"RUNNING"} status + * @property {string} machineType + * @property {string} zone + * @property {GoogleDisk[]} disks + * @property {GoogleNetworkInterface[]} networkInterfaces + * @property {object} [scheduling] + * @property {"STANDARD" | "SPOT"} [scheduling.provisioningModel] + * @property {boolean} [scheduling.preemptible] + * @property {Record} [labels] + * @property {string} selfLink + * @property {string} creationTimestamp + */ + + /** + * @typedef {Object} GoogleDisk + * @property {string} deviceName + * @property {boolean} boot + * @property {"X86_64" | "ARM64"} architecture + * @property {string[]} [licenses] + * @property {number} diskSizeGb + */ + + /** + * @typedef {Object} GoogleNetworkInterface + * @property {"IPV4_ONLY" | "IPV4_IPV6" | "IPV6_ONLY"} stackType + * @property {string} name + * @property {string} network + * @property {string} networkIP + * @property {string} subnetwork + * @property {GoogleAccessConfig[]} accessConfigs + */ + + /** + * @typedef {Object} GoogleAccessConfig + * @property {string} name + * @property {"ONE_TO_ONE_NAT" | "INTERNAL_NAT"} type + * @property {string} [natIP] + */ + + /** + * @param {Record} options + * @returns {Promise} + * @link https://cloud.google.com/sdk/gcloud/reference/compute/instances/create + */ + async createInstance(options) { + const { name, ...otherOptions } = options || {}; + const flags = this.getFlags(otherOptions); + const instanceId = name || "i-" + Math.random().toString(36).substring(2, 15); + const [instance] = await this.spawn($`compute instances create ${instanceId} ${flags}`); + return instance; + }, + + /** + * @param {string} instanceId + * @param {string} zoneId + * @returns {Promise} + * @link https://cloud.google.com/sdk/gcloud/reference/compute/instances/stop + */ + async stopInstance(instanceId, zoneId) { + await this.spawn($`compute instances stop ${instanceId} --zone=${zoneId}`); + }, + + /** + * @param {string} instanceId + * @param {string} zoneId + * @returns {Promise} + * @link https://cloud.google.com/sdk/gcloud/reference/compute/instances/delete + */ + async deleteInstance(instanceId, zoneId) { + await this.spawn($`compute instances delete ${instanceId} --delete-disks=all --zone=${zoneId}`, { + throwOnError: error => !/not found/i.test(inspect(error)), + }); + }, + + /** + * @param {string} instanceId + * @param {string} username + * @param {string} zoneId + * @param {object} [options] + * @param {boolean} [options.wait] + * @returns {Promise} + * @link https://cloud.google.com/sdk/gcloud/reference/compute/reset-windows-password + */ + async resetWindowsPassword(instanceId, username, zoneId, options = {}) { + const attempts = options.wait ? 
15 : 1; + for (let i = 0; i < attempts; i++) { + const result = await this.spawn( + $`compute reset-windows-password ${instanceId} --user=${username} --zone=${zoneId}`, + { + throwOnError: error => !/instance may not be ready for use/i.test(inspect(error)), + }, + ); + if (result) { + const { password } = result; + if (password) { + return password; } } - throw new Error(`Failed to find public IP for instance: ${id}`); + await new Promise(resolve => setTimeout(resolve, 60000 * i)); + } + }, + + /** + * @param {Partial} options + * @returns {Promise} + */ + async listInstances(options) { + const filters = this.getFilters(options); + const instances = await this.spawn($`compute instances list ${filters}`); + return instances.sort((a, b) => (a.creationTimestamp < b.creationTimestamp ? 1 : -1)); + }, + + /** + * @param {MachineOptions} options + * @returns {Promise} + */ + async getMachineImage(options) { + const { os, arch, distro, release } = options; + const architecture = arch === "aarch64" ? "ARM64" : "X86_64"; + + /** @type {string | undefined} */ + let family; + if (os === "linux") { + if (!distro || distro === "debian") { + family = `debian-${release || "*"}`; + } else if (distro === "ubuntu") { + family = `ubuntu-${release?.replace(/\./g, "") || "*"}`; + } else if (distro === "fedora") { + family = `fedora-coreos-${release || "*"}`; + } else if (distro === "rhel") { + family = `rhel-${release || "*"}`; + } + } else if (os === "windows" && arch === "x64") { + if (!distro || distro === "server") { + family = `windows-${release || "*"}`; + } + } + + if (family) { + const images = await this.listImages({ family, architecture }); + if (images.length) { + const [image] = images; + return image; + } + } + + throw new Error(`Unsupported platform: ${inspect(options)}`); + }, + + /** + * @param {MachineOptions} options + * @returns {Promise} + */ + async createMachine(options) { + const { name, os, arch, distro, instanceType, tags, preemptible, detached } = options; + const image = await google.getMachineImage(options); + const { selfLink: imageUrl } = image; + + const username = getUsername(distro || os); + const userData = getUserData({ ...options, username }); + + /** @type {Record} */ + let metadata; + if (os === "windows") { + metadata = { + "enable-windows-ssh": "TRUE", + "sysprep-specialize-script-ps1": userData, + }; + } else { + metadata = { + "user-data": userData, + }; + } + + const instance = await google.createInstance({ + "name": name, + "zone": "us-central1-a", + "image": imageUrl, + "machine-type": instanceType || (arch === "aarch64" ? "t2a-standard-2" : "t2d-standard-2"), + "boot-disk-auto-delete": true, + "boot-disk-size": `${getDiskSize(options)}GB`, + "metadata": this.getMetadata(metadata), + "labels": Object.entries(tags || {}) + .filter(([, value]) => value !== undefined) + .map(([key, value]) => `${this.getLabel(key)}=${value}`) + .join(","), + "provisioning-model": preemptible ? "SPOT" : "STANDARD", + "instance-termination-action": preemptible || !detached ? "DELETE" : undefined, + "no-restart-on-failure": true, + "threads-per-core": 1, + "max-run-duration": detached ? 
undefined : "6h", + }); + + return this.toMachine(instance, options); + }, + + /** + * @param {GoogleInstance} instance + * @param {MachineOptions} [options] + * @returns {Machine} + */ + toMachine(instance, options = {}) { + const { id: instanceId, name, zone: zoneUrl, machineType: machineTypeUrl, labels } = instance; + const machineType = machineTypeUrl.split("/").pop(); + const zoneId = zoneUrl.split("/").pop(); + + let os, arch, distro, release; + const { disks = [] } = instance; + for (const { boot, architecture, licenses = [] } of disks) { + if (!boot) { + continue; + } + + if (architecture === "X86_64") { + arch = "x64"; + } else if (architecture === "ARM64") { + arch = "aarch64"; + } + + for (const license of licenses) { + const linuxMatch = /(debian|ubuntu|fedora|rhel)-(\d+)/i.exec(license); + if (linuxMatch) { + os = "linux"; + [, distro, release] = linuxMatch; + } else { + const windowsMatch = /windows-server-(\d+)-dc-core/i.exec(license); + if (windowsMatch) { + os = "windows"; + distro = "windowsserver"; + [, release] = windowsMatch; + } + } + } + } + + let publicIp; + const { networkInterfaces = [] } = instance; + for (const { accessConfigs = [] } of networkInterfaces) { + for (const { type, natIP } of accessConfigs) { + if (type === "ONE_TO_ONE_NAT" && natIP) { + publicIp = natIP; + } + } + } + + let preemptible; + const { scheduling } = instance; + if (scheduling) { + const { provisioningModel, preemptible: isPreemptible } = scheduling; + preemptible = provisioningModel === "SPOT" || isPreemptible; + } + + /** + * @returns {SshOptions} + */ + const connect = () => { + if (!publicIp) { + throw new Error(`Failed to find public IP for instance: ${name}`); + } + + /** @type {string | undefined} */ + let username; + + const { os, distro } = options; + if (os || distro) { + username = getUsername(distro || os); + } + + return { hostname: publicIp, username }; }; - const spawn = command => { - const hostname = publicIp(); - return spawnSsh({ hostname, username, command }); + const spawn = async (command, options) => { + const connectOptions = connect(); + return spawnSsh({ ...connectOptions, command }, options); }; - const spawnSafe = command => { - const hostname = publicIp(); - return spawnSshSafe({ hostname, username, command }); + const spawnSafe = async (command, options) => { + const connectOptions = connect(); + return spawnSshSafe({ ...connectOptions, command }, options); + }; + + const rdp = async () => { + const { hostname, username } = connect(); + const rdpUsername = `${username}-rdp`; + const password = await google.resetWindowsPassword(instanceId, rdpUsername, zoneId, { wait: true }); + return { hostname, username: rdpUsername, password }; }; const attach = async () => { - const hostname = publicIp(); - await spawnSshSafe({ hostname, username }); + const connectOptions = connect(); + await spawnSshSafe({ ...connectOptions }); + }; + + const upload = async (source, destination) => { + const connectOptions = connect(); + await spawnScp({ ...connectOptions, source, destination }); + }; + + const snapshot = async name => { + const stopResult = await this.stopInstance(instanceId, zoneId); + console.log(stopResult); + const image = await this.createImage({ + ["source-disk"]: instanceId, + ["zone"]: zoneId, + ["name"]: name || `${instanceId}-snapshot-${Date.now()}`, + }); + console.log(image); + return; }; const terminate = async () => { - await google.deleteInstance(id); + await google.deleteInstance(instanceId, zoneId); }; return { + cloud: "google", + os, + arch, + 
distro, + release, + id: instanceId, + imageId: undefined, + name, + instanceType: machineType, + region: zoneId, + publicIp, + preemptible, + labels, spawn, spawnSafe, + rdp, attach, + upload, + snapshot, close: terminate, [Symbol.asyncDispose]: terminate, }; }, - async getImage(platform) { - const { os, arch, distro, release } = platform; + /** + * @param {Record} [labels] + * @returns {Promise} + */ + async getMachines(labels) { + const filters = labels ? this.getFilters({ labels }) : {}; + const instances = await google.listInstances(filters); + return instances.map(instance => this.toMachine(instance)); + }, + + /** + * @param {MachineOptions} options + * @returns {Promise} + */ + async getImage(options) { + const { os, arch, distro, release } = options; const architecture = arch === "aarch64" ? "ARM64" : "X86_64"; let name; @@ -623,49 +1850,6 @@ const google = { throw new Error(`Unsupported platform: ${inspect(platform)}`); }, - - async listImages(options = {}) { - const filter = Object.entries(options) - .map(([key, value]) => [value.includes("*") ? `${key}~${value}` : `${key}=${value}`]) - .join(" AND "); - const filters = filter ? ["--filter", filter] : []; - const { stdout } = await spawnSafe(["gcloud", "compute", "images", "list", ...filters, "--format", "json"]); - const images = JSON.parse(stdout); - return images.sort((a, b) => (a.creationTimestamp < b.creationTimestamp ? 1 : -1)); - }, - - async listInstances(options = {}) { - const filter = Object.entries(options) - .map(([key, value]) => [value.includes("*") ? `${key}~${value}` : `${key}=${value}`]) - .join(" AND "); - const filters = filter ? ["--filter", filter] : []; - const { stdout } = await spawnSafe(["gcloud", "compute", "instances", "list", ...filters, "--format", "json"]); - const instances = JSON.parse(stdout); - return instances.sort((a, b) => (a.creationTimestamp < b.creationTimestamp ? 1 : -1)); - }, - - async createInstances(options = {}) { - const flags = Object.entries(options).flatMap(([key, value]) => - typeof value === "boolean" ? `--${key}` : `--${key}=${value}`, - ); - const randomId = "i-" + Math.random().toString(36).substring(2, 15); - const { stdout } = await spawnSafe([ - "gcloud", - "compute", - "instances", - "create", - randomId, - ...flags, - "--format", - "json", - ]); - const instances = JSON.parse(stdout); - return instances.sort((a, b) => (a.creationTimestamp < b.creationTimestamp ? 
1 : -1)); - }, - - async deleteInstance(instanceId) { - await spawnSafe(["gcloud", "compute", "instances", "delete", instanceId, "--zone", "us-central1-a", "--quiet"]); - }, }; /** @@ -676,6 +1860,10 @@ const google = { * @property {string} [password] */ +/** + * @param {CloudInit} cloudInit + * @returns {string} + */ function getUserData(cloudInit) { const { os } = cloudInit; if (os === "windows") { @@ -705,24 +1893,28 @@ function getCloudInit(cloudInit) { break; } + let users; + if (username === "root") { + users = [`root:${password}`]; + } else { + users = [`root:${password}`, `${username}:${password}`]; + } + // https://cloudinit.readthedocs.io/en/stable/ return `#cloud-config - write_files: - path: /etc/ssh/sshd_config content: | PermitRootLogin yes - PasswordAuthentication yes + PasswordAuthentication no + PubkeyAuthentication yes + UsePAM yes + UseLogin yes Subsystem sftp ${sftpPath} - chpasswd: expire: false - list: | - root:${password} - ${username}:${password} - + list: ${JSON.stringify(users)} disable_root: false - ssh_pwauth: true ssh_authorized_keys: ${authorizedKeys} `; @@ -734,39 +1926,47 @@ function getCloudInit(cloudInit) { */ function getWindowsStartupScript(cloudInit) { const { sshKeys } = cloudInit; - const authorizedKeys = sshKeys.filter(({ publicKey }) => publicKey).map(({ publicKey }) => publicKey); + const authorizedKeys = sshKeys.map(({ publicKey }) => publicKey); return ` $ErrorActionPreference = "Stop" Set-ExecutionPolicy -Scope Process -ExecutionPolicy Bypass -Force function Install-Ssh { - $sshService = Get-WindowsCapability -Online | Where-Object Name -like 'OpenSSH.Server*' - if ($sshService.State -ne "Installed") { - Write-Output "Installing OpenSSH server..." - Add-WindowsCapability -Online -Name OpenSSH.Server~~~~0.0.1.0 + $sshdService = Get-Service -Name sshd -ErrorAction SilentlyContinue + if (-not $sshdService) { + $buildNumber = Get-WmiObject Win32_OperatingSystem | Select-Object -ExpandProperty BuildNumber + if ($buildNumber -lt 17763) { + Write-Output "Installing OpenSSH server through Github..." + [Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12 + Invoke-WebRequest -Uri "https://github.com/PowerShell/Win32-OpenSSH/releases/download/v9.8.0.0p1-Preview/OpenSSH-Win64.zip" -OutFile "$env:TEMP\\OpenSSH.zip" + Expand-Archive -Path "$env:TEMP\\OpenSSH.zip" -DestinationPath "$env:TEMP\\OpenSSH" -Force + Get-ChildItem -Path "$env:TEMP\\OpenSSH\\OpenSSH-Win64" -Recurse | Move-Item -Destination "$env:ProgramFiles\\OpenSSH" -Force + & "$env:ProgramFiles\\OpenSSH\\install-sshd.ps1" + } else { + Write-Output "Installing OpenSSH server through Windows Update..." + Add-WindowsCapability -Online -Name OpenSSH.Server~~~~0.0.1.0 + } } + Write-Output "Enabling OpenSSH server..." + Set-Service -Name sshd -StartupType Automatic + Start-Service sshd + $pwshPath = Get-Command pwsh -ErrorAction SilentlyContinue | Select-Object -ExpandProperty Path if (-not $pwshPath) { $pwshPath = Get-Command powershell -ErrorAction SilentlyContinue | Select-Object -ExpandProperty Path } - if (-not (Get-Service -Name sshd -ErrorAction SilentlyContinue)) { - Write-Output "Enabling OpenSSH server..." - Set-Service -Name sshd -StartupType Automatic - Start-Service sshd - } - if ($pwshPath) { Write-Output "Setting default shell to $pwshPath..." 
New-ItemProperty -Path "HKLM:\\SOFTWARE\\OpenSSH" -Name DefaultShell -Value $pwshPath -PropertyType String -Force } - $firewallRule = Get-NetFirewallRule -Name "OpenSSH-Server-In-TCP" -ErrorAction SilentlyContinue + $firewallRule = Get-NetFirewallRule -Name "OpenSSH-Server" -ErrorAction SilentlyContinue if (-not $firewallRule) { Write-Output "Configuring firewall..." - New-NetFirewallRule -Name 'OpenSSH-Server-In-TCP' -DisplayName 'OpenSSH Server (sshd)' -Enabled True -Direction Inbound -Protocol TCP -Action Allow -LocalPort 22 + New-NetFirewallRule -Profile Any -Name 'OpenSSH-Server' -DisplayName 'OpenSSH Server (sshd)' -Enabled True -Direction Inbound -Protocol TCP -Action Allow -LocalPort 22 } $sshPath = "C:\\ProgramData\\ssh" @@ -841,13 +2041,19 @@ function getDiskSize(options) { return diskSizeGb; } - return os === "windows" ? 50 : 30; + // After Visual Studio and dependencies are installed, + // there is ~50GB of used disk space. + if (os === "windows") { + return 60; + } + + return 30; } /** * @typedef SshKey - * @property {string} privatePath - * @property {string} publicPath + * @property {string} [privatePath] + * @property {string} [publicPath] * @property {string} publicKey */ @@ -855,24 +2061,27 @@ function getDiskSize(options) { * @returns {SshKey} */ function createSshKey() { + const sshKeyGen = which("ssh-keygen", { required: true }); + const sshAdd = which("ssh-add", { required: true }); + const sshPath = join(homedir(), ".ssh"); - if (!existsSync(sshPath)) { - mkdirSync(sshPath, { recursive: true }); - } - - const name = `id_rsa_${crypto.randomUUID()}`; - const privatePath = join(sshPath, name); - const publicPath = join(sshPath, `${name}.pub`); - spawnSyncSafe(["ssh-keygen", "-t", "rsa", "-b", "4096", "-f", privatePath, "-N", ""], { stdio: "inherit" }); + mkdir(sshPath); + const filename = `id_rsa_${crypto.randomUUID()}`; + const privatePath = join(sshPath, filename); + const publicPath = join(sshPath, `${filename}.pub`); + spawnSyncSafe([sshKeyGen, "-t", "rsa", "-b", "4096", "-f", privatePath, "-N", ""], { stdio: "inherit" }); if (!existsSync(privatePath) || !existsSync(publicPath)) { throw new Error(`Failed to generate SSH key: ${privatePath} / ${publicPath}`); } - const sshAgent = which("ssh-agent"); - const sshAdd = which("ssh-add"); - if (sshAgent && sshAdd) { - spawnSyncSafe(["sh", "-c", `eval $(${sshAgent} -s) && ${sshAdd} ${privatePath}`], { stdio: "inherit" }); + if (isWindows) { + spawnSyncSafe([sshAdd, privatePath], { stdio: "inherit" }); + } else { + const sshAgent = which("ssh-agent"); + if (sshAgent) { + spawnSyncSafe(["sh", "-c", `eval $(${sshAgent} -s) && ${sshAdd} ${privatePath}`], { stdio: "inherit" }); + } } return { @@ -894,7 +2103,7 @@ function getSshKeys() { /** @type {SshKey[]} */ const sshKeys = []; if (existsSync(sshPath)) { - const sshFiles = readdirSync(sshPath, { withFileTypes: true }); + const sshFiles = readdirSync(sshPath, { withFileTypes: true, encoding: "utf-8" }); const publicPaths = sshFiles .filter(entry => entry.isFile() && entry.name.endsWith(".pub")) .map(({ name }) => join(sshPath, name)); @@ -917,11 +2126,41 @@ function getSshKeys() { return sshKeys; } +/** + * @param {string} username + * @returns {Promise} + */ +async function getGithubUserSshKeys(username) { + const url = new URL(`${username}.keys`, getGithubUrl()); + const publicKeys = await curlSafe(url); + return publicKeys + .split("\n") + .filter(key => key.length) + .map(key => ({ publicKey: `${key} github@${username}` })); +} + +/** + * @param {string} organization + * 
@returns {Promise} + */ +async function getGithubOrgSshKeys(organization) { + const url = new URL(`orgs/${encodeURIComponent(organization)}/members`, getGithubApiUrl()); + const members = await curlSafe(url, { json: true }); + + /** @type {SshKey[][]} */ + const sshKeys = await Promise.all( + members.filter(({ type, login }) => type === "User" && login).map(({ login }) => getGithubUserSshKeys(login)), + ); + + return sshKeys.flat(); +} + /** * @typedef SshOptions * @property {string} hostname * @property {number} [port] * @property {string} [username] + * @property {string} [password] * @property {string[]} [command] * @property {string[]} [identityPaths] * @property {number} [retries] @@ -929,56 +2168,76 @@ function getSshKeys() { /** * @param {SshOptions} options - * @param {object} [spawnOptions] + * @param {import("./utils.mjs").SpawnOptions} [spawnOptions] * @returns {Promise} */ async function spawnSsh(options, spawnOptions = {}) { - const { hostname, port, username, identityPaths, command } = options; - await waitForPort({ hostname, port: port || 22 }); + const { hostname, port, username, identityPaths, password, retries = 10, command: spawnCommand } = options; - const ssh = ["ssh", hostname, "-o", "StrictHostKeyChecking=no", "-o", "BatchMode=yes"]; + if (!hostname.includes("@")) { + await waitForPort({ + hostname, + port: port || 22, + }); + } + + const logPath = mkdtemp("ssh-", "ssh.log"); + const command = ["ssh", hostname, "-v", "-C", "-E", logPath, "-o", "StrictHostKeyChecking=no"]; + if (!password) { + command.push("-o", "BatchMode=yes"); + } if (port) { - ssh.push("-p", port); + command.push("-p", port); } if (username) { - ssh.push("-l", username); + command.push("-l", username); } - if (identityPaths) { - ssh.push(...identityPaths.flatMap(path => ["-i", path])); + if (password) { + const sshPass = which("sshpass", { required: true }); + command.unshift(sshPass, "-p", password); + } else if (identityPaths) { + command.push(...identityPaths.flatMap(path => ["-i", path])); } - const stdio = command ? "pipe" : "inherit"; - if (command) { - ssh.push(...command); + const stdio = spawnCommand ? "pipe" : "inherit"; + if (spawnCommand) { + command.push(...spawnCommand); } - return spawn(ssh, { stdio, ...spawnOptions }); + /** @type {import("./utils.mjs").SpawnResult} */ + let result; + for (let i = 0; i < retries; i++) { + result = await spawn(command, { stdio, ...spawnOptions, throwOnError: undefined }); + + const { exitCode } = result; + if (exitCode !== 255) { + break; + } + + const sshLogs = readFile(logPath, { encoding: "utf-8" }); + if (sshLogs.includes("Authenticated")) { + break; + } + + await new Promise(resolve => setTimeout(resolve, (i + 1) * 15000)); + } + + if (spawnOptions?.throwOnError) { + const { error } = result; + if (error) { + throw error; + } + } + + return result; } /** * @param {SshOptions} options - * @param {object} [spawnOptions] + * @param {import("./utils.mjs").SpawnOptions} [spawnOptions] * @returns {Promise} */ async function spawnSshSafe(options, spawnOptions = {}) { - const { hostname, port, username, identityPaths, command } = options; - await waitForPort({ hostname, port: port || 22 }); - - const ssh = ["ssh", hostname, "-o", "StrictHostKeyChecking=no", "-o", "BatchMode=yes"]; - if (port) { - ssh.push("-p", port); - } - if (username) { - ssh.push("-l", username); - } - if (identityPaths) { - ssh.push(...identityPaths.flatMap(path => ["-i", path])); - } - const stdio = command ? 
"pipe" : "inherit"; - if (command) { - ssh.push(...command); - } - - return spawnSafe(ssh, { stdio, ...spawnOptions }); + return spawnSsh(options, { throwOnError: true, ...spawnOptions }); } /** @@ -997,14 +2256,20 @@ async function spawnSshSafe(options, spawnOptions = {}) { * @returns {Promise} */ async function spawnScp(options) { - const { hostname, port, username, identityPaths, source, destination, retries = 10 } = options; + const { hostname, port, username, identityPaths, password, source, destination, retries = 10 } = options; await waitForPort({ hostname, port: port || 22 }); - const command = ["scp", "-o", "StrictHostKeyChecking=no", "-o", "BatchMode=yes"]; + const command = ["scp", "-o", "StrictHostKeyChecking=no"]; + if (!password) { + command.push("-o", "BatchMode=yes"); + } if (port) { command.push("-P", port); } - if (identityPaths) { + if (password) { + const sshPass = which("sshpass", { required: true }); + command.unshift(sshPass, "-p", password); + } else if (identityPaths) { command.push(...identityPaths.flatMap(path => ["-i", path])); } command.push(resolve(source)); @@ -1032,6 +2297,53 @@ async function spawnScp(options) { throw new Error(`SCP failed: ${source} -> ${username}@${hostname}:${destination}`, { cause }); } +/** + * @param {string} passwordData + * @param {string} privateKeyPath + * @returns {string} + */ +function decryptPassword(passwordData, privateKeyPath) { + const name = basename(privateKeyPath, extname(privateKeyPath)); + const tmpPemPath = mkdtemp("pem-", `${name}.pem`); + try { + copyFile(privateKeyPath, tmpPemPath, { mode: 0o600 }); + spawnSyncSafe(["ssh-keygen", "-p", "-m", "PEM", "-f", tmpPemPath, "-N", ""]); + const { stdout } = spawnSyncSafe( + ["openssl", "pkeyutl", "-decrypt", "-inkey", tmpPemPath, "-pkeyopt", "rsa_padding_mode:pkcs1"], + { + stdin: Buffer.from(passwordData, "base64"), + }, + ); + return stdout.trim(); + } finally { + rm(tmpPemPath); + } +} + +/** + * @typedef RdpCredentials + * @property {string} hostname + * @property {string} username + * @property {string} password + */ + +/** + * @param {string} hostname + * @param {string} [username] + * @param {string} [password] + * @returns {string} + */ +function getRdpFile(hostname, username) { + const options = [ + "auto connect:i:1", // start the connection automatically + `full address:s:${hostname}`, + ]; + if (username) { + options.push(`username:s:${username}`); + } + return options.join("\n"); +} + /** * @typedef Cloud * @property {string} name @@ -1044,52 +2356,80 @@ async function spawnScp(options) { */ function getCloud(name) { switch (name) { + case "docker": + return docker; + case "orbstack": + return orbstack; + case "tart": + return tart; case "aws": return aws; + case "google": + return google; } throw new Error(`Unsupported cloud: ${name}`); } /** - * @typedef Machine + * @typedef {"linux" | "darwin" | "windows"} Os + * @typedef {"aarch64" | "x64"} Arch + * @typedef {"macos" | "windowsserver" | "debian" | "ubuntu" | "alpine" | "amazonlinux"} Distro + */ + +/** + * @typedef {Object} Platform + * @property {Os} os + * @property {Arch} arch + * @property {Distro} distro + * @property {string} release + * @property {string} [eol] + */ + +/** + * @typedef {Object} Machine * @property {string} cloud + * @property {Os} [os] + * @property {Arch} [arch] + * @property {Distro} [distro] + * @property {string} [release] * @property {string} [name] * @property {string} id * @property {string} imageId * @property {string} instanceType * @property {string} region * @property 
{string} [publicIp] - * @property {(command: string[]) => Promise} spawn - * @property {(command: string[]) => Promise} spawnSafe + * @property {boolean} [preemptible] + * @property {Record} tags + * @property {(command: string[], options?: import("./utils.mjs").SpawnOptions) => Promise} spawn + * @property {(command: string[], options?: import("./utils.mjs").SpawnOptions) => Promise} spawnSafe * @property {(source: string, destination: string) => Promise} upload + * @property {() => Promise} [rdp] * @property {() => Promise} attach * @property {() => Promise} snapshot * @property {() => Promise} close */ -/** - * @typedef {"linux" | "darwin" | "windows"} Os - * @typedef {"aarch64" | "x64"} Arch - */ - /** * @typedef MachineOptions * @property {Cloud} cloud * @property {Os} os * @property {Arch} arch - * @property {string} distro - * @property {string} [distroVersion] + * @property {Distro} distro + * @property {string} [release] + * @property {string} [name] + * @property {string} [instanceType] * @property {string} [imageId] * @property {string} [imageName] * @property {number} [cpuCount] * @property {number} [memoryGb] * @property {number} [diskSizeGb] - * @property {boolean} [persistent] + * @property {boolean} [preemptible] * @property {boolean} [detached] * @property {Record} [tags] * @property {boolean} [bootstrap] * @property {boolean} [ci] - * @property {SshKey[]} [sshKeys] + * @property {boolean} [rdp] + * @property {SshKey[]} sshKeys */ async function main() { @@ -1111,52 +2451,74 @@ async function main() { "os": { type: "string", default: "linux" }, "arch": { type: "string", default: "x64" }, "distro": { type: "string" }, - "distro-version": { type: "string" }, + "release": { type: "string" }, + "name": { type: "string" }, "instance-type": { type: "string" }, "image-id": { type: "string" }, "image-name": { type: "string" }, "cpu-count": { type: "string" }, "memory-gb": { type: "string" }, "disk-size-gb": { type: "string" }, - "persistent": { type: "boolean" }, + "preemptible": { type: "boolean" }, + "spot": { type: "boolean" }, "detached": { type: "boolean" }, "tag": { type: "string", multiple: true }, "ci": { type: "boolean" }, + "rdp": { type: "boolean" }, + "vnc": { type: "boolean" }, + "authorized-user": { type: "string", multiple: true }, + "authorized-org": { type: "string", multiple: true }, "no-bootstrap": { type: "boolean" }, "buildkite-token": { type: "string" }, "tailscale-authkey": { type: "string" }, }, }); + const sshKeys = getSshKeys(); + if (args["authorized-user"]) { + const userSshKeys = await Promise.all(args["authorized-user"].map(getGithubUserSshKeys)); + sshKeys.push(...userSshKeys.flat()); + } + if (args["authorized-org"]) { + const orgSshKeys = await Promise.all(args["authorized-org"].map(getGithubOrgSshKeys)); + sshKeys.push(...orgSshKeys.flat()); + } + + const tags = { + "robobun": "true", + "robobun2": "true", + "buildkite:token": args["buildkite-token"], + "tailscale:authkey": args["tailscale-authkey"], + ...Object.fromEntries(args["tag"]?.map(tag => tag.split("=")) ?? 
[]), + }; + + const cloud = getCloud(args["cloud"]); + /** @type {MachineOptions} */ const options = { - cloud: getCloud(args["cloud"]), + cloud: args["cloud"], os: parseOs(args["os"]), arch: parseArch(args["arch"]), distro: args["distro"], - distroVersion: args["distro-version"], + release: args["release"], + name: args["name"], instanceType: args["instance-type"], imageId: args["image-id"], imageName: args["image-name"], - tags: { - "robobun": "true", - "robobun2": "true", - "buildkite:token": args["buildkite-token"], - "tailscale:authkey": args["tailscale-authkey"], - ...Object.fromEntries(args["tag"]?.map(tag => tag.split("=")) ?? []), - }, + tags, cpuCount: parseInt(args["cpu-count"]) || undefined, memoryGb: parseInt(args["memory-gb"]) || undefined, diskSizeGb: parseInt(args["disk-size-gb"]) || undefined, - persistent: !!args["persistent"], + preemptible: !!args["preemptible"] || !!args["spot"], detached: !!args["detached"], bootstrap: args["no-bootstrap"] !== true, ci: !!args["ci"], - sshKeys: getSshKeys(), + rdp: !!args["rdp"] || !!args["vnc"], + sshKeys, }; - const { cloud, detached, bootstrap, ci, os, arch, distro, distroVersion } = options; - const name = `${os}-${arch}-${distro}-${distroVersion}`; + const { detached, bootstrap, ci, os, arch, distro, release } = options; + const name = distro ? `${os}-${arch}-${distro}-${release}` : `${os}-${arch}-${release}`; let bootstrapPath, agentPath; if (bootstrap) { @@ -1178,9 +2540,26 @@ async function main() { /** @type {Machine} */ const machine = await startGroup("Creating machine...", async () => { - console.log("Creating machine:", JSON.parse(JSON.stringify(options))); + console.log("Creating machine:"); + console.table({ + "Operating System": os, + "Architecture": arch, + "Distribution": distro ? `${distro} ${release}` : release, + "CI": ci ? "Yes" : "No", + }); + const result = await cloud.createMachine(options); - console.log("Created machine:", result); + const { id, name, imageId, instanceType, region, publicIp } = result; + console.log("Created machine:"); + console.table({ + "ID": id, + "Name": name || "N/A", + "Image ID": imageId, + "Instance Type": instanceType, + "Region": region, + "IP Address": publicIp || "TBD", + }); + return result; }); @@ -1201,7 +2580,38 @@ async function main() { } try { - await startGroup("Connecting...", async () => { + if (options.rdp) { + await startGroup("Connecting with RDP...", async () => { + const { hostname, username, password } = await machine.rdp(); + + console.log("You can now connect with RDP using these credentials:"); + console.table({ + Hostname: hostname, + Username: username, + Password: password, + }); + + const { cloud, id } = machine; + const rdpPath = mkdtemp("rdp-", `${cloud}-${id}.rdp`); + + /** @type {string[]} */ + let command; + if (isMacOS) { + command = [ + "osascript", + "-e", + `'tell application "Microsoft Remote Desktop" to open POSIX file ${JSON.stringify(rdpPath)}'`, + ]; + } + + if (command) { + writeFile(rdpPath, getRdpFile(hostname, username)); + await spawn(command, { detached: true }); + } + }); + } + + await startGroup("Connecting with SSH...", async () => { const command = os === "windows" ? 
["cmd", "/c", "ver"] : ["uname", "-a"]; await machine.spawnSafe(command, { stdio: "inherit" }); }); @@ -1226,12 +2636,14 @@ async function main() { if (agentPath) { if (os === "windows") { - // TODO - // const remotePath = "C:\\Windows\\Temp\\agent.mjs"; - // await startGroup("Installing agent...", async () => { - // await machine.upload(agentPath, remotePath); - // await machine.spawnSafe(["node", remotePath, "install"], { stdio: "inherit" }); - // }); + const remotePath = "C:\\buildkite-agent\\agent.mjs"; + await startGroup("Installing agent...", async () => { + await machine.upload(agentPath, remotePath); + if (cloud.name === "docker") { + return; + } + await machine.spawnSafe(["node", remotePath, "install"], { stdio: "inherit" }); + }); } else { const tmpPath = "/tmp/agent.mjs"; const remotePath = "/var/lib/buildkite-agent/agent.mjs"; @@ -1245,6 +2657,9 @@ async function main() { } } await machine.spawnSafe([...command, "cp", tmpPath, remotePath]); + if (cloud.name === "docker") { + return; + } { const { stdout } = await machine.spawn(["node", "-v"]); const version = parseInt(stdout.trim().replace(/^v/, "")); @@ -1262,7 +2677,7 @@ async function main() { if (command === "create-image" || command === "publish-image") { let suffix; if (command === "publish-image") { - suffix = `v${getBootstrapVersion()}`; + suffix = `v${getBootstrapVersion(os)}`; } else if (isCI) { suffix = `build-${getBuildNumber()}`; } else { diff --git a/scripts/runner.node.mjs b/scripts/runner.node.mjs index 483f918a27..3fa3eaa66a 100755 --- a/scripts/runner.node.mjs +++ b/scripts/runner.node.mjs @@ -17,7 +17,6 @@ import { accessSync, appendFileSync, readdirSync, - rmSync, } from "node:fs"; import { spawn, spawnSync } from "node:child_process"; import { join, basename, dirname, relative, sep } from "node:path"; @@ -27,6 +26,8 @@ import { getBuildUrl, getEnv, getFileUrl, + getLoggedInUserCount, + getShell, getWindowsExitReason, isArm64, isBuildkite, @@ -59,6 +60,10 @@ const { values: options, positionals: filters } = parseArgs({ type: "string", default: undefined, }, + ["build-id"]: { + type: "string", + default: undefined, + }, ["bail"]: { type: "boolean", default: false, @@ -99,32 +104,7 @@ const { values: options, positionals: filters } = parseArgs({ async function runTests() { let execPath; if (options["step"]) { - downloadLoop: for (let i = 0; i < 10; i++) { - execPath = await getExecPathFromBuildKite(options["step"]); - for (let j = 0; j < 10; j++) { - const { error } = spawnSync(execPath, ["--version"], { - encoding: "utf-8", - timeout: spawnTimeout, - env: { - PATH: process.env.PATH, - BUN_DEBUG_QUIET_LOGS: 1, - }, - }); - if (!error) { - break downloadLoop; - } - const { code } = error; - if (code === "EBUSY") { - console.log("Bun appears to be busy, retrying..."); - continue; - } - if (code === "UNKNOWN") { - console.log("Bun appears to be corrupted, downloading again..."); - rmSync(execPath, { force: true }); - continue downloadLoop; - } - } - } + execPath = await getExecPathFromBuildKite(options["step"], options["build-id"]); } else { execPath = getExecPath(options["exec-path"]); } @@ -482,12 +462,14 @@ async function spawnBun(execPath, { args, cwd, timeout, env, stdout, stderr }) { const path = addPath(dirname(execPath), process.env.PATH); const tmpdirPath = mkdtempSync(join(tmpdir(), "buntmp-")); const { username, homedir } = userInfo(); + const shellPath = getShell(); const bunEnv = { ...process.env, PATH: path, TMPDIR: tmpdirPath, USER: username, HOME: homedir, + SHELL: shellPath, FORCE_COLOR: "1", 
BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING: "1", BUN_DEBUG_QUIET_LOGS: "1", @@ -604,7 +586,7 @@ async function spawnBunTest(execPath, testPath, options = { cwd }) { * @returns {number} */ function getTestTimeout(testPath) { - if (/integration|3rd_party|docker/i.test(testPath)) { + if (/integration|3rd_party|docker|bun-install-registry|v8/i.test(testPath)) { return integrationTimeout; } return testTimeout; @@ -1080,9 +1062,10 @@ function getExecPath(bunExe) { /** * @param {string} target + * @param {string} [buildId] * @returns {Promise} */ -async function getExecPathFromBuildKite(target) { +async function getExecPathFromBuildKite(target, buildId) { if (existsSync(target) || target.includes("/")) { return getExecPath(target); } @@ -1090,23 +1073,27 @@ async function getExecPathFromBuildKite(target) { const releasePath = join(cwd, "release"); mkdirSync(releasePath, { recursive: true }); - const args = ["artifact", "download", "**", releasePath, "--step", target]; - const buildId = process.env["BUILDKITE_ARTIFACT_BUILD_ID"]; - if (buildId) { - args.push("--build", buildId); - } - - await spawnSafe({ - command: "buildkite-agent", - args, - }); - let zipPath; - for (const entry of readdirSync(releasePath, { recursive: true, encoding: "utf-8" })) { - if (/^bun.*\.zip$/i.test(entry) && !entry.includes("-profile.zip")) { - zipPath = join(releasePath, entry); - break; + downloadLoop: for (let i = 0; i < 10; i++) { + const args = ["artifact", "download", "**", releasePath, "--step", target]; + if (buildId) { + args.push("--build", buildId); } + + await spawnSafe({ + command: "buildkite-agent", + args, + }); + + for (const entry of readdirSync(releasePath, { recursive: true, encoding: "utf-8" })) { + if (/^bun.*\.zip$/i.test(entry) && !entry.includes("-profile.zip")) { + zipPath = join(releasePath, entry); + break downloadLoop; + } + } + + console.warn(`Waiting for ${target}.zip to be available...`); + await new Promise(resolve => setTimeout(resolve, i * 1000)); } if (!zipPath) { @@ -1115,13 +1102,15 @@ async function getExecPathFromBuildKite(target) { await unzip(zipPath, releasePath); - for (const entry of readdirSync(releasePath, { recursive: true, encoding: "utf-8" })) { + const releaseFiles = readdirSync(releasePath, { recursive: true, encoding: "utf-8" }); + for (const entry of releaseFiles) { const execPath = join(releasePath, entry); - if (/bun(?:\.exe)?$/i.test(entry) && isExecutable(execPath)) { + if (/bun(?:\.exe)?$/i.test(entry) && statSync(execPath).isFile()) { return execPath; } } + console.warn(`Found ${releaseFiles.length} files in ${releasePath}:`); throw new Error(`Could not find executable from BuildKite: ${releasePath}`); } @@ -1466,8 +1455,39 @@ export async function main() { } printEnvironment(); + + // FIXME: Some DNS tests hang unless we set the DNS server to 8.8.8.8 + // It also appears to hang on 1.1.1.1, which could explain this issue: + // https://github.com/oven-sh/bun/issues/11136 + if (isWindows && isCI) { + await spawn("pwsh", [ + "-Command", + "Set-DnsClientServerAddress -InterfaceAlias 'Ethernet 4' -ServerAddresses ('8.8.8.8','8.8.4.4')", + ]); + } + const results = await runTests(); const ok = results.every(({ ok }) => ok); + + let waitForUser = false; + while (isCI) { + const userCount = getLoggedInUserCount(); + if (!userCount) { + if (waitForUser) { + console.log("No users logged in, exiting runner..."); + } + break; + } + + if (!waitForUser) { + startGroup("Summary"); + console.warn(`Found ${userCount} users logged in, keeping the runner alive until logout...`); + 
waitForUser = true; + } + + await new Promise(resolve => setTimeout(resolve, 60_000)); + } + process.exit(getExitCode(ok ? "pass" : "fail")); } diff --git a/scripts/utils.mjs b/scripts/utils.mjs index eb22222296..198712a34c 100755 --- a/scripts/utils.mjs +++ b/scripts/utils.mjs @@ -6,6 +6,7 @@ import { createHash } from "node:crypto"; import { appendFileSync, chmodSync, + copyFileSync, existsSync, mkdirSync, mkdtempSync, @@ -14,7 +15,7 @@ import { writeFileSync, } from "node:fs"; import { connect } from "node:net"; -import { hostname, tmpdir as nodeTmpdir, userInfo, release } from "node:os"; +import { hostname, tmpdir as nodeTmpdir, homedir as nodeHomedir, userInfo, release } from "node:os"; import { dirname, join, relative, resolve } from "node:path"; import { normalize as normalizeWindows } from "node:path/win32"; @@ -118,6 +119,8 @@ export function setEnv(name, value) { * @property {string} [cwd] * @property {number} [timeout] * @property {Record} [env] + * @property {boolean | ((error: Error) => boolean)} [throwOnError] + * @property {(error: Error) => boolean} [retryOnError] * @property {string} [stdin] * @property {boolean} [privileged] */ @@ -156,9 +159,6 @@ export function $(strings, ...values) { return result; } -/** @type {string[] | undefined} */ -let priviledgedCommand; - /** * @param {string[]} command * @param {SpawnOptions} options @@ -170,6 +170,9 @@ function parseCommand(command, options) { return command; } +/** @type {string[] | undefined} */ +let priviledgedCommand; + /** * @returns {string[]} */ @@ -203,6 +206,28 @@ function getPrivilegedCommand() { return (priviledgedCommand = []); } +/** @type {boolean | undefined} */ +let privileged; + +/** + * @returns {boolean} + */ +export function isPrivileged() { + if (typeof privileged !== "undefined") { + return privileged; + } + + const command = getPrivilegedCommand(); + if (command.length) { + const { error } = spawnSync(command); + privileged = !error; + } else { + privileged = false; + } + + return privileged; +} + /** * @param {string[]} command * @param {SpawnOptions} options @@ -279,6 +304,24 @@ export async function spawn(command, options = {}) { } } + if (error) { + const retryOnError = options["retryOnError"]; + if (typeof retryOnError === "function") { + if (retryOnError(error)) { + return spawn(command, options); + } + } + + const throwOnError = options["throwOnError"]; + if (typeof throwOnError === "function") { + if (throwOnError(error)) { + throw error; + } + } else if (throwOnError) { + throw error; + } + } + return { exitCode, signalCode, @@ -293,15 +336,8 @@ export async function spawn(command, options = {}) { * @param {SpawnOptions} options * @returns {Promise} */ -export async function spawnSafe(command, options) { - const result = await spawn(command, options); - - const { error } = result; - if (error) { - throw error; - } - - return result; +export async function spawnSafe(command, options = {}) { + return spawn(command, { throwOnError: true, ...options }); } /** @@ -313,11 +349,13 @@ export function spawnSync(command, options = {}) { const [cmd, ...args] = parseCommand(command, options); debugLog("$", cmd, ...args); + const stdin = options["stdin"]; const spawnOptions = { cwd: options["cwd"] ?? process.cwd(), timeout: options["timeout"] ?? undefined, env: options["env"] ?? undefined, - stdio: ["ignore", "pipe", "pipe"], + stdio: [typeof stdin === "undefined" ? 
"ignore" : "pipe", "pipe", "pipe"], + input: stdin, ...options, }; @@ -362,6 +400,24 @@ export function spawnSync(command, options = {}) { } } + if (error) { + const retryOnError = options["retryOnError"]; + if (typeof retryOnError === "function") { + if (retryOnError(error)) { + return spawn(command, options); + } + } + + const throwOnError = options["throwOnError"]; + if (typeof throwOnError === "function") { + if (throwOnError(error)) { + throw error; + } + } else if (throwOnError) { + throw error; + } + } + return { exitCode, signalCode, @@ -376,15 +432,8 @@ export function spawnSync(command, options = {}) { * @param {SpawnOptions} options * @returns {SpawnResult} */ -export function spawnSyncSafe(command, options) { - const result = spawnSync(command, options); - - const { error } = result; - if (error) { - throw error; - } - - return result; +export function spawnSyncSafe(command, options = {}) { + return spawnSync(command, { throwOnError: true, ...options }); } /** @@ -403,8 +452,8 @@ export function getWindowsExitReason(exitCode) { } /** - * @param {string} url - * @returns {URL} + * @param {string | URL} url + * @returns {URL | undefined} */ export function parseGitUrl(url) { const string = typeof url === "string" ? url : url.toString(); @@ -416,8 +465,20 @@ export function parseGitUrl(url) { if (/^https:\/\/github\.com\//.test(string)) { return new URL(string.slice(19).replace(/\.git$/, ""), githubUrl); } +} - throw new Error(`Unsupported git url: ${string}`); +/** + * @param {string | URL} url + * @returns {string | undefined} + */ +export function parseGitRepository(url) { + const parsed = parseGitUrl(url); + if (parsed) { + const { hostname, pathname } = parsed; + if (hostname == "github.com") { + return pathname.slice(1); + } + } } /** @@ -427,7 +488,7 @@ export function parseGitUrl(url) { export function getRepositoryUrl(cwd) { if (!cwd) { if (isBuildkite) { - const repository = getEnv("BUILDKITE_PULL_REQUEST_REPO", false) || getEnv("BUILDKITE_REPO", false); + const repository = getEnv("BUILDKITE_REPO", false); if (repository) { return parseGitUrl(repository); } @@ -464,9 +525,18 @@ export function getRepository(cwd) { const url = getRepositoryUrl(cwd); if (url) { - const { hostname, pathname } = new URL(url); - if (hostname == "github.com") { - return pathname.slice(1); + return parseGitRepository(url); + } +} + +/** + * @returns {string | undefined} + */ +export function getPullRequestRepository() { + if (isBuildkite) { + const repository = getEnv("BUILDKITE_PULL_REQUEST_REPO", false); + if (repository) { + return parseGitRepository(repository); } } } @@ -561,7 +631,7 @@ export function getBranch(cwd) { /** * @param {string} [cwd] - * @returns {string} + * @returns {string | undefined} */ export function getMainBranch(cwd) { if (!cwd) { @@ -684,7 +754,7 @@ export function isMergeQueue(cwd) { export function getGithubToken() { const cachedToken = getSecret("GITHUB_TOKEN", { required: false }); - if (typeof cachedToken === "string") { + if (typeof cachedToken === "string" || !which("gh")) { return cachedToken || undefined; } @@ -701,6 +771,7 @@ export function getGithubToken() { * @property {string} [body] * @property {Record} [headers] * @property {number} [timeout] + * @property {boolean} [cache] * @property {number} [retries] * @property {boolean} [json] * @property {boolean} [arrayBuffer] @@ -715,6 +786,9 @@ export function getGithubToken() { * @property {any} body */ +/** @type {Record} */ +let cachedResults; + /** * @param {string} url * @param {CurlOptions} [options] 
@@ -730,6 +804,15 @@ export async function curl(url, options = {}) { let arrayBuffer = options["arrayBuffer"]; let filename = options["filename"]; + let cacheKey; + let cache = options["cache"]; + if (cache) { + cacheKey = `${method} ${href}`; + if (cachedResults?.[cacheKey]) { + return cachedResults[cacheKey]; + } + } + if (typeof headers["Authorization"] === "undefined") { if (hostname === "api.github.com" || hostname === "uploads.github.com") { const githubToken = getGithubToken(); @@ -789,6 +872,11 @@ export async function curl(url, options = {}) { } } + if (cacheKey) { + cachedResults ||= {}; + cachedResults[cacheKey] = { status, statusText, error, body }; + } + return { status, statusText, @@ -813,6 +901,7 @@ export async function curlSafe(url, options) { return body; } +/** @type {Record | undefined} */ let cachedFiles; /** @@ -829,14 +918,13 @@ export function readFile(filename, options = {}) { } } - const relativePath = relative(process.cwd(), absolutePath); - debugLog("$", "cat", relativePath); + debugLog("$", "cat", absolutePath); let content; try { content = readFileSync(absolutePath, "utf-8"); } catch (cause) { - throw new Error(`Read failed: ${relativePath}`, { cause }); + throw new Error(`Read failed: ${absolutePath}`, { cause }); } if (options["cache"]) { @@ -847,22 +935,80 @@ export function readFile(filename, options = {}) { return content; } +/** + * @param {string} path + * @param {number} mode + */ +export function chmod(path, mode) { + debugLog("$", "chmod", path, mode); + chmodSync(path, mode); +} + /** * @param {string} filename * @param {string | Buffer} content * @param {object} [options] * @param {number} [options.mode] */ -export function writeFile(filename, content, options = {}) { - const parent = dirname(filename); - if (!existsSync(parent)) { - mkdirSync(parent, { recursive: true }); - } +export function writeFile(filename, content, options) { + mkdir(dirname(filename)); + debugLog("$", "touch", filename); writeFileSync(filename, content); - if (options["mode"]) { - chmodSync(filename, options["mode"]); + if (options?.mode) { + chmod(filename, options.mode); + } +} + +/** + * @param {string} source + * @param {string} destination + * @param {object} [options] + * @param {number} [options.mode] + */ +export function copyFile(source, destination, options) { + mkdir(dirname(destination)); + + debugLog("$", "cp", source, destination); + copyFileSync(source, destination); + + if (options?.mode) { + chmod(destination, options.mode); + } +} + +/** + * @param {string} path + * @param {object} [options] + * @param {number} [options.mode] + */ +export function mkdir(path, options = {}) { + if (existsSync(path)) { + return; + } + + debugLog("$", "mkdir", path); + mkdirSync(path, { ...options, recursive: true }); +} + +/** + * @param {string} path + */ +export function rm(path) { + let stats; + try { + stats = statSync(path); + } catch { + return; + } + + if (stats?.isDirectory()) { + debugLog("$", "rm", "-rf", path); + rmSync(path, { recursive: true, force: true }); + } else { + debugLog("$", "rm", "-f", path); + rmSync(path, { force: true }); } } @@ -894,10 +1040,16 @@ export function which(command, options = {}) { } } +/** + * @typedef {object} GitRef + * @property {string} [repository] + * @property {string} [commit] + */ + /** * @param {string} [cwd] - * @param {string} [base] - * @param {string} [head] + * @param {string | GitRef} [base] + * @param {string | GitRef} [head] * @returns {Promise} */ export async function getChangedFiles(cwd, base, head) { @@ -905,7 
+1057,7 @@ export async function getChangedFiles(cwd, base, head) { head ||= getCommit(cwd); base ||= `${head}^1`; - const url = `https://api.github.com/repos/${repository}/compare/${base}...${head}`; + const url = new URL(`repos/${repository}/compare/${base}...${head}`, getGithubApiUrl()); const { error, body } = await curl(url, { json: true }); if (error) { @@ -997,17 +1149,32 @@ export function getBuildLabel() { } /** + * @returns {boolean | undefined} + */ +export function isBuildManual() { + if (isBuildkite) { + const buildSource = getEnv("BUILDKITE_SOURCE", false); + if (buildSource) { + const buildId = getEnv("BUILDKITE_REBUILT_FROM_BUILD_ID", false); + return buildSource === "ui" && !buildId; + } + } +} + +/** + * @param {string} [os] * @returns {number} */ -export function getBootstrapVersion() { - if (isWindows) { - return 0; // TODO - } - const scriptPath = join(import.meta.dirname, "bootstrap.sh"); +export function getBootstrapVersion(os) { + const scriptPath = join( + import.meta.dirname, + os === "windows" || (!os && isWindows) ? "bootstrap.ps1" : "bootstrap.sh", + ); const scriptContent = readFile(scriptPath, { cache: true }); const match = /# Version: (\d+)/.exec(scriptContent); if (match) { - return parseInt(match[1]); + const [, version] = match; + return parseInt(version); } return 0; } @@ -1046,9 +1213,8 @@ export async function getBuildkiteBuildNumber() { return; } - const { status, error, body } = await curl(`https://api.github.com/repos/${repository}/commits/${commit}/statuses`, { - json: true, - }); + const url = new URL(`repos/${repository}/commits/${commit}/statuses`, getGithubApiUrl()); + const { status, error, body } = await curl(url, { json: true }); if (status === 404) { return; } @@ -1147,7 +1313,7 @@ export async function getLastSuccessfulBuild() { } while (url) { - const { error, body } = await curl(`${url}.json`, { json: true }); + const { error, body } = await curl(`${url}.json`, { json: true, cache: true }); if (error) { return; } @@ -1190,7 +1356,7 @@ export async function uploadArtifact(filename, cwd) { * @returns {string} */ export function stripAnsi(string) { - return string.replace(/\u001b\[\d+m/g, ""); + return string.replace(/\u001b\[[0-9;]*[a-zA-Z]/g, ""); } /** @@ -1250,6 +1416,13 @@ export function escapePowershell(string) { return string.replace(/'/g, "''").replace(/`/g, "``"); } +/** + * @returns {string} + */ +export function homedir() { + return nodeHomedir(); +} + /** * @returns {string} */ @@ -1281,6 +1454,16 @@ export function tmpdir() { return nodeTmpdir(); } +/** + * @param {string} [prefix] + * @param {string} [filename] + * @returns {string} + */ +export function mkdtemp(prefix, filename) { + const tmpPath = mkdtempSync(join(tmpdir(), prefix || "bun-")); + return filename ? 
join(tmpPath, filename) : tmpPath; +} + /** * @param {string} filename * @param {string} [output] @@ -1297,6 +1480,30 @@ export async function unzip(filename, output) { return destination; } +/** + * @param {string} value + * @returns {boolean | undefined} + */ +export function parseBoolean(value) { + if (/^(true|yes|1|on)$/i.test(value)) { + return true; + } + if (/^(false|no|0|off)$/i.test(value)) { + return false; + } +} + +/** + * @param {string} value + * @returns {number | undefined} + */ +export function parseNumber(value) { + const number = Number(value); + if (!isNaN(number)) { + return number; + } +} + /** * @param {string} string * @returns {"darwin" | "linux" | "windows"} @@ -1343,9 +1550,13 @@ export function getArch() { } /** - * @returns {string} + * @returns {string | undefined} */ export function getKernel() { + if (isWindows) { + return; + } + const kernel = release(); const match = /(\d+)\.(\d+)(?:\.(\d+))?/.exec(kernel); @@ -1496,7 +1707,7 @@ export async function getTargetDownloadUrl(target, release) { return canaryUrl; } - const statusUrl = new URL(`https://api.github.com/repos/oven-sh/bun/commits/${release}/status`).toString(); + const statusUrl = new URL(`repos/oven-sh/bun/commits/${release}/status`, getGithubApiUrl()); const { error, body } = await curl(statusUrl, { json: true }); if (error) { throw new Error(`Failed to fetch commit status: ${release}`, { cause: error }); @@ -1690,9 +1901,10 @@ export function getDistro() { const releasePath = "/etc/os-release"; if (existsSync(releasePath)) { const releaseFile = readFile(releasePath, { cache: true }); - const match = releaseFile.match(/^ID=\"?(.*)\"?/m); + const match = releaseFile.match(/^ID=(.*)/m); if (match) { - return match[1]; + const [, id] = match; + return id.includes('"') ? JSON.parse(id) : id; } } @@ -1735,9 +1947,10 @@ export function getDistroVersion() { const releasePath = "/etc/os-release"; if (existsSync(releasePath)) { const releaseFile = readFile(releasePath, { cache: true }); - const match = releaseFile.match(/^VERSION_ID=\"?(.*)\"?/m); + const match = releaseFile.match(/^VERSION_ID=(.*)/m); if (match) { - return match[1]; + const [, release] = match; + return release.includes('"') ? 
JSON.parse(release) : release; } } @@ -1755,6 +1968,25 @@ export function getDistroVersion() { } } +/** + * @returns {string | undefined} + */ +export function getShell() { + if (isWindows) { + const pwsh = which(["pwsh", "powershell"]); + if (pwsh) { + return pwsh; + } + } + + const sh = which(["bash", "sh"]); + if (sh) { + return sh; + } + + return getEnv("SHELL", false); +} + /** * @typedef {"aws" | "google"} Cloud */ @@ -1808,11 +2040,6 @@ export async function isAws() { return stdout.includes("Amazon"); } } - - const instanceId = await getCloudMetadata("instance-id", "google"); - if (instanceId) { - return true; - } } if (await checkAws()) { @@ -1846,11 +2073,6 @@ export async function isGoogleCloud() { } } } - - const instanceId = await getCloudMetadata("id", "google"); - if (instanceId) { - return true; - } } if (await detectGoogleCloud()) { @@ -1902,8 +2124,9 @@ export async function getCloudMetadata(name, cloud) { throw new Error(`Unsupported cloud: ${inspect(cloud)}`); } - const { error, body } = await curl(url, { headers, retries: 0 }); + const { error, body } = await curl(url, { headers, retries: 10 }); if (error) { + console.warn("Failed to get cloud metadata:", error); return; } @@ -1918,6 +2141,7 @@ export async function getCloudMetadata(name, cloud) { export function getCloudMetadataTag(tag, cloud) { const metadata = { "aws": `tags/instance/${tag}`, + "google": `labels/${tag.replace(":", "-")}`, }; return getCloudMetadata(metadata, cloud); @@ -1952,6 +2176,7 @@ export async function getBuildMetadata(name) { */ export async function waitForPort(options) { const { hostname, port, retries = 10 } = options; + console.log("Connecting...", `${hostname}:${port}`); let cause; for (let i = 0; i < retries; i++) { @@ -1963,6 +2188,7 @@ export async function waitForPort(options) { const socket = connect({ host: hostname, port }); socket.on("connect", () => { socket.destroy(); + console.log("Connected:", `${hostname}:${port}`); resolve(); }); socket.on("error", error => { @@ -1978,12 +2204,17 @@ export async function waitForPort(options) { } } + console.error("Connection failed:", `${hostname}:${port}`); return cause; } /** * @returns {Promise} */ export async function getCanaryRevision() { + if (isPullRequest() || isFork()) { + return 1; + } + const repository = getRepository() || "oven-sh/bun"; const { error: releaseError, body: release } = await curl( new URL(`repos/${repository}/releases/latest`, getGithubApiUrl()), @@ -2025,6 +2256,269 @@ export function getGithubUrl() { return new URL(getEnv("GITHUB_SERVER_URL", false) || "https://github.com"); } +/** + * @param {string} string + * @returns {string} + */ +export function sha256(string) { + return createHash("sha256").update(Buffer.from(string)).digest("hex"); +} + +/** + * @param {string} [level] + * @returns {"info" | "warning" | "error"} + */ +function parseLevel(level) { + if (/error|fatal|fail/i.test(level)) { + return "error"; + } + if (/warn|caution/i.test(level)) { + return "warning"; + } + return "notice"; +} + +/** + * @typedef {Object} Annotation + * @property {string} title + * @property {string} [content] + * @property {string} [source] + * @property {"notice" | "warning" | "error"} [level] + * @property {string} [url] + * @property {string} [filename] + * @property {number} [line] + * @property {number} [column] + * @property {Record} [metadata] + */ + +/** + * @typedef {Object} AnnotationContext + * @property {string} [cwd] + * @property {string[]} [command] + */ + +/** + * @param {Record} options + * @param 
{AnnotationContext} [context] + * @returns {Annotation} + */ +export function parseAnnotation(options, context) { + const source = options["source"]; + const level = parseLevel(options["level"]); + const title = options["title"] || (source ? `${source} ${level}` : level); + const filename = options["filename"]; + const line = parseInt(options["line"]) || undefined; + const column = parseInt(options["column"]) || undefined; + const content = options["content"]; + const lines = Array.isArray(content) ? content : content?.split(/(\r?\n)/) || []; + const metadata = Object.fromEntries( + Object.entries(options["metadata"] || {}).filter(([, value]) => value !== undefined), + ); + + const relevantLines = []; + let lastLine; + for (const line of lines) { + if (!lastLine && !line.trim()) { + continue; + } + lastLine = line.trim(); + relevantLines.push(line); + } + + return { + source, + title, + level, + filename, + line, + column, + content: relevantLines.join("\n"), + metadata, + }; +} + +/** + * @typedef {Object} AnnotationResult + * @property {Annotation[]} annotations + * @property {string} content + * @property {string} preview + */ + +/** + * @param {string} content + * @param {AnnotationOptions} [options] + * @returns {AnnotationResult} + */ +export function parseAnnotations(content, options = {}) { + /** @type {Annotation[]} */ + const annotations = []; + + const originalLines = content.split(/(\r?\n)/); + const lines = []; + + for (let i = 0; i < originalLines.length; i++) { + const originalLine = originalLines[i]; + const line = stripAnsi(originalLine).trim(); + const bufferedLines = [originalLine]; + + /** + * @param {RegExp} pattern + * @param {number} [maxLength] + * @returns {{lines: string[], match: string[] | undefined}} + */ + const readUntil = (pattern, maxLength = 100) => { + let length = 0; + let match; + + while (i + length <= originalLines.length && length < maxLength) { + const originalLine = originalLines[i + length++]; + const line = stripAnsi(originalLine).trim(); + const patternMatch = pattern.exec(line); + if (patternMatch) { + match = patternMatch; + break; + } + } + + const lines = originalLines.slice(i + 1, (i += length)); + bufferedLines.push(...lines); + return { lines, match }; + }; + + // Github Actions + // https://docs.github.com/en/actions/writing-workflows/choosing-what-your-workflow-does/workflow-commands-for-github-actions + const githubAnnotation = line.match(/^::(error|warning|notice|debug)(?: (.*))?::(.*)$/); + if (githubAnnotation) { + const [, level, attributes, content] = githubAnnotation; + const { file, line, col, title } = Object.fromEntries( + attributes?.split(",")?.map(entry => entry.split("=")) || {}, + ); + + const annotation = parseAnnotation({ + level, + filename: file, + line, + column: col, + content: unescapeGitHubAction(title) + unescapeGitHubAction(content), + }); + annotations.push(annotation); + continue; + } + + const githubCommand = line.match(/^::(group|endgroup|add-mask|stop-commands)::$/); + if (githubCommand) { + continue; + } + + // CMake error format + // e.g. CMake Error at /path/to/thing.cmake:123 (message): ... 
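+    // If the message is followed by a call stack, the annotation is re-pointed at the first
+    // CMakeLists.txt or *.cmake frame in that stack rather than the file that raised it
+    // (see the readUntil calls below).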
+ const cmakeMessage = line.match(/CMake (Error|Warning|Deprecation Warning) at (.*):(\d+)/i); + if (cmakeMessage) { + let [, level, filename, line] = cmakeMessage; + + const { match: callStackMatch } = readUntil(/Call Stack \(most recent call first\)/i); + if (callStackMatch) { + const { match: callFrameMatch } = readUntil(/(CMakeLists\.txt|[^\s]+\.cmake):(\d+)/i, 5); + if (callFrameMatch) { + const [, frame, location] = callFrameMatch; + filename = frame; + line = location; + } + } + + const annotation = parseAnnotation({ + source: "cmake", + level, + filename, + line, + content: bufferedLines, + }); + annotations.push(annotation); + } + + // Zig compiler error + // e.g. /path/to/build.zig:8:19: error: ... + const zigMessage = line.match(/^(.+\.zig):(\d+):(\d+): (error|warning): (.+)$/); + if (zigMessage) { + const [, filename, line, column, level] = zigMessage; + + const { match: callStackMatch } = readUntil(/referenced by:/i); + if (callStackMatch) { + readUntil(/(.+\.zig):(\d+):(\d+)/i, 5); + } + + const annotation = parseAnnotation({ + source: "zig", + level, + filename, + line, + column, + content: bufferedLines, + }); + annotations.push(annotation); + } + + const nodeJsError = line.match(/^file:\/\/(.+\.(?:c|m)js):(\d+)/i); + if (nodeJsError) { + const [, filename, line] = nodeJsError; + + let metadata; + const { match: nodeJsVersionMatch } = readUntil(/^Node\.js v(\d+\.\d+\.\d+)/i); + if (nodeJsVersionMatch) { + const [, version] = nodeJsVersionMatch; + metadata = { + "node-version": version, + }; + } + + const annotation = parseAnnotation({ + source: "node", + level: "error", + filename, + line, + content: bufferedLines, + metadata, + }); + annotations.push(annotation); + } + + const clangError = line.match(/^(.+\.(?:cpp|c|m|h)):(\d+):(\d+): (error|warning): (.+)/i); + if (clangError) { + const [, filename, line, column, level] = clangError; + readUntil(/^\d+ (?:error|warning)s? 
+      const annotation = parseAnnotation({
+        source: "clang",
+        level,
+        filename,
+        line,
+        column,
+        content: bufferedLines,
+      });
+      annotations.push(annotation);
+    }
+
+    const shellMessage = line.match(/(.+\.sh): line (\d+): (.+)/i);
+    if (shellMessage) {
+      const [, filename, line] = shellMessage;
+      const annotation = parseAnnotation({
+        source: "shell",
+        level: "error",
+        filename,
+        line,
+        content: bufferedLines,
+      });
+      annotations.push(annotation);
+    }
+
+    lines.push(originalLine);
+  }
+
+  return {
+    annotations,
+    content: lines.join("\n"),
+  };
+}
+
 /**
  * @param {object} obj
  * @param {number} indent
@@ -2061,7 +2555,12 @@ export function toYaml(obj, indent = 0) {
     }
     if (
       typeof value === "string" &&
-      (value.includes(":") || value.includes("#") || value.includes("'") || value.includes('"') || value.includes("\n"))
+      (value.includes(":") ||
+        value.includes("#") ||
+        value.includes("'") ||
+        value.includes('"') ||
+        value.includes("\n") ||
+        value.includes("*"))
     ) {
       result += `${spaces}${key}: "${value.replace(/"/g, '\\"')}"\n`;
       continue;
@@ -2071,11 +2570,19 @@ export function toYaml(obj, indent = 0) {
   return result;
 }
 
+/** @type {string | undefined} */
+let lastGroup;
+
 /**
  * @param {string} title
  * @param {function} [fn]
  */
 export function startGroup(title, fn) {
+  if (lastGroup && lastGroup !== title) {
+    endGroup();
+  }
+  lastGroup = title;
+
   if (isGithubAction) {
     console.log(`::group::${stripAnsi(title)}`);
   } else if (isBuildkite) {
@@ -2099,6 +2606,10 @@ export function startGroup(title, fn) {
 }
 
 export function endGroup() {
+  if (lastGroup) {
+    lastGroup = undefined;
+  }
+
   if (isGithubAction) {
     console.log("::endgroup::");
   } else {
@@ -2125,12 +2636,12 @@ export function printEnvironment() {
     console.log("Username:", getUsername());
     console.log("Working Directory:", process.cwd());
     console.log("Temporary Directory:", tmpdir());
+    if (process.isBun) {
+      console.log("Bun Version:", Bun.version, Bun.revision);
+    } else {
+      console.log("Node Version:", process.version);
+    }
   });
-  if (isPosix) {
-    startGroup("ulimit -a", () => {
-      spawnSync(["ulimit", "-a"], { stdio: ["ignore", "inherit", "inherit"] });
-    });
-  }
 
   if (isCI) {
     startGroup("Environment", () => {
@@ -2138,6 +2649,15 @@ export function printEnvironment() {
         console.log(`${key}:`, value);
       }
     });
+
+    if (isPosix) {
+      startGroup("Limits", () => {
+        const shell = which(["sh", "bash"]);
+        if (shell) {
+          spawnSync([shell, "-c", "ulimit -a"], { stdio: "inherit" });
+        }
+      });
+    }
   }
 
   startGroup("Repository", () => {
@@ -2159,7 +2679,71 @@ export function printEnvironment() {
     startGroup("CI", () => {
       console.log("Build ID:", getBuildId());
       console.log("Build Label:", getBuildLabel());
-      console.log("Build URL:", `${getBuildUrl()}`);
+      console.log("Build URL:", getBuildUrl()?.toString());
     });
   }
 }
+
+/**
+ * @returns {number | undefined}
+ */
+export function getLoggedInUserCount() {
+  if (isWindows) {
+    const pwsh = which(["pwsh", "powershell"]);
+    if (pwsh) {
+      const { error, stdout } = spawnSync([
+        pwsh,
+        "-Command",
+        `Get-CimInstance -ClassName Win32_Process -Filter "Name = 'sshd.exe'" | Get-CimAssociatedInstance -Association Win32_SessionProcess | Get-CimAssociatedInstance -Association Win32_LoggedOnUser | Where-Object {$_.Name -ne 'SYSTEM'} | Measure-Object | Select-Object -ExpandProperty Count`,
+      ]);
+      if (!error) {
+        return parseInt(stdout) || undefined;
+      }
+    }
+  }
+
+  const { error, stdout } = spawnSync(["who"]);
+  if (!error) {
+    return stdout.split("\n").filter(line => /tty|pts/i.test(line)).length;
+  }
+}
+
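// Illustrative usage sketch (not part of the patch): assuming parseAnnotations is
// exported from scripts/utils.mjs as added above, and that parseLevel and
// unescapeGitHubAction behave as their names suggest, a CI log containing a GitHub
// Actions workflow command would be summarized roughly like this (file, line, and
// message values below are made up):
//
//   import { parseAnnotations } from "./scripts/utils.mjs";
//
//   const log = [
//     "::error file=src/index.ts,line=10,col=5,title=TypeError::Unexpected token",
//     "remaining build output...",
//   ].join("\n");
//
//   const { annotations, content } = parseAnnotations(log);
//   // annotations[0] is roughly { level: "error", filename: "src/index.ts", line: 10, column: 5, ... }
//   // content is the log with the recognized workflow-command line filtered out.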
+/** @typedef {keyof typeof emojiMap} Emoji */
+
+const emojiMap = {
+  darwin: ["🍎", "darwin"],
+  linux: ["🐧", "linux"],
+  debian: ["🐧", "debian"],
+  ubuntu: ["🐧", "ubuntu"],
+  alpine: ["🐧", "alpine"],
+  aws: ["☁️", "aws"],
+  amazonlinux: ["🐧", "aws"],
+  windows: ["🪟", "windows"],
+  true: ["✅", "white_check_mark"],
+  false: ["❌", "x"],
+  debug: ["🐞", "bug"],
+  assert: ["🔍", "mag"],
+  release: ["🏆", "trophy"],
+  gear: ["⚙️", "gear"],
+  clipboard: ["📋", "clipboard"],
+  rocket: ["🚀", "rocket"],
+};
+
+/**
+ * @param {Emoji} emoji
+ * @returns {string}
+ */
+export function getEmoji(emoji) {
+  const [unicode] = emojiMap[emoji] || [];
+  return unicode || "";
+}
+
+/**
+ * @param {Emoji} emoji
+ * @returns {string}
+ * @link https://github.com/buildkite/emojis#emoji-reference
+ */
+export function getBuildkiteEmoji(emoji) {
+  const [, name] = emojiMap[emoji] || [];
+  return name ? `:${name}:` : "";
+}
diff --git a/src/bun.js/bindings/InternalModuleRegistry.cpp b/src/bun.js/bindings/InternalModuleRegistry.cpp
index fc3407c702..2f5d95c92f 100644
--- a/src/bun.js/bindings/InternalModuleRegistry.cpp
+++ b/src/bun.js/bindings/InternalModuleRegistry.cpp
@@ -74,7 +74,7 @@ JSC::JSValue generateModule(JSC::JSGlobalObject* globalObject, JSC::VM& vm, cons
     return result;
 }
 
-#if BUN_DEBUG
+#if BUN_DYNAMIC_JS_LOAD_PATH
 JSValue initializeInternalModuleFromDisk(
     JSGlobalObject* globalObject,
     VM& vm,
diff --git a/test/bun.lockb b/test/bun.lockb
index 699279fcb2..592e2bd028 100755
Binary files a/test/bun.lockb and b/test/bun.lockb differ
diff --git a/test/harness.ts b/test/harness.ts
index 6d1c6d36a4..0921b1dcc0 100644
--- a/test/harness.ts
+++ b/test/harness.ts
@@ -5,7 +5,7 @@ import { readFile, readlink, writeFile } from "fs/promises";
 import fs, { closeSync, openSync } from "node:fs";
 import os from "node:os";
 import { dirname, isAbsolute, join } from "path";
-import detect_libc from "detect-libc";
+import detectLibc from "detect-libc";
 
 type Awaitable<T> = T | Promise<T>;
 
@@ -19,7 +19,8 @@ export const isIntelMacOS = isMacOS && process.arch === "x64";
 export const isDebug = Bun.version.includes("debug");
 export const isCI = process.env.CI !== undefined;
 export const isBuildKite = process.env.BUILDKITE === "true";
-export const libc_family = detect_libc.familySync();
+export const libcFamily = detectLibc.familySync() as "glibc" | "musl";
+export const isVerbose = process.env.DEBUG === "1";
 
 // Use these to mark a test as flaky or broken.
 // This will help us keep track of these tests.
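// Illustrative usage sketch (not part of the patch): the new harness exports are
// presumably meant to gate extra diagnostics in tests; the call site below is
// hypothetical.
//
//   import { isVerbose, libcFamily } from "harness";
//
//   if (isVerbose) {
//     // Only print extra diagnostics when DEBUG=1 is set in the environment.
//     console.log("libc family detected by detect-libc:", libcFamily);
//   }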
@@ -1372,7 +1373,7 @@ export function waitForFileToExist(path: string, interval: number) {
 export function libcPathForDlopen() {
   switch (process.platform) {
     case "linux":
-      switch (libc_family) {
+      switch (libcFamily) {
        case "glibc":
          return "libc.so.6";
        case "musl":
diff --git a/test/integration/next-pages/test/dev-server-puppeteer.ts b/test/integration/next-pages/test/dev-server-puppeteer.ts
index b529afbf68..06bf56b425 100644
--- a/test/integration/next-pages/test/dev-server-puppeteer.ts
+++ b/test/integration/next-pages/test/dev-server-puppeteer.ts
@@ -1,8 +1,9 @@
 import assert from "assert";
 import { copyFileSync } from "fs";
 import { join } from "path";
-import { ConsoleMessage, Page, launch } from "puppeteer";
-
+import type { ConsoleMessage, Page } from "puppeteer";
+import { launch } from "puppeteer";
+import { which } from "bun";
 const root = join(import.meta.dir, "../");
 
 copyFileSync(join(root, "src/Counter1.txt"), join(root, "src/Counter.tsx"));
@@ -12,28 +13,38 @@ if (process.argv.length > 2) {
   url = process.argv[2];
 }
 
+const browserPath = which("chromium-browser") || which("chromium") || which("chrome") || undefined;
+if (!browserPath) {
+  console.warn("Since a Chromium browser was not found, it will be downloaded by Puppeteer.");
+}
+
 const b = await launch({
-  // While puppeteer is migrating to their new headless: `true` mode,
-  // this causes strange issues on macOS in the cloud (AWS and MacStadium).
-  //
-  // There is a GitHub issue, but the discussion is unhelpful:
-  // https://github.com/puppeteer/puppeteer/issues/10153
-  //
-  // Fixes: 'TargetCloseError: Protocol error (Target.setAutoAttach): Target closed'
-  headless: "shell",
+  // On macOS, there are issues using the new headless mode.
+  // "TargetCloseError: Protocol error (Target.setAutoAttach): Target closed"
+  headless: process.platform === "darwin" ? "shell" : true,
+  // Inherit the stdout and stderr of the browser process.
   dumpio: true,
+  // Prefer to use a pipe to connect to the browser, instead of a WebSocket.
   pipe: true,
+  // Disable timeouts.
+  timeout: 0,
+  protocolTimeout: 0,
+  // Specify that chrome should be used, for consistent test results.
+  // If a browser path is not found, it will be downloaded.
+  browser: "chrome",
+  executablePath: browserPath,
   args: [
-    // Fixes: 'dock_plist is not an NSDictionary'
+    // On Linux, there are issues with the sandbox, so disable it.
+    // On macOS, this fixes: "dock_plist is not an NSDictionary"
     "--no-sandbox",
-    "--single-process",
     "--disable-setuid-sandbox",
+
+    // On Docker, the default /dev/shm is too small for Chrome, which causes
+    // crashes when rendering large pages, so disable it.
     "--disable-dev-shm-usage",
-    // Fixes: 'Navigating frame was detached'
+
+    // Fixes: "Navigating frame was detached"
     "--disable-features=site-per-process",
-    // Uncomment if you want debug logs from Chromium:
-    // "--enable-logging=stderr",
-    // "--v=1",
   ],
 });
 
diff --git a/test/js/node/cluster/test-docs-http-server.ts b/test/js/node/cluster/test-docs-http-server.ts
index 91547ed7ae..a72498d227 100644
--- a/test/js/node/cluster/test-docs-http-server.ts
+++ b/test/js/node/cluster/test-docs-http-server.ts
@@ -1,8 +1,14 @@
+import { isBroken, isWindows } from "harness";
 import assert from "node:assert";
 import cluster from "node:cluster";
 import http from "node:http";
 import { availableParallelism } from "node:os";
 
+if (isWindows && isBroken) {
+  console.log("Skipping on Windows because it does not work when there are more than 1 CPU");
+  process.exit(0);
+}
+
 const numCPUs = availableParallelism();
 
 let workers = 0;
diff --git a/test/package.json b/test/package.json
index f643ef682d..5fc461dc32 100644
--- a/test/package.json
+++ b/test/package.json
@@ -4,7 +4,9 @@
     "@types/react": "18.0.28",
     "@types/react-dom": "18.0.11",
     "@types/supertest": "2.0.12",
-    "@types/utf-8-validate": "5.0.0"
+    "@types/utf-8-validate": "5.0.0",
+    "@types/ws": "8.5.10",
+    "@types/puppeteer": "7.0.4"
   },
   "dependencies": {
     "@azure/service-bus": "7.9.4",
@@ -16,7 +18,6 @@
     "@remix-run/serve": "2.10.3",
     "@resvg/resvg-js": "2.4.1",
     "@swc/core": "1.3.38",
-    "@types/ws": "8.5.10",
     "aws-cdk-lib": "2.148.0",
     "axios": "1.6.8",
     "body-parser": "1.20.2",