mirror of
https://github.com/oven-sh/bun
synced 2026-02-26 03:27:23 +01:00
Compare commits
2 Commits
claude/fix
...
claude/mac
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
b2c8dc1eee | ||
|
|
7f8b985c69 |
78
.agent/agent.mjs
Normal file
78
.agent/agent.mjs
Normal file
@@ -0,0 +1,78 @@
|
||||
import { spawnSync } from "node:child_process";
import { readFileSync, existsSync } from "node:fs";
import { parseArgs } from "node:util";

// Thin CLI wrapper around the `claude` CLI: loads a named prompt file from
// .agent/, prepends the GITHUB_* environment as context, appends any extra
// CLI arguments, and forwards the assembled prompt to `claude`.
//
// Usage: node agent.mjs <prompt_name> [extra_args...]
const { positionals, values } = parseArgs({
  allowPositionals: true,
  options: {
    help: {
      type: "boolean",
      short: "h",
      default: false,
    },
    interactive: {
      type: "boolean",
      short: "i",
      default: false,
    },
  },
});

if (values.help || positionals.length === 0) {
  console.log("Usage: node agent.mjs <prompt_name> [extra_args...]");
  console.log("Example: node agent.mjs triage fix bug in authentication");
  console.log("Options:");
  console.log("  -h, --help         Show this help message");
  console.log("  -i, --interactive  Run in interactive mode");
  process.exit(0);
}

// Prompt names are case-insensitive on the command line: the file on disk is
// expected to use an uppercase basename (e.g. .agent/TRIAGE.md).
const promptName = positionals[0].toUpperCase();
const promptFile = `.agent/${promptName}.md`;
const extraArgs = positionals.slice(1);

if (!existsSync(promptFile)) {
  console.error(`Error: Prompt file "${promptFile}" not found`);
  // BUGFIX: the lookup uppercases the name, so the hint must show uppercase
  // filenames — the old lowercase examples were wrong on case-sensitive
  // filesystems (Linux CI).
  console.error(`Available prompts should be named like: .agent/TRIAGE.md, .agent/DEBUG.md, etc.`);
  process.exit(1);
}

try {
  let prompt = readFileSync(promptFile, "utf-8");

  // Surface the GitHub Actions environment to the agent as a markdown
  // preamble, sorted for deterministic output.
  const githubEnvs = Object.entries(process.env)
    .filter(([key]) => key.startsWith("GITHUB_"))
    .sort(([a], [b]) => a.localeCompare(b));

  if (githubEnvs.length > 0) {
    const githubContext = `## GitHub Environment\n\n${githubEnvs
      .map(([key, value]) => `**${key}**: \`${value}\``)
      .join("\n")}\n\n---\n\n`;
    prompt = githubContext + prompt;
  }

  // Any positionals after the prompt name are appended verbatim as extra
  // instructions for the agent.
  if (extraArgs.length > 0) {
    const extraArgsContext = `\n\n## Additional Arguments\n\n${extraArgs.join(" ")}\n\n---\n\n`;
    prompt = prompt + extraArgsContext;
  }

  const claudeArgs = [prompt, "--allowedTools=Edit,Write,Replace,Search", "--output-format=json"];
  if (!values.interactive) {
    // Non-interactive runs use --print so claude emits output and exits.
    claudeArgs.unshift("--print");
  }

  const { status, error } = spawnSync("claude", claudeArgs, {
    stdio: "inherit",
    encoding: "utf-8",
  });

  // `error` is set when the child could not be spawned at all (e.g. `claude`
  // is not on PATH).
  if (error) {
    console.error("Error running claude:", error);
    process.exit(1);
  }

  // BUGFIX: `status` is null when the child was killed by a signal; the old
  // `status || 0` reported that as success. Treat a null status as failure.
  process.exit(status ?? 1);
} catch (error) {
  // readFileSync is the only call in the try block that throws in practice.
  console.error(`Error reading prompt file "${promptFile}":`, error);
  process.exit(1);
}
|
||||
19
.aikido
19
.aikido
@@ -1,19 +0,0 @@
|
||||
exclude:
|
||||
paths:
|
||||
- test
|
||||
- scripts
|
||||
- bench
|
||||
- packages/bun-lambda
|
||||
- packages/bun-release
|
||||
- packages/bun-wasm
|
||||
- packages/bun-vscode
|
||||
- packages/bun-plugin-yaml
|
||||
- packages/bun-plugin-svelte
|
||||
- packages/bun-native-plugin-rs
|
||||
- packages/bun-native-bundler-plugin-api
|
||||
- packages/bun-inspector-protocol
|
||||
- packages/bun-inspector-frontend
|
||||
- packages/bun-error
|
||||
- packages/bun-debug-adapter-protocol
|
||||
- packages/bun-build-mdx-rs
|
||||
- packages/@types/bun
|
||||
@@ -48,14 +48,6 @@ RUN --mount=type=tmpfs,target=/tmp \
|
||||
wget -O /tmp/cmake.sh "$cmake_url" && \
|
||||
sh /tmp/cmake.sh --skip-license --prefix=/usr
|
||||
|
||||
RUN --mount=type=tmpfs,target=/tmp \
|
||||
sccache_version="0.12.0" && \
|
||||
arch=$(uname -m) && \
|
||||
sccache_url="https://github.com/mozilla/sccache/releases/download/v${sccache_version}/sccache-v${sccache_version}-${arch}-unknown-linux-musl.tar.gz" && \
|
||||
wget -O /tmp/sccache.tar.gz "$sccache_url" && \
|
||||
tar -xzf /tmp/sccache.tar.gz -C /tmp && \
|
||||
install -m755 /tmp/sccache-v${sccache_version}-${arch}-unknown-linux-musl/sccache /usr/local/bin
|
||||
|
||||
RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-13 130 \
|
||||
--slave /usr/bin/g++ g++ /usr/bin/g++-13 \
|
||||
--slave /usr/bin/gcc-ar gcc-ar /usr/bin/gcc-ar-13 \
|
||||
@@ -118,14 +110,14 @@ ARG BUILDKITE_AGENT_TAGS
|
||||
|
||||
|
||||
# Install Rust nightly
|
||||
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y \
|
||||
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y \
|
||||
&& export PATH=$HOME/.cargo/bin:$PATH \
|
||||
&& rustup install nightly \
|
||||
&& rustup default nightly
|
||||
|
||||
|
||||
RUN ARCH=$(if [ "$TARGETARCH" = "arm64" ]; then echo "arm64"; else echo "amd64"; fi) && \
|
||||
echo "Downloading buildkite" && \
|
||||
echo "Downloading buildkite" && \
|
||||
curl -fsSL "https://github.com/buildkite/agent/releases/download/v3.87.0/buildkite-agent-linux-${ARCH}-3.87.0.tar.gz" -o /tmp/buildkite-agent.tar.gz && \
|
||||
mkdir -p /tmp/buildkite-agent && \
|
||||
tar -xzf /tmp/buildkite-agent.tar.gz -C /tmp/buildkite-agent && \
|
||||
@@ -133,20 +125,6 @@ RUN ARCH=$(if [ "$TARGETARCH" = "arm64" ]; then echo "arm64"; else echo "amd64";
|
||||
|
||||
RUN mkdir -p /var/cache/buildkite-agent /var/log/buildkite-agent /var/run/buildkite-agent /etc/buildkite-agent /var/lib/buildkite-agent/cache/bun
|
||||
|
||||
# The following is necessary to configure buildkite to use a stable
|
||||
# checkout directory. sccache hashes absolute paths into its cache keys,
|
||||
# so if buildkite uses a different checkout path each time (which it does
|
||||
# by default), sccache will be useless.
|
||||
RUN mkdir -p -m 755 /var/lib/buildkite-agent/hooks && \
|
||||
cat <<'EOF' > /var/lib/buildkite-agent/hooks/environment
|
||||
#!/bin/sh
|
||||
set -efu
|
||||
|
||||
export BUILDKITE_BUILD_CHECKOUT_PATH=/var/lib/buildkite-agent/build
|
||||
EOF
|
||||
|
||||
RUN chmod 744 /var/lib/buildkite-agent/hooks/environment
|
||||
|
||||
COPY ../*/agent.mjs /var/bun/scripts/
|
||||
|
||||
ENV BUN_INSTALL_CACHE=/var/lib/buildkite-agent/cache/bun
|
||||
@@ -169,7 +147,7 @@ COPY . /workspace/bun
|
||||
|
||||
|
||||
# Install Rust nightly
|
||||
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y \
|
||||
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y \
|
||||
&& export PATH=$HOME/.cargo/bin:$PATH \
|
||||
&& rustup install nightly \
|
||||
&& rustup default nightly
|
||||
@@ -183,4 +161,4 @@ RUN --mount=type=tmpfs,target=/workspace/bun/build \
|
||||
ls -la \
|
||||
&& bun run build:release \
|
||||
&& mkdir -p /target \
|
||||
&& cp -r /workspace/bun/build/release/bun /target/bun
|
||||
&& cp -r /workspace/bun/build/release/bun /target/bun
|
||||
@@ -16,7 +16,6 @@ import {
|
||||
getEmoji,
|
||||
getEnv,
|
||||
getLastSuccessfulBuild,
|
||||
getSecret,
|
||||
isBuildkite,
|
||||
isBuildManual,
|
||||
isFork,
|
||||
@@ -31,7 +30,7 @@ import {
|
||||
} from "../scripts/utils.mjs";
|
||||
|
||||
/**
|
||||
* @typedef {"linux" | "darwin" | "windows" | "freebsd"} Os
|
||||
* @typedef {"linux" | "darwin" | "windows"} Os
|
||||
* @typedef {"aarch64" | "x64"} Arch
|
||||
* @typedef {"musl"} Abi
|
||||
* @typedef {"debian" | "ubuntu" | "alpine" | "amazonlinux"} Distro
|
||||
@@ -109,12 +108,11 @@ const buildPlatforms = [
|
||||
{ os: "linux", arch: "x64", distro: "amazonlinux", release: "2023", features: ["docker"] },
|
||||
{ os: "linux", arch: "x64", baseline: true, distro: "amazonlinux", release: "2023", features: ["docker"] },
|
||||
{ os: "linux", arch: "x64", profile: "asan", distro: "amazonlinux", release: "2023", features: ["docker"] },
|
||||
{ os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.22" },
|
||||
{ os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.22" },
|
||||
{ os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.22" },
|
||||
{ os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.21" },
|
||||
{ os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.21" },
|
||||
{ os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.21" },
|
||||
{ os: "windows", arch: "x64", release: "2019" },
|
||||
{ os: "windows", arch: "x64", baseline: true, release: "2019" },
|
||||
{ os: "freebsd", arch: "x64", release: "14.3" },
|
||||
];
|
||||
|
||||
/**
|
||||
@@ -135,9 +133,9 @@ const testPlatforms = [
|
||||
{ os: "linux", arch: "x64", distro: "ubuntu", release: "24.04", tier: "latest" },
|
||||
{ os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "25.04", tier: "latest" },
|
||||
{ os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "24.04", tier: "latest" },
|
||||
{ os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.22", tier: "latest" },
|
||||
{ os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.22", tier: "latest" },
|
||||
{ os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.22", tier: "latest" },
|
||||
{ os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.21", tier: "latest" },
|
||||
{ os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.21", tier: "latest" },
|
||||
{ os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.21", tier: "latest" },
|
||||
{ os: "windows", arch: "x64", release: "2019", tier: "oldest" },
|
||||
{ os: "windows", arch: "x64", release: "2019", baseline: true, tier: "oldest" },
|
||||
];
|
||||
@@ -225,7 +223,7 @@ function getImageName(platform, options) {
|
||||
* @param {number} [limit]
|
||||
* @link https://buildkite.com/docs/pipelines/command-step#retry-attributes
|
||||
*/
|
||||
function getRetry() {
|
||||
function getRetry(limit = 0) {
|
||||
return {
|
||||
manual: {
|
||||
permit_on_passed: true,
|
||||
@@ -294,7 +292,7 @@ function getEc2Agent(platform, options, ec2Options) {
|
||||
* @returns {string}
|
||||
*/
|
||||
function getCppAgent(platform, options) {
|
||||
const { os, arch } = platform;
|
||||
const { os, arch, distro } = platform;
|
||||
|
||||
if (os === "darwin") {
|
||||
return {
|
||||
@@ -305,34 +303,9 @@ function getCppAgent(platform, options) {
|
||||
}
|
||||
|
||||
return getEc2Agent(platform, options, {
|
||||
instanceType: arch === "aarch64" ? "c8g.4xlarge" : "c7i.4xlarge",
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Platform} platform
|
||||
* @param {PipelineOptions} options
|
||||
* @returns {string}
|
||||
*/
|
||||
function getLinkBunAgent(platform, options) {
|
||||
const { os, arch } = platform;
|
||||
|
||||
if (os === "darwin") {
|
||||
return {
|
||||
queue: `build-${os}`,
|
||||
os,
|
||||
arch,
|
||||
};
|
||||
}
|
||||
|
||||
if (os === "windows") {
|
||||
return getEc2Agent(platform, options, {
|
||||
instanceType: arch === "aarch64" ? "r8g.large" : "r7i.large",
|
||||
});
|
||||
}
|
||||
|
||||
return getEc2Agent(platform, options, {
|
||||
instanceType: arch === "aarch64" ? "r8g.xlarge" : "r7i.xlarge",
|
||||
instanceType: arch === "aarch64" ? "c8g.16xlarge" : "c7i.16xlarge",
|
||||
cpuCount: 32,
|
||||
threadsPerCore: 1,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -345,7 +318,7 @@ function getZigPlatform() {
|
||||
arch: "aarch64",
|
||||
abi: "musl",
|
||||
distro: "alpine",
|
||||
release: "3.22",
|
||||
release: "3.21",
|
||||
};
|
||||
}
|
||||
|
||||
@@ -354,7 +327,14 @@ function getZigPlatform() {
|
||||
* @param {PipelineOptions} options
|
||||
* @returns {Agent}
|
||||
*/
|
||||
function getZigAgent(_platform, options) {
|
||||
function getZigAgent(platform, options) {
|
||||
const { arch } = platform;
|
||||
|
||||
// Uncomment to restore to using macOS on-prem for Zig.
|
||||
// return {
|
||||
// queue: "build-zig",
|
||||
// };
|
||||
|
||||
return getEc2Agent(getZigPlatform(), options, {
|
||||
instanceType: "r8g.large",
|
||||
});
|
||||
@@ -366,7 +346,7 @@ function getZigAgent(_platform, options) {
|
||||
* @returns {Agent}
|
||||
*/
|
||||
function getTestAgent(platform, options) {
|
||||
const { os, arch, profile } = platform;
|
||||
const { os, arch } = platform;
|
||||
|
||||
if (os === "darwin") {
|
||||
return {
|
||||
@@ -376,7 +356,7 @@ function getTestAgent(platform, options) {
|
||||
};
|
||||
}
|
||||
|
||||
// TODO: delete this block when we upgrade to mimalloc v3
|
||||
// TODO: `dev-server-ssr-110.test.ts` and `next-build.test.ts` run out of memory at 8GB of memory, so use 16GB instead.
|
||||
if (os === "windows") {
|
||||
return getEc2Agent(platform, options, {
|
||||
instanceType: "c7i.2xlarge",
|
||||
@@ -386,13 +366,6 @@ function getTestAgent(platform, options) {
|
||||
}
|
||||
|
||||
if (arch === "aarch64") {
|
||||
if (profile === "asan") {
|
||||
return getEc2Agent(platform, options, {
|
||||
instanceType: "c8g.2xlarge",
|
||||
cpuCount: 2,
|
||||
threadsPerCore: 1,
|
||||
});
|
||||
}
|
||||
return getEc2Agent(platform, options, {
|
||||
instanceType: "c8g.xlarge",
|
||||
cpuCount: 2,
|
||||
@@ -400,13 +373,6 @@ function getTestAgent(platform, options) {
|
||||
});
|
||||
}
|
||||
|
||||
if (profile === "asan") {
|
||||
return getEc2Agent(platform, options, {
|
||||
instanceType: "c7i.2xlarge",
|
||||
cpuCount: 2,
|
||||
threadsPerCore: 1,
|
||||
});
|
||||
}
|
||||
return getEc2Agent(platform, options, {
|
||||
instanceType: "c7i.xlarge",
|
||||
cpuCount: 2,
|
||||
@@ -443,17 +409,28 @@ function getBuildEnv(target, options) {
|
||||
* @param {PipelineOptions} options
|
||||
* @returns {string}
|
||||
*/
|
||||
function getBuildCommand(target, options, label) {
|
||||
function getBuildCommand(target, options) {
|
||||
const { profile } = target;
|
||||
const buildProfile = profile || "release";
|
||||
|
||||
if (target.os === "windows" && label === "build-bun") {
|
||||
// Only sign release builds, not canary builds (DigiCert charges per signature)
|
||||
const enableSigning = !options.canary ? " -DENABLE_WINDOWS_CODESIGNING=ON" : "";
|
||||
return `bun run build:${buildProfile}${enableSigning}`;
|
||||
}
|
||||
const label = profile || "release";
|
||||
return `bun run build:${label}`;
|
||||
}
|
||||
|
||||
return `bun run build:${buildProfile}`;
|
||||
/**
|
||||
* @param {Platform} platform
|
||||
* @param {PipelineOptions} options
|
||||
* @returns {Step}
|
||||
*/
|
||||
function getBuildVendorStep(platform, options) {
|
||||
return {
|
||||
key: `${getTargetKey(platform)}-build-vendor`,
|
||||
label: `${getTargetLabel(platform)} - build-vendor`,
|
||||
agents: getCppAgent(platform, options),
|
||||
retry: getRetry(),
|
||||
cancel_on_build_failing: isMergeQueue(),
|
||||
env: getBuildEnv(platform, options),
|
||||
command: `${getBuildCommand(platform, options)} --target dependencies`,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -505,9 +482,9 @@ function getBuildZigStep(platform, options) {
|
||||
const toolchain = getBuildToolchain(platform);
|
||||
return {
|
||||
key: `${getTargetKey(platform)}-build-zig`,
|
||||
retry: getRetry(),
|
||||
label: `${getTargetLabel(platform)} - build-zig`,
|
||||
agents: getZigAgent(platform, options),
|
||||
retry: getRetry(),
|
||||
cancel_on_build_failing: isMergeQueue(),
|
||||
env: getBuildEnv(platform, options),
|
||||
command: `${getBuildCommand(platform, options)} --target bun-zig --toolchain ${toolchain}`,
|
||||
@@ -525,15 +502,14 @@ function getLinkBunStep(platform, options) {
|
||||
key: `${getTargetKey(platform)}-build-bun`,
|
||||
label: `${getTargetLabel(platform)} - build-bun`,
|
||||
depends_on: [`${getTargetKey(platform)}-build-cpp`, `${getTargetKey(platform)}-build-zig`],
|
||||
agents: getLinkBunAgent(platform, options),
|
||||
agents: getCppAgent(platform, options),
|
||||
retry: getRetry(),
|
||||
cancel_on_build_failing: isMergeQueue(),
|
||||
env: {
|
||||
BUN_LINK_ONLY: "ON",
|
||||
ASAN_OPTIONS: "allow_user_segv_handler=1:disable_coredump=0:detect_leaks=0",
|
||||
...getBuildEnv(platform, options),
|
||||
},
|
||||
command: `${getBuildCommand(platform, options, "build-bun")} --target bun`,
|
||||
command: `${getBuildCommand(platform, options)} --target bun`,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -557,6 +533,7 @@ function getBuildBunStep(platform, options) {
|
||||
/**
|
||||
* @typedef {Object} TestOptions
|
||||
* @property {string} [buildId]
|
||||
* @property {boolean} [unifiedTests]
|
||||
* @property {string[]} [testFiles]
|
||||
* @property {boolean} [dryRun]
|
||||
*/
|
||||
@@ -569,7 +546,7 @@ function getBuildBunStep(platform, options) {
|
||||
*/
|
||||
function getTestBunStep(platform, options, testOptions = {}) {
|
||||
const { os, profile } = platform;
|
||||
const { buildId, testFiles } = testOptions;
|
||||
const { buildId, unifiedTests, testFiles } = testOptions;
|
||||
|
||||
const args = [`--step=${getTargetKey(platform)}-build-bun`];
|
||||
if (buildId) {
|
||||
@@ -591,11 +568,8 @@ function getTestBunStep(platform, options, testOptions = {}) {
|
||||
agents: getTestAgent(platform, options),
|
||||
retry: getRetry(),
|
||||
cancel_on_build_failing: isMergeQueue(),
|
||||
parallelism: os === "darwin" ? 2 : 10,
|
||||
timeout_in_minutes: profile === "asan" || os === "windows" ? 45 : 30,
|
||||
env: {
|
||||
ASAN_OPTIONS: "allow_user_segv_handler=1:disable_coredump=0:detect_leaks=0",
|
||||
},
|
||||
parallelism: unifiedTests ? undefined : os === "darwin" ? 2 : 10,
|
||||
timeout_in_minutes: profile === "asan" ? 45 : 30,
|
||||
command:
|
||||
os === "windows"
|
||||
? `node .\\scripts\\runner.node.mjs ${args.join(" ")}`
|
||||
@@ -660,7 +634,7 @@ function getReleaseStep(buildPlatforms, options) {
|
||||
agents: {
|
||||
queue: "test-darwin",
|
||||
},
|
||||
depends_on: buildPlatforms.filter(p => p.os !== "freebsd").map(platform => `${getTargetKey(platform)}-build-bun`),
|
||||
depends_on: buildPlatforms.map(platform => `${getTargetKey(platform)}-build-bun`),
|
||||
env: {
|
||||
CANARY: revision,
|
||||
},
|
||||
@@ -773,6 +747,8 @@ function getBenchmarkStep() {
|
||||
* @property {Platform[]} [buildPlatforms]
|
||||
* @property {Platform[]} [testPlatforms]
|
||||
* @property {string[]} [testFiles]
|
||||
* @property {boolean} [unifiedBuilds]
|
||||
* @property {boolean} [unifiedTests]
|
||||
*/
|
||||
|
||||
/**
|
||||
@@ -943,6 +919,22 @@ function getOptionsStep() {
|
||||
default: "false",
|
||||
options: booleanOptions,
|
||||
},
|
||||
{
|
||||
key: "unified-builds",
|
||||
select: "Do you want to build each platform in a single step?",
|
||||
hint: "If true, builds will not be split into separate steps (this will likely slow down the build)",
|
||||
required: false,
|
||||
default: "false",
|
||||
options: booleanOptions,
|
||||
},
|
||||
{
|
||||
key: "unified-tests",
|
||||
select: "Do you want to run tests in a single step?",
|
||||
hint: "If true, tests will not be split into separate steps (this will be very slow)",
|
||||
required: false,
|
||||
default: "false",
|
||||
options: booleanOptions,
|
||||
},
|
||||
],
|
||||
};
|
||||
}
|
||||
@@ -1008,6 +1000,8 @@ async function getPipelineOptions() {
|
||||
buildImages: parseBoolean(options["build-images"]),
|
||||
publishImages: parseBoolean(options["publish-images"]),
|
||||
testFiles: parseArray(options["test-files"]),
|
||||
unifiedBuilds: parseBoolean(options["unified-builds"]),
|
||||
unifiedTests: parseBoolean(options["unified-tests"]),
|
||||
buildPlatforms: buildPlatformKeys?.length
|
||||
? buildPlatformKeys.flatMap(key => buildProfiles.map(profile => ({ ...buildPlatformsMap.get(key), profile })))
|
||||
: Array.from(buildPlatformsMap.values()),
|
||||
@@ -1073,7 +1067,7 @@ async function getPipeline(options = {}) {
|
||||
const imagePlatforms = new Map(
|
||||
buildImages || publishImages
|
||||
? [...buildPlatforms, ...testPlatforms]
|
||||
.filter(({ os }) => os !== "darwin")
|
||||
.filter(({ os }) => os === "linux" || os === "windows")
|
||||
.map(platform => [getImageKey(platform), platform])
|
||||
: [],
|
||||
);
|
||||
@@ -1089,7 +1083,7 @@ async function getPipeline(options = {}) {
|
||||
});
|
||||
}
|
||||
|
||||
let { skipBuilds, forceBuilds, dryRun } = options;
|
||||
let { skipBuilds, forceBuilds, unifiedBuilds, dryRun } = options;
|
||||
dryRun = dryRun || !!buildImages;
|
||||
|
||||
/** @type {string | undefined} */
|
||||
@@ -1107,13 +1101,10 @@ async function getPipeline(options = {}) {
|
||||
const includeASAN = !isMainBranch();
|
||||
|
||||
if (!buildId) {
|
||||
let relevantBuildPlatforms = includeASAN
|
||||
const relevantBuildPlatforms = includeASAN
|
||||
? buildPlatforms
|
||||
: buildPlatforms.filter(({ profile }) => profile !== "asan");
|
||||
|
||||
// run build-image but no build-bun yet
|
||||
relevantBuildPlatforms = relevantBuildPlatforms.filter(({ os }) => os !== "freebsd");
|
||||
|
||||
steps.push(
|
||||
...relevantBuildPlatforms.map(target => {
|
||||
const imageKey = getImageKey(target);
|
||||
@@ -1123,16 +1114,13 @@ async function getPipeline(options = {}) {
|
||||
dependsOn.push(`${imageKey}-build-image`);
|
||||
}
|
||||
|
||||
const steps = [];
|
||||
steps.push(getBuildCppStep(target, options));
|
||||
steps.push(getBuildZigStep(target, options));
|
||||
steps.push(getLinkBunStep(target, options));
|
||||
|
||||
return getStepWithDependsOn(
|
||||
{
|
||||
key: getTargetKey(target),
|
||||
group: getTargetLabel(target),
|
||||
steps,
|
||||
steps: unifiedBuilds
|
||||
? [getBuildBunStep(target, options)]
|
||||
: [getBuildCppStep(target, options), getBuildZigStep(target, options), getLinkBunStep(target, options)],
|
||||
},
|
||||
...dependsOn,
|
||||
);
|
||||
@@ -1141,13 +1129,13 @@ async function getPipeline(options = {}) {
|
||||
}
|
||||
|
||||
if (!isMainBranch()) {
|
||||
const { skipTests, forceTests, testFiles } = options;
|
||||
const { skipTests, forceTests, unifiedTests, testFiles } = options;
|
||||
if (!skipTests || forceTests) {
|
||||
steps.push(
|
||||
...testPlatforms.map(target => ({
|
||||
key: getTargetKey(target),
|
||||
group: getTargetLabel(target),
|
||||
steps: [getTestBunStep(target, options, { testFiles, buildId })],
|
||||
steps: [getTestBunStep(target, options, { unifiedTests, testFiles, buildId })],
|
||||
})),
|
||||
);
|
||||
}
|
||||
@@ -1190,43 +1178,6 @@ async function main() {
|
||||
console.log("Generated options:", options);
|
||||
}
|
||||
|
||||
startGroup("Querying GitHub for files...");
|
||||
if (options && isBuildkite && !isMainBranch()) {
|
||||
/** @type {string[]} */
|
||||
let allFiles = [];
|
||||
/** @type {string[]} */
|
||||
let newFiles = [];
|
||||
let prFileCount = 0;
|
||||
try {
|
||||
console.log("on buildkite: collecting new files from PR");
|
||||
const per_page = 50;
|
||||
const { BUILDKITE_PULL_REQUEST } = process.env;
|
||||
for (let i = 1; i <= 10; i++) {
|
||||
const res = await fetch(
|
||||
`https://api.github.com/repos/oven-sh/bun/pulls/${BUILDKITE_PULL_REQUEST}/files?per_page=${per_page}&page=${i}`,
|
||||
{ headers: { Authorization: `Bearer ${getSecret("GITHUB_TOKEN")}` } },
|
||||
);
|
||||
const doc = await res.json();
|
||||
console.log(`-> page ${i}, found ${doc.length} items`);
|
||||
if (doc.length === 0) break;
|
||||
for (const { filename, status } of doc) {
|
||||
prFileCount += 1;
|
||||
allFiles.push(filename);
|
||||
if (status !== "added") continue;
|
||||
newFiles.push(filename);
|
||||
}
|
||||
if (doc.length < per_page) break;
|
||||
}
|
||||
console.log(`- PR ${BUILDKITE_PULL_REQUEST}, ${prFileCount} files, ${newFiles.length} new files`);
|
||||
} catch (e) {
|
||||
console.error(e);
|
||||
}
|
||||
if (allFiles.every(filename => filename.startsWith("docs/"))) {
|
||||
console.log(`- PR is only docs, skipping tests!`);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
startGroup("Generating pipeline...");
|
||||
const pipeline = await getPipeline(options);
|
||||
if (!pipeline) {
|
||||
|
||||
255
.buildkite/macos-runners/CLAUDE.md
Normal file
255
.buildkite/macos-runners/CLAUDE.md
Normal file
@@ -0,0 +1,255 @@
|
||||
# macOS Runner Infrastructure - Claude Development Guide
|
||||
|
||||
This document provides context and guidance for Claude to work on the macOS runner infrastructure.
|
||||
|
||||
## Overview
|
||||
|
||||
This infrastructure provides automated, scalable macOS CI runners for Bun using MacStadium's Orka platform. It implements complete job isolation, daily image rebuilds, and comprehensive testing.
|
||||
|
||||
## Architecture
|
||||
|
||||
### Core Components
|
||||
- **Packer**: Builds VM images with all required software
|
||||
- **Terraform**: Manages VM fleet with auto-scaling
|
||||
- **GitHub Actions**: Automates daily rebuilds and deployments
|
||||
- **User Management**: Creates isolated users per job (`bk-<job-id>`)
|
||||
|
||||
### Key Features
|
||||
- **Complete Job Isolation**: Each Buildkite job runs in its own user account
|
||||
- **Daily Image Rebuilds**: Automated nightly rebuilds ensure fresh environments
|
||||
- **Flakiness Testing**: Multiple test iterations ensure reliability (80% success rate minimum)
|
||||
- **Software Validation**: All tools tested for proper installation and functionality
|
||||
- **Version Synchronization**: Exact versions match bootstrap.sh requirements
|
||||
|
||||
## File Structure
|
||||
|
||||
```
|
||||
.buildkite/macos-runners/
|
||||
├── packer/
|
||||
│ └── macos-base.pkr.hcl # VM image building configuration
|
||||
├── terraform/
|
||||
│ ├── main.tf # Infrastructure definition
|
||||
│ ├── variables.tf # Configuration variables
|
||||
│ ├── outputs.tf # Resource outputs
|
||||
│ └── user-data.sh # VM initialization script
|
||||
├── scripts/
|
||||
│ ├── bootstrap-macos.sh # macOS software installation
|
||||
│ ├── create-build-user.sh # User creation for job isolation
|
||||
│ ├── cleanup-build-user.sh # User cleanup after jobs
|
||||
│ └── job-runner.sh # Main job lifecycle management
|
||||
├── github-actions/
|
||||
│ ├── image-rebuild.yml # Daily image rebuild workflow
|
||||
│ └── deploy-fleet.yml # Fleet deployment workflow
|
||||
├── README.md # User documentation
|
||||
├── DEPLOYMENT.md # Deployment guide
|
||||
└── CLAUDE.md # This file
|
||||
```
|
||||
|
||||
## Software Versions (Must Match bootstrap.sh)
|
||||
|
||||
These versions are synchronized with `/scripts/bootstrap.sh`:
|
||||
|
||||
- **Node.js**: 24.3.0 (exact)
|
||||
- **Bun**: 1.2.17 (exact)
|
||||
- **LLVM**: 19.1.7 (exact)
|
||||
- **CMake**: 3.30.5 (exact)
|
||||
- **Buildkite Agent**: 3.87.0
|
||||
|
||||
## Key Scripts
|
||||
|
||||
### bootstrap-macos.sh
|
||||
- Installs all required software with exact versions
|
||||
- Configures development environment
|
||||
- Sets up Tailscale, Docker, and other dependencies
|
||||
- **Critical**: Must stay synchronized with main bootstrap.sh
|
||||
|
||||
### create-build-user.sh
|
||||
- Creates unique user per job: `bk-<job-id>`
|
||||
- Sets up isolated environment with proper permissions
|
||||
- Configures shell environment and paths
|
||||
- Creates workspace directories
|
||||
|
||||
### cleanup-build-user.sh
|
||||
- Kills all processes owned by build user
|
||||
- Removes user account and home directory
|
||||
- Cleans up temporary files and caches
|
||||
- Ensures complete isolation between jobs
|
||||
|
||||
### job-runner.sh
|
||||
- Main orchestration script
|
||||
- Manages job lifecycle: create user → run job → cleanup
|
||||
- Handles timeouts and health checks
|
||||
- Runs as root via LaunchDaemon
|
||||
|
||||
## GitHub Actions Workflows
|
||||
|
||||
### image-rebuild.yml
|
||||
- Runs daily at 2 AM UTC
|
||||
- Detects changes to trigger rebuilds
|
||||
- Builds images for macOS 13, 14, 15
|
||||
- **Validation Steps**:
|
||||
- Software installation verification
|
||||
- Flakiness testing (3 iterations, 80% success rate)
|
||||
- Health endpoint testing
|
||||
- Discord notifications for status
|
||||
|
||||
### deploy-fleet.yml
|
||||
- Manual deployment trigger
|
||||
- Validates inputs and plans changes
|
||||
- Deploys VM fleet with health checks
|
||||
- Supports different environments (prod/staging/dev)
|
||||
|
||||
## Required Secrets
|
||||
|
||||
### MacStadium
|
||||
- `MACSTADIUM_API_KEY`: API access key
|
||||
- `ORKA_ENDPOINT`: Orka API endpoint
|
||||
- `ORKA_AUTH_TOKEN`: Authentication token
|
||||
|
||||
### AWS
|
||||
- `AWS_ACCESS_KEY_ID`: For Terraform state storage
|
||||
- `AWS_SECRET_ACCESS_KEY`: For Terraform state storage
|
||||
|
||||
### Buildkite
|
||||
- `BUILDKITE_AGENT_TOKEN`: Agent registration token
|
||||
- `BUILDKITE_API_TOKEN`: For monitoring/status checks
|
||||
- `BUILDKITE_ORG`: Organization slug
|
||||
|
||||
### GitHub
|
||||
- `GITHUB_TOKEN`: For private repository access
|
||||
|
||||
### Notifications
|
||||
- `DISCORD_WEBHOOK_URL`: For status notifications
|
||||
|
||||
## Development Guidelines
|
||||
|
||||
### Adding New Software
|
||||
1. Update `bootstrap-macos.sh` with installation commands
|
||||
2. Add version verification in the script
|
||||
3. Include in validation tests in `image-rebuild.yml`
|
||||
4. Update documentation in README.md
|
||||
|
||||
### Modifying User Isolation
|
||||
1. Update `create-build-user.sh` for user creation
|
||||
2. Update `cleanup-build-user.sh` for cleanup
|
||||
3. Test isolation in `job-runner.sh`
|
||||
4. Ensure proper permissions and security
|
||||
|
||||
### Updating VM Configuration
|
||||
1. Modify `terraform/variables.tf` for fleet sizing
|
||||
2. Update `terraform/main.tf` for infrastructure changes
|
||||
3. Test deployment with `deploy-fleet.yml`
|
||||
4. Update documentation
|
||||
|
||||
### Version Updates
|
||||
1. **Critical**: Check `/scripts/bootstrap.sh` for version changes
|
||||
2. Update exact versions in `bootstrap-macos.sh`
|
||||
3. Update version verification in workflows
|
||||
4. Update documentation
|
||||
|
||||
## Testing Strategy
|
||||
|
||||
### Image Validation
|
||||
- Software installation verification
|
||||
- Version checking for exact matches
|
||||
- Health endpoint testing
|
||||
- Basic functionality tests
|
||||
|
||||
### Flakiness Testing
|
||||
- 3 test iterations per image
|
||||
- 80% success rate minimum
|
||||
- Tests basic commands, Node.js, Bun, build tools
|
||||
- Automated cleanup of test VMs
|
||||
|
||||
### Integration Testing
|
||||
- End-to-end job execution
|
||||
- User isolation verification
|
||||
- Resource cleanup validation
|
||||
- Performance monitoring
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Common Issues
|
||||
1. **Version Mismatches**: Check bootstrap.sh for updates
|
||||
2. **User Cleanup Failures**: Check process termination and file permissions
|
||||
3. **Image Build Failures**: Check Packer logs and VM resources
|
||||
4. **Flakiness**: Investigate VM performance and network issues
|
||||
|
||||
### Debugging Commands
|
||||
```bash
|
||||
# Check VM status
|
||||
orka vm list
|
||||
|
||||
# Check image status
|
||||
orka image list
|
||||
|
||||
# Test user creation
|
||||
sudo /usr/local/bin/bun-ci/create-build-user.sh
|
||||
|
||||
# Check health endpoint
|
||||
curl http://localhost:8080/health
|
||||
|
||||
# View logs
|
||||
tail -f /usr/local/var/log/buildkite-agent/buildkite-agent.log
|
||||
```
|
||||
|
||||
## Performance Considerations
|
||||
|
||||
### Resource Management
|
||||
- VMs configured with 12 CPU cores, 32GB RAM
|
||||
- Auto-scaling based on queue demand
|
||||
- Aggressive cleanup to prevent resource leaks
|
||||
|
||||
### Cost Optimization
|
||||
- Automated cleanup of old images and snapshots
|
||||
- Efficient VM sizing based on workload requirements
|
||||
- Scheduled maintenance windows
|
||||
|
||||
## Security
|
||||
|
||||
### Isolation
|
||||
- Complete process isolation per job
|
||||
- Separate user accounts with unique UIDs
|
||||
- Cleanup of all user data after jobs
|
||||
|
||||
### Network Security
|
||||
- VPC isolation with security groups
|
||||
- Limited SSH access for debugging
|
||||
- Encrypted communications
|
||||
|
||||
### Credential Management
|
||||
- Secure secret storage in GitHub
|
||||
- No hardcoded credentials in code
|
||||
- Regular rotation of access tokens
|
||||
|
||||
## Monitoring
|
||||
|
||||
### Health Checks
|
||||
- HTTP endpoints on port 8080
|
||||
- Buildkite agent connectivity monitoring
|
||||
- Resource usage tracking
|
||||
|
||||
### Alerts
|
||||
- Discord notifications for failures
|
||||
- Build status reporting
|
||||
- Fleet deployment notifications
|
||||
|
||||
## Next Steps for Development
|
||||
|
||||
1. **Monitor bootstrap.sh**: Watch for version updates that need synchronization
|
||||
2. **Performance Optimization**: Monitor resource usage and optimize VM sizes
|
||||
3. **Enhanced Testing**: Add more comprehensive validation tests
|
||||
4. **Cost Monitoring**: Track usage and optimize for cost efficiency
|
||||
5. **Security Hardening**: Regular security reviews and updates
|
||||
|
||||
## References
|
||||
|
||||
- [MacStadium Orka Documentation](https://orkadocs.macstadium.com/)
|
||||
- [Packer Documentation](https://www.packer.io/docs)
|
||||
- [Terraform Documentation](https://www.terraform.io/docs)
|
||||
- [Buildkite Agent Documentation](https://buildkite.com/docs/agent/v3)
|
||||
- [Main bootstrap.sh](../../scripts/bootstrap.sh) - **Keep synchronized!**
|
||||
|
||||
---
|
||||
|
||||
**Important**: This infrastructure is critical for Bun's CI/CD pipeline. Always test changes thoroughly and maintain backward compatibility. The `bootstrap-macos.sh` script must stay synchronized with the main `bootstrap.sh` script to ensure consistent environments.
|
||||
428
.buildkite/macos-runners/DEPLOYMENT.md
Normal file
428
.buildkite/macos-runners/DEPLOYMENT.md
Normal file
@@ -0,0 +1,428 @@
|
||||
# macOS Runner Deployment Guide
|
||||
|
||||
This guide provides step-by-step instructions for deploying the macOS runner infrastructure for Bun CI.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
### 1. MacStadium Account Setup
|
||||
|
||||
1. **Create MacStadium Account**
|
||||
- Sign up at [MacStadium](https://www.macstadium.com/)
|
||||
- Purchase Orka plan with appropriate VM allocation
|
||||
|
||||
2. **Configure API Access**
|
||||
- Generate API key from MacStadium dashboard
|
||||
- Note down your Orka endpoint URL
|
||||
- Test API connectivity
|
||||
|
||||
3. **Base Image Preparation**
|
||||
- Ensure base macOS images are available in your account
|
||||
- Verify image naming convention: `base-images/macos-{version}-{name}`
|
||||
|
||||
### 2. AWS Account Setup
|
||||
|
||||
1. **Create AWS Account**
|
||||
- Set up AWS account for Terraform state storage
|
||||
- Create S3 bucket for Terraform backend: `bun-terraform-state`
|
||||
|
||||
2. **Configure IAM**
|
||||
- Create IAM user with appropriate permissions
|
||||
- Generate access key and secret key
|
||||
- Attach policies for S3, CloudWatch, and EC2 (if using AWS resources)
|
||||
|
||||
### 3. GitHub Repository Setup
|
||||
|
||||
1. **Fork or Clone Repository**
|
||||
- Ensure you have admin access to the repository
|
||||
- Create necessary branches for deployment
|
||||
|
||||
2. **Configure Repository Secrets**
|
||||
- Add all required secrets (see main README.md)
|
||||
- Test secret accessibility
|
||||
|
||||
### 4. Buildkite Setup
|
||||
|
||||
1. **Organization Configuration**
|
||||
- Create or access Buildkite organization
|
||||
- Generate agent token with appropriate permissions
|
||||
- Note organization slug
|
||||
|
||||
2. **Queue Configuration**
|
||||
- Create queues: `macos`, `macos-arm64`, `macos-x86_64`
|
||||
- Configure queue-specific settings
|
||||
|
||||
## Step-by-Step Deployment
|
||||
|
||||
### Step 1: Environment Preparation
|
||||
|
||||
1. **Install Required Tools**
|
||||
```bash
|
||||
# Install Terraform
|
||||
wget https://releases.hashicorp.com/terraform/1.6.0/terraform_1.6.0_linux_amd64.zip
|
||||
unzip terraform_1.6.0_linux_amd64.zip
|
||||
sudo mv terraform /usr/local/bin/
|
||||
|
||||
# Install Packer
|
||||
wget https://releases.hashicorp.com/packer/1.9.4/packer_1.9.4_linux_amd64.zip
|
||||
unzip packer_1.9.4_linux_amd64.zip
|
||||
sudo mv packer /usr/local/bin/
|
||||
|
||||
# Install AWS CLI
|
||||
curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip"
|
||||
unzip awscliv2.zip
|
||||
sudo ./aws/install
|
||||
|
||||
# Install MacStadium CLI
|
||||
curl -L "https://github.com/macstadium/orka-cli/releases/latest/download/orka-cli-linux-amd64.tar.gz" | tar -xz
|
||||
sudo mv orka-cli /usr/local/bin/orka
|
||||
```
|
||||
|
||||
2. **Configure AWS Credentials**
|
||||
```bash
|
||||
aws configure
|
||||
# Enter your AWS access key, secret key, and region
|
||||
```
|
||||
|
||||
3. **Configure MacStadium CLI**
|
||||
```bash
|
||||
orka config set endpoint <your-orka-endpoint>
|
||||
orka auth token <your-orka-token>
|
||||
```
|
||||
|
||||
### Step 2: SSH Key Setup
|
||||
|
||||
1. **Generate SSH Key Pair**
|
||||
```bash
|
||||
ssh-keygen -t rsa -b 4096 -f ~/.ssh/bun-runner -N ""
|
||||
```
|
||||
|
||||
2. **Copy Public Key to Terraform Directory**
|
||||
```bash
|
||||
mkdir -p .buildkite/macos-runners/terraform/ssh-keys
|
||||
cp ~/.ssh/bun-runner.pub .buildkite/macos-runners/terraform/ssh-keys/bun-runner.pub
|
||||
```
|
||||
|
||||
### Step 3: Terraform Backend Setup
|
||||
|
||||
1. **Create S3 Bucket for Terraform State**
|
||||
```bash
|
||||
aws s3 mb s3://bun-terraform-state --region us-west-2
|
||||
aws s3api put-bucket-versioning --bucket bun-terraform-state --versioning-configuration Status=Enabled
|
||||
aws s3api put-bucket-encryption --bucket bun-terraform-state --server-side-encryption-configuration '{
|
||||
"Rules": [
|
||||
{
|
||||
"ApplyServerSideEncryptionByDefault": {
|
||||
"SSEAlgorithm": "AES256"
|
||||
}
|
||||
}
|
||||
]
|
||||
}'
|
||||
```
|
||||
|
||||
2. **Create Terraform Variables File**
|
||||
```bash
|
||||
cd .buildkite/macos-runners/terraform
|
||||
cat > production.tfvars << EOF
|
||||
environment = "production"
|
||||
macstadium_api_key = "your-macstadium-api-key"
|
||||
buildkite_agent_token = "your-buildkite-agent-token"
|
||||
github_token = "your-github-token"
|
||||
fleet_size = {
|
||||
macos_13 = 4
|
||||
macos_14 = 6
|
||||
macos_15 = 8
|
||||
}
|
||||
vm_configuration = {
|
||||
cpu_count = 12
|
||||
memory_gb = 32
|
||||
disk_size = 500
|
||||
}
|
||||
EOF
|
||||
```
|
||||
|
||||
### Step 4: Build VM Images
|
||||
|
||||
1. **Validate Packer Configuration**
|
||||
```bash
|
||||
cd .buildkite/macos-runners/packer
|
||||
packer validate -var "macos_version=15" macos-base.pkr.hcl
|
||||
```
|
||||
|
||||
2. **Build macOS 15 Image**
|
||||
```bash
|
||||
packer build \
|
||||
-var "macos_version=15" \
|
||||
-var "orka_endpoint=<your-orka-endpoint>" \
|
||||
-var "orka_auth_token=<your-orka-token>" \
|
||||
macos-base.pkr.hcl
|
||||
```
|
||||
|
||||
3. **Build macOS 14 Image**
|
||||
```bash
|
||||
packer build \
|
||||
-var "macos_version=14" \
|
||||
-var "orka_endpoint=<your-orka-endpoint>" \
|
||||
-var "orka_auth_token=<your-orka-token>" \
|
||||
macos-base.pkr.hcl
|
||||
```
|
||||
|
||||
4. **Build macOS 13 Image**
|
||||
```bash
|
||||
packer build \
|
||||
-var "macos_version=13" \
|
||||
-var "orka_endpoint=<your-orka-endpoint>" \
|
||||
-var "orka_auth_token=<your-orka-token>" \
|
||||
macos-base.pkr.hcl
|
||||
```
|
||||
|
||||
### Step 5: Deploy VM Fleet
|
||||
|
||||
1. **Initialize Terraform**
|
||||
```bash
|
||||
cd .buildkite/macos-runners/terraform
|
||||
terraform init
|
||||
```
|
||||
|
||||
2. **Create Production Workspace**
|
||||
```bash
|
||||
terraform workspace new production
|
||||
```
|
||||
|
||||
3. **Plan Deployment**
|
||||
```bash
|
||||
terraform plan -var-file="production.tfvars"
|
||||
```
|
||||
|
||||
4. **Apply Deployment**
|
||||
```bash
|
||||
terraform apply -var-file="production.tfvars"
|
||||
```
|
||||
|
||||
### Step 6: Verify Deployment
|
||||
|
||||
1. **Check VM Status**
|
||||
```bash
|
||||
orka vm list
|
||||
```
|
||||
|
||||
2. **Check Terraform Outputs**
|
||||
```bash
|
||||
terraform output
|
||||
```
|
||||
|
||||
3. **Test VM Connectivity**
|
||||
```bash
|
||||
# Get VM IP from terraform output
|
||||
VM_IP=$(terraform output -json vm_instances | jq -r '.value | to_entries[0].value.ip_address')
|
||||
|
||||
# Test SSH connectivity
|
||||
ssh -i ~/.ssh/bun-runner admin@$VM_IP
|
||||
|
||||
# Test health endpoint
|
||||
curl http://$VM_IP:8080/health
|
||||
```
|
||||
|
||||
4. **Verify Buildkite Agent Connectivity**
|
||||
```bash
|
||||
curl -H "Authorization: Bearer <your-buildkite-api-token>" \
|
||||
"https://api.buildkite.com/v2/organizations/<your-org>/agents"
|
||||
```
|
||||
|
||||
### Step 7: Configure GitHub Actions
|
||||
|
||||
1. **Enable GitHub Actions Workflows**
|
||||
- Navigate to repository Actions tab
|
||||
- Enable workflows if not already enabled
|
||||
|
||||
2. **Test Image Rebuild Workflow**
|
||||
```bash
|
||||
# Trigger manual rebuild
|
||||
gh workflow run image-rebuild.yml
|
||||
```
|
||||
|
||||
3. **Test Fleet Deployment Workflow**
|
||||
```bash
|
||||
# Trigger manual deployment
|
||||
gh workflow run deploy-fleet.yml
|
||||
```
|
||||
|
||||
## Post-Deployment Configuration
|
||||
|
||||
### 1. Monitoring Setup
|
||||
|
||||
1. **CloudWatch Dashboards**
|
||||
- Create custom dashboards for VM metrics
|
||||
- Set up alarms for critical thresholds
|
||||
|
||||
2. **Discord Notifications**
|
||||
- Configure Discord webhook for alerts
|
||||
- Test notification delivery
|
||||
|
||||
### 2. Backup Configuration
|
||||
|
||||
1. **Enable Automated Snapshots**
|
||||
```bash
|
||||
# Update terraform configuration
|
||||
backup_config = {
|
||||
enable_snapshots = true
|
||||
snapshot_schedule = "0 4 * * *"
|
||||
snapshot_retention = 7
|
||||
}
|
||||
```
|
||||
|
||||
2. **Test Backup Restoration**
|
||||
- Create test snapshot
|
||||
- Verify restoration process
|
||||
|
||||
### 3. Security Hardening
|
||||
|
||||
1. **Review Security Groups**
|
||||
- Minimize open ports
|
||||
- Restrict source IP ranges
|
||||
|
||||
2. **Enable Audit Logging**
|
||||
- Configure CloudTrail for AWS resources
|
||||
- Enable MacStadium audit logs
|
||||
|
||||
### 4. Performance Optimization
|
||||
|
||||
1. **Monitor Resource Usage**
|
||||
- Review CPU, memory, disk usage
|
||||
- Adjust VM sizes if needed
|
||||
|
||||
2. **Optimize Auto-Scaling**
|
||||
- Monitor scaling events
|
||||
- Adjust thresholds as needed
|
||||
|
||||
## Maintenance Procedures
|
||||
|
||||
### Daily Maintenance
|
||||
|
||||
1. **Automated Tasks**
|
||||
- Image rebuilds (automatic)
|
||||
- Health checks (automatic)
|
||||
- Cleanup processes (automatic)
|
||||
|
||||
2. **Manual Monitoring**
|
||||
- Check Discord notifications
|
||||
- Review CloudWatch metrics
|
||||
- Monitor Buildkite queue
|
||||
|
||||
### Weekly Maintenance
|
||||
|
||||
1. **Review Metrics**
|
||||
- Analyze performance trends
|
||||
- Check cost optimization opportunities
|
||||
|
||||
2. **Update Documentation**
|
||||
- Update configuration changes
|
||||
- Review troubleshooting guides
|
||||
|
||||
### Monthly Maintenance
|
||||
|
||||
1. **Capacity Planning**
|
||||
- Review usage patterns
|
||||
- Plan capacity adjustments
|
||||
|
||||
2. **Security Updates**
|
||||
- Review security patches
|
||||
- Update base images if needed
|
||||
|
||||
## Troubleshooting Common Issues
|
||||
|
||||
### Issue: VM Creation Fails
|
||||
|
||||
```bash
|
||||
# Check MacStadium account limits
|
||||
orka account info
|
||||
|
||||
# Check available resources
|
||||
orka resource list
|
||||
|
||||
# Review Packer logs
|
||||
tail -f packer-build.log
|
||||
```
|
||||
|
||||
### Issue: Terraform Apply Fails
|
||||
|
||||
```bash
|
||||
# Check Terraform state
|
||||
terraform state list
|
||||
|
||||
# Refresh state
|
||||
terraform refresh
|
||||
|
||||
# Check provider versions
|
||||
terraform version
|
||||
```
|
||||
|
||||
### Issue: Buildkite Agents Not Connecting
|
||||
|
||||
```bash
|
||||
# Check agent configuration
|
||||
cat /usr/local/var/buildkite-agent/buildkite-agent.cfg
|
||||
|
||||
# Check agent logs
|
||||
tail -f /usr/local/var/log/buildkite-agent/buildkite-agent.log
|
||||
|
||||
# Restart agent service
|
||||
sudo launchctl unload /Library/LaunchDaemons/com.buildkite.buildkite-agent.plist
|
||||
sudo launchctl load /Library/LaunchDaemons/com.buildkite.buildkite-agent.plist
|
||||
```
|
||||
|
||||
## Rollback Procedures
|
||||
|
||||
### Rollback VM Fleet
|
||||
|
||||
1. **Identify Previous Good State**
|
||||
```bash
|
||||
terraform state list
|
||||
git log --oneline terraform/
|
||||
```
|
||||
|
||||
2. **Rollback to Previous Configuration**
|
||||
```bash
|
||||
git checkout <previous-commit>
|
||||
terraform plan -var-file="production.tfvars"
|
||||
terraform apply -var-file="production.tfvars"
|
||||
```
|
||||
|
||||
### Rollback VM Images
|
||||
|
||||
1. **List Available Images**
|
||||
```bash
|
||||
orka image list
|
||||
```
|
||||
|
||||
2. **Update Terraform to Use Previous Images**
|
||||
```bash
|
||||
# Edit terraform configuration to use previous image IDs
|
||||
terraform plan -var-file="production.tfvars"
|
||||
terraform apply -var-file="production.tfvars"
|
||||
```
|
||||
|
||||
## Cost Optimization Tips
|
||||
|
||||
1. **Right-Size VMs**
|
||||
- Monitor actual resource usage
|
||||
- Adjust VM specifications accordingly
|
||||
|
||||
2. **Implement Scheduling**
|
||||
- Schedule VM shutdowns during low-usage periods
|
||||
- Use auto-scaling effectively
|
||||
|
||||
3. **Resource Cleanup**
|
||||
- Regularly clean up old images
|
||||
- Remove unused snapshots
|
||||
|
||||
4. **Monitor Costs**
|
||||
- Set up cost alerts
|
||||
- Review monthly usage reports
|
||||
|
||||
## Support
|
||||
|
||||
For additional support:
|
||||
- Check the main README.md for troubleshooting
|
||||
- Review GitHub Actions logs
|
||||
- Contact MacStadium support for platform issues
|
||||
- Open issues in the repository for infrastructure problems
|
||||
374
.buildkite/macos-runners/README.md
Normal file
374
.buildkite/macos-runners/README.md
Normal file
@@ -0,0 +1,374 @@
|
||||
# macOS Runner Infrastructure
|
||||
|
||||
This directory contains the infrastructure-as-code for deploying and managing macOS CI runners for the Bun project. It is located in the `.buildkite` folder alongside other CI configuration. The infrastructure provides automated, scalable, and reliable macOS build environments using MacStadium's Orka platform.
|
||||
|
||||
## Architecture Overview
|
||||
|
||||
The infrastructure consists of several key components:
|
||||
|
||||
1. **VM Images**: Golden images built with Packer containing all necessary software
|
||||
2. **VM Fleet**: Terraform-managed fleet of macOS VMs across different versions
|
||||
3. **User Isolation**: Per-job user creation and cleanup for complete isolation
|
||||
4. **Automation**: GitHub Actions workflows for daily image rebuilds and fleet management
|
||||
|
||||
## Key Features
|
||||
|
||||
- **Complete Isolation**: Each Buildkite job runs in its own user account
|
||||
- **Automatic Cleanup**: Processes and temporary files are cleaned up after each job
|
||||
- **Daily Image Rebuilds**: Automated nightly rebuilds ensure fresh, up-to-date environments
|
||||
- **Multi-Version Support**: Supports macOS 13, 14, and 15 simultaneously
|
||||
- **Auto-Scaling**: Automatic scaling based on job queue demand
|
||||
- **Health Monitoring**: Continuous health checks and monitoring
|
||||
- **Cost Optimization**: Efficient resource utilization and cleanup
|
||||
|
||||
## Directory Structure
|
||||
|
||||
```
|
||||
.buildkite/macos-runners/
|
||||
├── packer/ # Packer configuration for VM images
|
||||
│ ├── macos-base.pkr.hcl # Main Packer configuration
|
||||
│ └── ssh-keys/ # SSH keys for VM access
|
||||
├── terraform/ # Terraform configuration for VM fleet
|
||||
│ ├── main.tf # Main Terraform configuration
|
||||
│ ├── variables.tf # Variable definitions
|
||||
│ ├── outputs.tf # Output definitions
|
||||
│ └── user-data.sh # VM initialization script
|
||||
├── scripts/ # Management and utility scripts
|
||||
│ ├── bootstrap-macos.sh # macOS-specific bootstrap script
|
||||
│ ├── create-build-user.sh # User creation script
|
||||
│ ├── cleanup-build-user.sh # User cleanup script
|
||||
│ └── job-runner.sh # Main job runner script
|
||||
├── github-actions/ # GitHub Actions workflows
|
||||
│ ├── image-rebuild.yml # Daily image rebuild workflow
|
||||
│ └── deploy-fleet.yml # Fleet deployment workflow
|
||||
└── README.md # This file
|
||||
```
|
||||
|
||||
## Prerequisites
|
||||
|
||||
Before deploying the infrastructure, ensure you have:
|
||||
|
||||
1. **MacStadium Account**: Active MacStadium Orka account with API access
|
||||
2. **AWS Account**: For Terraform state storage and CloudWatch monitoring
|
||||
3. **GitHub Repository**: With required secrets configured
|
||||
4. **Buildkite Account**: With organization and agent tokens
|
||||
5. **Required Tools**: Packer, Terraform, AWS CLI, and MacStadium CLI
|
||||
|
||||
## Required Secrets
|
||||
|
||||
Configure the following secrets in your GitHub repository:
|
||||
|
||||
### MacStadium
|
||||
- `MACSTADIUM_API_KEY`: MacStadium API key
|
||||
- `ORKA_ENDPOINT`: MacStadium Orka API endpoint
|
||||
- `ORKA_AUTH_TOKEN`: MacStadium authentication token
|
||||
|
||||
### AWS
|
||||
- `AWS_ACCESS_KEY_ID`: AWS access key ID
|
||||
- `AWS_SECRET_ACCESS_KEY`: AWS secret access key
|
||||
|
||||
### Buildkite
|
||||
- `BUILDKITE_AGENT_TOKEN`: Buildkite agent token
|
||||
- `BUILDKITE_API_TOKEN`: Buildkite API token (for monitoring)
|
||||
- `BUILDKITE_ORG`: Buildkite organization slug
|
||||
|
||||
### GitHub
|
||||
- `GITHUB_TOKEN`: GitHub personal access token (for private repositories)
|
||||
|
||||
### Notifications
|
||||
- `DISCORD_WEBHOOK_URL`: Discord webhook URL for notifications
|
||||
|
||||
## Quick Start
|
||||
|
||||
### 1. Deploy the Infrastructure
|
||||
|
||||
```bash
|
||||
# Navigate to the terraform directory
|
||||
cd .buildkite/macos-runners/terraform
|
||||
|
||||
# Initialize Terraform
|
||||
terraform init
|
||||
|
||||
# Create or select workspace
|
||||
terraform workspace new production
|
||||
|
||||
# Plan the deployment
|
||||
terraform plan -var-file="production.tfvars"
|
||||
|
||||
# Apply the deployment
|
||||
terraform apply -var-file="production.tfvars"
|
||||
```
|
||||
|
||||
### 2. Build VM Images
|
||||
|
||||
```bash
|
||||
# Navigate to the packer directory
|
||||
cd .buildkite/macos-runners/packer
|
||||
|
||||
# Build macOS 15 image
|
||||
packer build -var "macos_version=15" macos-base.pkr.hcl
|
||||
|
||||
# Build macOS 14 image
|
||||
packer build -var "macos_version=14" macos-base.pkr.hcl
|
||||
|
||||
# Build macOS 13 image
|
||||
packer build -var "macos_version=13" macos-base.pkr.hcl
|
||||
```
|
||||
|
||||
### 3. Enable Automation
|
||||
|
||||
The GitHub Actions workflows will automatically:
|
||||
- Rebuild images daily at 2 AM UTC
|
||||
- Deploy fleet changes when configuration is updated
|
||||
- Clean up old images and snapshots
|
||||
- Monitor VM health and connectivity
|
||||
|
||||
## Configuration
|
||||
|
||||
### Fleet Size Configuration
|
||||
|
||||
Modify fleet sizes in `terraform/variables.tf`:
|
||||
|
||||
```hcl
|
||||
variable "fleet_size" {
|
||||
default = {
|
||||
macos_13 = 4 # Number of macOS 13 VMs
|
||||
macos_14 = 6 # Number of macOS 14 VMs
|
||||
macos_15 = 8 # Number of macOS 15 VMs
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### VM Configuration
|
||||
|
||||
Adjust VM specifications in `terraform/variables.tf`:
|
||||
|
||||
```hcl
|
||||
variable "vm_configuration" {
|
||||
default = {
|
||||
cpu_count = 12 # Number of CPU cores
|
||||
memory_gb = 32 # Memory in GB
|
||||
disk_size = 500 # Disk size in GB
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Auto-Scaling Configuration
|
||||
|
||||
Configure auto-scaling parameters:
|
||||
|
||||
```hcl
|
||||
variable "autoscaling_config" {
|
||||
default = {
|
||||
min_size = 2
|
||||
max_size = 30
|
||||
desired_capacity = 10
|
||||
scale_up_threshold = 80
|
||||
scale_down_threshold = 20
|
||||
scale_up_adjustment = 2
|
||||
scale_down_adjustment = 1
|
||||
cooldown_period = 300
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Software Included
|
||||
|
||||
Each VM image includes:
|
||||
|
||||
### Development Tools
|
||||
- Xcode Command Line Tools
|
||||
- LLVM/Clang 19.1.7 (exact version)
|
||||
- CMake 3.30.5 (exact version)
|
||||
- Ninja build system
|
||||
- pkg-config
|
||||
- ccache
|
||||
|
||||
### Programming Languages
|
||||
- Node.js 24.3.0 (exact version, matches bootstrap.sh)
|
||||
- Bun 1.2.17 (exact version, matches bootstrap.sh)
|
||||
- Python 3.11 and 3.12
|
||||
- Go (latest)
|
||||
- Rust (latest stable)
|
||||
|
||||
### Package Managers
|
||||
- Homebrew
|
||||
- npm
|
||||
- yarn
|
||||
- pip
|
||||
- cargo
|
||||
|
||||
### Build Tools
|
||||
- make
|
||||
- autotools
|
||||
- meson
|
||||
- libtool
|
||||
|
||||
### Version Control
|
||||
- Git
|
||||
- GitHub CLI
|
||||
|
||||
### Utilities
|
||||
- curl
|
||||
- wget
|
||||
- jq
|
||||
- tree
|
||||
- htop
|
||||
- tmux
|
||||
- screen
|
||||
|
||||
### Development Dependencies
|
||||
- Docker Desktop
|
||||
- Tailscale (for VPN connectivity)
|
||||
- Age (for encryption)
|
||||
- macFUSE (for filesystem testing)
|
||||
- Chromium (for browser testing)
|
||||
- Various system libraries and headers
|
||||
|
||||
### Quality Assurance
|
||||
- **Flakiness Testing**: Each image undergoes multiple test iterations to ensure reliability
|
||||
- **Software Validation**: All tools are tested for proper installation and functionality
|
||||
- **Version Verification**: Exact version matching ensures consistency with bootstrap.sh
|
||||
|
||||
## User Isolation
|
||||
|
||||
Each Buildkite job runs in complete isolation:
|
||||
|
||||
1. **Unique User**: Each job gets a unique user account (`bk-<job-id>`)
|
||||
2. **Isolated Environment**: Separate home directory and environment variables
|
||||
3. **Process Isolation**: All processes are killed after job completion
|
||||
4. **File System Cleanup**: Temporary files and caches are cleaned up
|
||||
5. **Network Isolation**: No shared network resources between jobs
|
||||
|
||||
## Monitoring and Alerting
|
||||
|
||||
The infrastructure includes comprehensive monitoring:
|
||||
|
||||
- **Health Checks**: HTTP health endpoints on each VM
|
||||
- **CloudWatch Metrics**: CPU, memory, disk usage monitoring
|
||||
- **Buildkite Integration**: Agent connectivity monitoring
|
||||
- **Slack Notifications**: Success/failure notifications
|
||||
- **Log Aggregation**: Centralized logging for troubleshooting
|
||||
|
||||
## Security Considerations
|
||||
|
||||
- **Encrypted Disks**: All VM disks are encrypted
|
||||
- **Network Security**: Security groups restrict network access
|
||||
- **SSH Key Management**: Secure SSH key distribution
|
||||
- **Regular Updates**: Automatic security updates
|
||||
- **Process Isolation**: Complete isolation between jobs
|
||||
- **Secure Credential Handling**: Secrets are managed securely
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Common Issues
|
||||
|
||||
1. **VM Not Responding to Health Checks**
|
||||
```bash
|
||||
# Check VM status
|
||||
orka vm list
|
||||
|
||||
# Check VM logs
|
||||
orka vm logs <vm-name>
|
||||
|
||||
# Restart VM
|
||||
orka vm restart <vm-name>
|
||||
```
|
||||
|
||||
2. **Buildkite Agent Not Connecting**
|
||||
```bash
|
||||
# Check agent status
|
||||
sudo launchctl list | grep buildkite
|
||||
|
||||
# Check agent logs
|
||||
tail -f /usr/local/var/log/buildkite-agent/buildkite-agent.log
|
||||
|
||||
# Restart agent
|
||||
sudo launchctl unload /Library/LaunchDaemons/com.buildkite.buildkite-agent.plist
|
||||
sudo launchctl load /Library/LaunchDaemons/com.buildkite.buildkite-agent.plist
|
||||
```
|
||||
|
||||
3. **User Creation Failures**
|
||||
```bash
|
||||
# Check user creation logs
|
||||
tail -f /var/log/system.log | grep "create-build-user"
|
||||
|
||||
# Manual cleanup
|
||||
sudo /usr/local/bin/bun-ci/cleanup-build-user.sh <username>
|
||||
```
|
||||
|
||||
4. **Disk Space Issues**
|
||||
```bash
|
||||
# Check disk usage
|
||||
df -h
|
||||
|
||||
# Clean up old files
|
||||
sudo /usr/local/bin/bun-ci/cleanup-build-user.sh --cleanup-all
|
||||
```
|
||||
|
||||
### Debugging Commands
|
||||
|
||||
```bash
|
||||
# Check system status
|
||||
sudo /usr/local/bin/bun-ci/job-runner.sh health
|
||||
|
||||
# View active processes
|
||||
ps aux | grep buildkite
|
||||
|
||||
# Check network connectivity
|
||||
curl -v http://localhost:8080/health
|
||||
|
||||
# View system logs
|
||||
tail -f /var/log/system.log
|
||||
|
||||
# Check Docker status
|
||||
docker info
|
||||
```
|
||||
|
||||
## Maintenance
|
||||
|
||||
### Regular Tasks
|
||||
|
||||
1. **Image Updates**: Images are rebuilt daily automatically
|
||||
2. **Fleet Updates**: Terraform changes are applied automatically
|
||||
3. **Cleanup**: Old images and snapshots are cleaned up automatically
|
||||
4. **Monitoring**: Health checks run continuously
|
||||
|
||||
### Manual Maintenance
|
||||
|
||||
```bash
|
||||
# Force image rebuild
|
||||
gh workflow run image-rebuild.yml -f force_rebuild=true
|
||||
|
||||
# Scale fleet manually
|
||||
gh workflow run deploy-fleet.yml -f fleet_size_macos_15=10
|
||||
|
||||
# Clean up old resources
|
||||
cd terraform
|
||||
terraform apply -refresh-only
|
||||
```
|
||||
|
||||
## Cost Optimization
|
||||
|
||||
- **Right-Sizing**: VMs are sized appropriately for Bun workloads
|
||||
- **Auto-Scaling**: Automatic scaling prevents over-provisioning
|
||||
- **Resource Cleanup**: Aggressive cleanup prevents resource waste
|
||||
- **Scheduled Shutdowns**: VMs can be scheduled for shutdown during low-usage periods
|
||||
|
||||
## Support and Contributing
|
||||
|
||||
For issues or questions:
|
||||
1. Check the troubleshooting section above
|
||||
2. Review GitHub Actions workflow logs
|
||||
3. Check MacStadium Orka console
|
||||
4. Open an issue in the repository
|
||||
|
||||
When contributing:
|
||||
1. Test changes in a staging environment first
|
||||
2. Update documentation as needed
|
||||
3. Follow the existing code style
|
||||
4. Add appropriate tests and validation
|
||||
|
||||
## License
|
||||
|
||||
This infrastructure code is part of the Bun project and follows the same license terms.
|
||||
376
.buildkite/macos-runners/github-actions/deploy-fleet.yml
Normal file
376
.buildkite/macos-runners/github-actions/deploy-fleet.yml
Normal file
@@ -0,0 +1,376 @@
|
||||
name: Deploy macOS Runner Fleet
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
environment:
|
||||
description: 'Deployment environment'
|
||||
required: true
|
||||
default: 'production'
|
||||
type: choice
|
||||
options:
|
||||
- production
|
||||
- staging
|
||||
- development
|
||||
fleet_size_macos_13:
|
||||
description: 'Number of macOS 13 VMs'
|
||||
required: false
|
||||
default: '4'
|
||||
fleet_size_macos_14:
|
||||
description: 'Number of macOS 14 VMs'
|
||||
required: false
|
||||
default: '6'
|
||||
fleet_size_macos_15:
|
||||
description: 'Number of macOS 15 VMs'
|
||||
required: false
|
||||
default: '8'
|
||||
force_deploy:
|
||||
description: 'Force deployment even if no changes'
|
||||
required: false
|
||||
default: false
|
||||
type: boolean
|
||||
|
||||
env:
|
||||
TERRAFORM_VERSION: "1.6.0"
|
||||
AWS_REGION: "us-west-2"
|
||||
|
||||
jobs:
|
||||
validate-inputs:
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
validated: ${{ steps.validate.outputs.validated }}
|
||||
total_vms: ${{ steps.validate.outputs.total_vms }}
|
||||
steps:
|
||||
- name: Validate inputs
|
||||
id: validate
|
||||
run: |
|
||||
# Validate fleet sizes
|
||||
macos_13="${{ github.event.inputs.fleet_size_macos_13 }}"
|
||||
macos_14="${{ github.event.inputs.fleet_size_macos_14 }}"
|
||||
macos_15="${{ github.event.inputs.fleet_size_macos_15 }}"
|
||||
|
||||
# Check if inputs are valid numbers
|
||||
if ! [[ "$macos_13" =~ ^[0-9]+$ ]] || ! [[ "$macos_14" =~ ^[0-9]+$ ]] || ! [[ "$macos_15" =~ ^[0-9]+$ ]]; then
|
||||
echo "Error: Fleet sizes must be valid numbers"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check if at least one VM is requested
|
||||
total_vms=$((macos_13 + macos_14 + macos_15))
|
||||
if [[ $total_vms -eq 0 ]]; then
|
||||
echo "Error: At least one VM must be requested"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check reasonable limits
|
||||
if [[ $total_vms -gt 50 ]]; then
|
||||
echo "Error: Total VMs cannot exceed 50"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "validated=true" >> $GITHUB_OUTPUT
|
||||
echo "total_vms=$total_vms" >> $GITHUB_OUTPUT
|
||||
|
||||
echo "Validation passed:"
|
||||
echo "- macOS 13: $macos_13 VMs"
|
||||
echo "- macOS 14: $macos_14 VMs"
|
||||
echo "- macOS 15: $macos_15 VMs"
|
||||
echo "- Total: $total_vms VMs"
|
||||
|
||||
plan-deployment:
|
||||
runs-on: ubuntu-latest
|
||||
needs: validate-inputs
|
||||
if: needs.validate-inputs.outputs.validated == 'true'
|
||||
outputs:
|
||||
plan_status: ${{ steps.plan.outputs.plan_status }}
|
||||
has_changes: ${{ steps.plan.outputs.has_changes }}
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Terraform
|
||||
uses: hashicorp/setup-terraform@v3
|
||||
with:
|
||||
terraform_version: ${{ env.TERRAFORM_VERSION }}
|
||||
|
||||
- name: Configure AWS credentials
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
with:
|
||||
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
aws-region: ${{ env.AWS_REGION }}
|
||||
|
||||
- name: Initialize Terraform
|
||||
working-directory: .buildkite/macos-runners/terraform
|
||||
run: |
|
||||
terraform init
|
||||
terraform workspace select ${{ github.event.inputs.environment }} || terraform workspace new ${{ github.event.inputs.environment }}
|
||||
|
||||
- name: Create terraform variables file
|
||||
working-directory: .buildkite/macos-runners/terraform
|
||||
run: |
|
||||
cat > terraform.tfvars << EOF
|
||||
environment = "${{ github.event.inputs.environment }}"
|
||||
fleet_size = {
|
||||
macos_13 = ${{ github.event.inputs.fleet_size_macos_13 }}
|
||||
macos_14 = ${{ github.event.inputs.fleet_size_macos_14 }}
|
||||
macos_15 = ${{ github.event.inputs.fleet_size_macos_15 }}
|
||||
}
|
||||
EOF
|
||||
|
||||
- name: Plan Terraform deployment
|
||||
id: plan
|
||||
working-directory: .buildkite/macos-runners/terraform
|
||||
run: |
|
||||
set -e
|
||||
|
||||
# Run terraform plan
|
||||
terraform plan \
|
||||
-var "macstadium_api_key=${{ secrets.MACSTADIUM_API_KEY }}" \
|
||||
-var "buildkite_agent_token=${{ secrets.BUILDKITE_AGENT_TOKEN }}" \
|
||||
-var "github_token=${{ secrets.GITHUB_TOKEN }}" \
|
||||
-out=tfplan \
|
||||
-detailed-exitcode > plan_output.txt 2>&1
|
||||
|
||||
plan_exit_code=$?
|
||||
|
||||
# Check plan results
|
||||
if [[ $plan_exit_code -eq 0 ]]; then
|
||||
echo "plan_status=no_changes" >> $GITHUB_OUTPUT
|
||||
echo "has_changes=false" >> $GITHUB_OUTPUT
|
||||
elif [[ $plan_exit_code -eq 2 ]]; then
|
||||
echo "plan_status=has_changes" >> $GITHUB_OUTPUT
|
||||
echo "has_changes=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "plan_status=failed" >> $GITHUB_OUTPUT
|
||||
echo "has_changes=false" >> $GITHUB_OUTPUT
|
||||
cat plan_output.txt
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Save plan output
|
||||
echo "Plan output:"
|
||||
cat plan_output.txt
|
||||
|
||||
- name: Upload plan
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: terraform-plan
|
||||
path: |
|
||||
.buildkite/macos-runners/terraform/tfplan
|
||||
.buildkite/macos-runners/terraform/plan_output.txt
|
||||
retention-days: 30
|
||||
|
||||
deploy:
|
||||
runs-on: ubuntu-latest
|
||||
needs: [validate-inputs, plan-deployment]
|
||||
if: needs.plan-deployment.outputs.has_changes == 'true' || github.event.inputs.force_deploy == 'true'
|
||||
environment: ${{ github.event.inputs.environment }}
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Terraform
|
||||
uses: hashicorp/setup-terraform@v3
|
||||
with:
|
||||
terraform_version: ${{ env.TERRAFORM_VERSION }}
|
||||
|
||||
- name: Configure AWS credentials
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
with:
|
||||
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
aws-region: ${{ env.AWS_REGION }}
|
||||
|
||||
- name: Download plan
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: terraform-plan
|
||||
path: .buildkite/macos-runners/terraform/
|
||||
|
||||
- name: Initialize Terraform
|
||||
working-directory: .buildkite/macos-runners/terraform
|
||||
run: |
|
||||
terraform init
|
||||
terraform workspace select ${{ github.event.inputs.environment }}
|
||||
|
||||
- name: Apply Terraform deployment
|
||||
working-directory: .buildkite/macos-runners/terraform
|
||||
run: |
|
||||
echo "Applying Terraform deployment..."
|
||||
terraform apply -auto-approve tfplan
|
||||
|
||||
- name: Get deployment outputs
|
||||
working-directory: .buildkite/macos-runners/terraform
|
||||
run: |
|
||||
terraform output -json > terraform-outputs.json
|
||||
echo "Deployment outputs:"
|
||||
cat terraform-outputs.json | jq .
|
||||
|
||||
- name: Upload deployment outputs
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: deployment-outputs-${{ github.event.inputs.environment }}
|
||||
path: .buildkite/macos-runners/terraform/terraform-outputs.json
|
||||
retention-days: 90
|
||||
|
||||
- name: Verify deployment
|
||||
working-directory: .buildkite/macos-runners/terraform
|
||||
run: |
|
||||
echo "Verifying deployment..."
|
||||
|
||||
# Check VM count
|
||||
vm_count=$(terraform output -json vm_instances | jq 'length')
|
||||
expected_count=${{ needs.validate-inputs.outputs.total_vms }}
|
||||
|
||||
if [[ $vm_count -eq $expected_count ]]; then
|
||||
echo "✅ VM count matches expected: $vm_count"
|
||||
else
|
||||
echo "❌ VM count mismatch: expected $expected_count, got $vm_count"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check VM states
|
||||
terraform output -json vm_instances | jq -r 'to_entries[] | "\(.key): \(.value.name) - \(.value.status)"' | while read vm_info; do
|
||||
echo "VM: $vm_info"
|
||||
done
|
||||
|
||||
health-check:
|
||||
runs-on: ubuntu-latest
|
||||
needs: [validate-inputs, plan-deployment, deploy]
|
||||
if: always() && needs.deploy.result == 'success'
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup dependencies
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y jq curl
|
||||
|
||||
- name: Download deployment outputs
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: deployment-outputs-${{ github.event.inputs.environment }}
|
||||
path: ./
|
||||
|
||||
- name: Wait for VMs to be ready
|
||||
run: |
|
||||
echo "Waiting for VMs to be ready..."
|
||||
sleep 300 # Wait 5 minutes for VMs to initialize
|
||||
|
||||
- name: Check VM health
|
||||
run: |
|
||||
echo "Checking VM health..."
|
||||
|
||||
# Read VM details from outputs
|
||||
jq -r '.vm_instances.value | to_entries[] | "\(.value.name) \(.value.ip_address)"' terraform-outputs.json | while read vm_name vm_ip; do
|
||||
echo "Checking VM: $vm_name ($vm_ip)"
|
||||
|
||||
# Check health endpoint
|
||||
max_attempts=12
|
||||
attempt=1
|
||||
|
||||
while [[ $attempt -le $max_attempts ]]; do
|
||||
if curl -f -s --max-time 30 "http://$vm_ip:8080/health" > /dev/null; then
|
||||
echo "✅ $vm_name is healthy"
|
||||
break
|
||||
else
|
||||
echo "⏳ $vm_name not ready yet (attempt $attempt/$max_attempts)"
|
||||
sleep 30
|
||||
((attempt++))
|
||||
fi
|
||||
done
|
||||
|
||||
if [[ $attempt -gt $max_attempts ]]; then
|
||||
echo "❌ $vm_name failed health check"
|
||||
fi
|
||||
done
|
||||
|
||||
- name: Check Buildkite connectivity
|
||||
run: |
|
||||
echo "Checking Buildkite agent connectivity..."
|
||||
|
||||
# Wait a bit more for agents to connect
|
||||
sleep 60
|
||||
|
||||
# Check connected agents
|
||||
curl -s -H "Authorization: Bearer ${{ secrets.BUILDKITE_API_TOKEN }}" \
|
||||
"https://api.buildkite.com/v2/organizations/${{ secrets.BUILDKITE_ORG }}/agents" | \
|
||||
jq -r '.[] | select(.name | test("^bun-runner-")) | "\(.name) \(.connection_state) \(.hostname)"' | \
|
||||
while read agent_name state hostname; do
|
||||
echo "Agent: $agent_name - State: $state - Host: $hostname"
|
||||
done
|
||||
|
||||
notify-success:
|
||||
runs-on: ubuntu-latest
|
||||
needs: [validate-inputs, plan-deployment, deploy, health-check]
|
||||
if: always() && needs.deploy.result == 'success'
|
||||
|
||||
steps:
|
||||
- name: Notify success
|
||||
uses: sarisia/actions-status-discord@v1
|
||||
with:
|
||||
webhook: ${{ secrets.DISCORD_WEBHOOK_URL }}
|
||||
status: success
|
||||
title: "macOS runner fleet deployed successfully"
|
||||
description: |
|
||||
🚀 **macOS runner fleet deployed successfully**
|
||||
|
||||
**Environment:** ${{ github.event.inputs.environment }}
|
||||
**Total VMs:** ${{ needs.validate-inputs.outputs.total_vms }}
|
||||
|
||||
**Fleet composition:**
|
||||
- macOS 13: ${{ github.event.inputs.fleet_size_macos_13 }} VMs
|
||||
- macOS 14: ${{ github.event.inputs.fleet_size_macos_14 }} VMs
|
||||
- macOS 15: ${{ github.event.inputs.fleet_size_macos_15 }} VMs
|
||||
|
||||
**Repository:** ${{ github.repository }}
|
||||
[View Deployment](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }})
|
||||
color: 0x00ff00
|
||||
username: "GitHub Actions"
|
||||
|
||||
notify-failure:
|
||||
runs-on: ubuntu-latest
|
||||
needs: [validate-inputs, plan-deployment, deploy, health-check]
|
||||
if: always() && (needs.validate-inputs.result == 'failure' || needs.plan-deployment.result == 'failure' || needs.deploy.result == 'failure')
|
||||
|
||||
steps:
|
||||
- name: Notify failure
|
||||
uses: sarisia/actions-status-discord@v1
|
||||
with:
|
||||
webhook: ${{ secrets.DISCORD_WEBHOOK_URL }}
|
||||
status: failure
|
||||
title: "macOS runner fleet deployment failed"
|
||||
description: |
|
||||
🔴 **macOS runner fleet deployment failed**
|
||||
|
||||
**Environment:** ${{ github.event.inputs.environment }}
|
||||
**Failed stage:** ${{ needs.validate-inputs.result == 'failure' && 'Validation' || needs.plan-deployment.result == 'failure' && 'Planning' || 'Deployment' }}
|
||||
|
||||
**Repository:** ${{ github.repository }}
|
||||
[View Deployment](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }})
|
||||
|
||||
Please check the logs for more details.
|
||||
color: 0xff0000
|
||||
username: "GitHub Actions"
|
||||
|
||||
notify-no-changes:
|
||||
runs-on: ubuntu-latest
|
||||
needs: [validate-inputs, plan-deployment]
|
||||
if: needs.plan-deployment.outputs.has_changes == 'false' && github.event.inputs.force_deploy != 'true'
|
||||
|
||||
steps:
|
||||
- name: Notify no changes
|
||||
uses: sarisia/actions-status-discord@v1
|
||||
with:
|
||||
webhook: ${{ secrets.DISCORD_WEBHOOK_URL }}
|
||||
status: cancelled
|
||||
title: "macOS runner fleet deployment skipped"
|
||||
description: |
|
||||
ℹ️ **macOS runner fleet deployment skipped** - no changes detected in Terraform plan
|
||||
color: 0x808080
|
||||
username: "GitHub Actions"
|
||||
515
.buildkite/macos-runners/github-actions/image-rebuild.yml
Normal file
515
.buildkite/macos-runners/github-actions/image-rebuild.yml
Normal file
@@ -0,0 +1,515 @@
|
||||
name: Rebuild macOS Runner Images
|
||||
|
||||
on:
|
||||
schedule:
|
||||
# Run daily at 2 AM UTC
|
||||
- cron: '0 2 * * *'
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
macos_versions:
|
||||
description: 'macOS versions to rebuild (comma-separated: 13,14,15)'
|
||||
required: false
|
||||
default: '13,14,15'
|
||||
force_rebuild:
|
||||
description: 'Force rebuild even if no changes detected'
|
||||
required: false
|
||||
default: 'false'
|
||||
type: boolean
|
||||
|
||||
env:
|
||||
PACKER_VERSION: "1.9.4"
|
||||
TERRAFORM_VERSION: "1.6.0"
|
||||
|
||||
jobs:
|
||||
check-changes:
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
should_rebuild: ${{ steps.check.outputs.should_rebuild }}
|
||||
changed_files: ${{ steps.check.outputs.changed_files }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 2
|
||||
|
||||
- name: Check for changes
|
||||
id: check
|
||||
run: |
|
||||
# Check if any relevant files have changed in the last 24 hours
|
||||
changed_files=$(git diff --name-only HEAD~1 HEAD | grep -E "(bootstrap|packer|\.buildkite/macos-runners)" | head -20)
|
||||
|
||||
if [[ -n "$changed_files" ]] || [[ "${{ github.event.inputs.force_rebuild }}" == "true" ]]; then
|
||||
echo "should_rebuild=true" >> $GITHUB_OUTPUT
|
||||
echo "changed_files<<EOF" >> $GITHUB_OUTPUT
|
||||
echo "$changed_files" >> $GITHUB_OUTPUT
|
||||
echo "EOF" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "should_rebuild=false" >> $GITHUB_OUTPUT
|
||||
echo "changed_files=" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
build-images:
|
||||
runs-on: ubuntu-latest
|
||||
needs: check-changes
|
||||
if: needs.check-changes.outputs.should_rebuild == 'true'
|
||||
strategy:
|
||||
matrix:
|
||||
macos_version: [13, 14, 15]
|
||||
fail-fast: false
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Packer
|
||||
uses: hashicorp/setup-packer@main
|
||||
with:
|
||||
version: ${{ env.PACKER_VERSION }}
|
||||
|
||||
- name: Setup Terraform
|
||||
uses: hashicorp/setup-terraform@v3
|
||||
with:
|
||||
terraform_version: ${{ env.TERRAFORM_VERSION }}
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y jq curl
|
||||
|
||||
- name: Configure AWS credentials
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
with:
|
||||
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
aws-region: us-west-2
|
||||
|
||||
- name: Validate Packer configuration
|
||||
working-directory: .buildkite/macos-runners/packer
|
||||
run: |
|
||||
packer validate \
|
||||
-var "macos_version=${{ matrix.macos_version }}" \
|
||||
-var "orka_endpoint=${{ secrets.ORKA_ENDPOINT }}" \
|
||||
-var "orka_auth_token=${{ secrets.ORKA_AUTH_TOKEN }}" \
|
||||
macos-base.pkr.hcl
|
||||
|
||||
- name: Build macOS ${{ matrix.macos_version }} image
|
||||
working-directory: .buildkite/macos-runners/packer
|
||||
run: |
|
||||
echo "Building macOS ${{ matrix.macos_version }} image..."
|
||||
|
||||
# Set build variables
|
||||
export PACKER_LOG=1
|
||||
export PACKER_LOG_PATH="./packer-build-macos-${{ matrix.macos_version }}.log"
|
||||
|
||||
# Build the image
|
||||
packer build \
|
||||
-var "macos_version=${{ matrix.macos_version }}" \
|
||||
-var "orka_endpoint=${{ secrets.ORKA_ENDPOINT }}" \
|
||||
-var "orka_auth_token=${{ secrets.ORKA_AUTH_TOKEN }}" \
|
||||
-var "base_image=base-images/macos-${{ matrix.macos_version }}-$([ ${{ matrix.macos_version }} -eq 13 ] && echo 'ventura' || [ ${{ matrix.macos_version }} -eq 14 ] && echo 'sonoma' || echo 'sequoia')" \
|
||||
macos-base.pkr.hcl
|
||||
|
||||
- name: Validate built image
|
||||
working-directory: .buildkite/macos-runners/packer
|
||||
run: |
|
||||
echo "Validating built image..."
|
||||
|
||||
# Get the latest built image ID
|
||||
IMAGE_ID=$(orka image list --output json | jq -r '.[] | select(.name | test("^bun-macos-${{ matrix.macos_version }}-")) | .id' | head -1)
|
||||
|
||||
if [ -z "$IMAGE_ID" ]; then
|
||||
echo "❌ No image found for macOS ${{ matrix.macos_version }}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "✅ Found image: $IMAGE_ID"
|
||||
|
||||
# Create a test VM to validate the image
|
||||
VM_NAME="test-validation-${{ matrix.macos_version }}-$(date +%s)"
|
||||
|
||||
echo "Creating test VM: $VM_NAME"
|
||||
orka vm create \
|
||||
--name "$VM_NAME" \
|
||||
--image "$IMAGE_ID" \
|
||||
--cpu 4 \
|
||||
--memory 8 \
|
||||
--wait
|
||||
|
||||
# Wait for VM to be ready
|
||||
sleep 60
|
||||
|
||||
# Get VM IP
|
||||
VM_IP=$(orka vm show "$VM_NAME" --output json | jq -r '.ip_address')
|
||||
|
||||
echo "Testing VM at IP: $VM_IP"
|
||||
|
||||
# Test software installations
|
||||
echo "Testing software installations..."
|
||||
|
||||
# Test Node.js
|
||||
ssh -o StrictHostKeyChecking=no admin@$VM_IP 'node --version' || exit 1
|
||||
|
||||
# Test Bun
|
||||
ssh -o StrictHostKeyChecking=no admin@$VM_IP 'bun --version' || exit 1
|
||||
|
||||
# Test build tools
|
||||
ssh -o StrictHostKeyChecking=no admin@$VM_IP 'cmake --version' || exit 1
|
||||
ssh -o StrictHostKeyChecking=no admin@$VM_IP 'clang --version' || exit 1
|
||||
|
||||
# Test Docker
|
||||
ssh -o StrictHostKeyChecking=no admin@$VM_IP 'docker --version' || exit 1
|
||||
|
||||
# Test Tailscale
|
||||
ssh -o StrictHostKeyChecking=no admin@$VM_IP 'tailscale --version' || exit 1
|
||||
|
||||
# Test health endpoint
|
||||
ssh -o StrictHostKeyChecking=no admin@$VM_IP 'curl -f http://localhost:8080/health' || exit 1
|
||||
|
||||
echo "✅ All software validations passed"
|
||||
|
||||
# Clean up test VM
|
||||
orka vm delete "$VM_NAME" --force
|
||||
|
||||
echo "✅ Image validation completed successfully"
|
||||
|
||||
- name: Run flakiness checks
|
||||
working-directory: .buildkite/macos-runners/packer
|
||||
run: |
|
||||
echo "Running flakiness checks..."
|
||||
|
||||
# Get the latest built image ID
|
||||
IMAGE_ID=$(orka image list --output json | jq -r '.[] | select(.name | test("^bun-macos-${{ matrix.macos_version }}-")) | .id' | head -1)
|
||||
|
||||
# Run multiple test iterations to check for flakiness
|
||||
ITERATIONS=3
|
||||
PASSED=0
|
||||
FAILED=0
|
||||
|
||||
for i in $(seq 1 $ITERATIONS); do
|
||||
echo "Running flakiness test iteration $i/$ITERATIONS..."
|
||||
|
||||
VM_NAME="flakiness-test-${{ matrix.macos_version }}-$i-$(date +%s)"
|
||||
|
||||
# Create test VM
|
||||
orka vm create \
|
||||
--name "$VM_NAME" \
|
||||
--image "$IMAGE_ID" \
|
||||
--cpu 4 \
|
||||
--memory 8 \
|
||||
--wait
|
||||
|
||||
sleep 30
|
||||
|
||||
# Get VM IP
|
||||
VM_IP=$(orka vm show "$VM_NAME" --output json | jq -r '.ip_address')
|
||||
|
||||
# Run a series of quick tests
|
||||
TEST_PASSED=true
|
||||
|
||||
# Test 1: Basic command execution
|
||||
if ! ssh -o StrictHostKeyChecking=no -o ConnectTimeout=30 admin@$VM_IP 'echo "test" > /tmp/test.txt && cat /tmp/test.txt'; then
|
||||
echo "❌ Basic command test failed"
|
||||
TEST_PASSED=false
|
||||
fi
|
||||
|
||||
# Test 2: Node.js execution
|
||||
if ! ssh -o StrictHostKeyChecking=no -o ConnectTimeout=30 admin@$VM_IP 'node -e "console.log(\"Node.js test\")"'; then
|
||||
echo "❌ Node.js test failed"
|
||||
TEST_PASSED=false
|
||||
fi
|
||||
|
||||
# Test 3: Bun execution
|
||||
if ! ssh -o StrictHostKeyChecking=no -o ConnectTimeout=30 admin@$VM_IP 'bun -e "console.log(\"Bun test\")"'; then
|
||||
echo "❌ Bun test failed"
|
||||
TEST_PASSED=false
|
||||
fi
|
||||
|
||||
# Test 4: Build tools
|
||||
if ! ssh -o StrictHostKeyChecking=no -o ConnectTimeout=30 admin@$VM_IP 'clang --version > /tmp/clang_version.txt'; then
|
||||
echo "❌ Clang test failed"
|
||||
TEST_PASSED=false
|
||||
fi
|
||||
|
||||
# Test 5: File system operations
|
||||
if ! ssh -o StrictHostKeyChecking=no -o ConnectTimeout=30 admin@$VM_IP 'mkdir -p /tmp/test_dir && touch /tmp/test_dir/test_file'; then
|
||||
echo "❌ File system test failed"
|
||||
TEST_PASSED=false
|
||||
fi
|
||||
|
||||
# Test 6: Process creation
|
||||
if ! ssh -o StrictHostKeyChecking=no -o ConnectTimeout=30 admin@$VM_IP 'ps aux | grep -v grep | wc -l'; then
|
||||
echo "❌ Process test failed"
|
||||
TEST_PASSED=false
|
||||
fi
|
||||
|
||||
# Clean up test VM
|
||||
orka vm delete "$VM_NAME" --force
|
||||
|
||||
if [ "$TEST_PASSED" = true ]; then
|
||||
echo "✅ Iteration $i passed"
|
||||
PASSED=$((PASSED + 1))
|
||||
else
|
||||
echo "❌ Iteration $i failed"
|
||||
FAILED=$((FAILED + 1))
|
||||
fi
|
||||
|
||||
# Short delay between iterations
|
||||
sleep 10
|
||||
done
|
||||
|
||||
echo "Flakiness check results:"
|
||||
echo "- Passed: $PASSED/$ITERATIONS"
|
||||
echo "- Failed: $FAILED/$ITERATIONS"
|
||||
|
||||
# Calculate success rate
|
||||
SUCCESS_RATE=$((PASSED * 100 / ITERATIONS))
|
||||
echo "- Success rate: $SUCCESS_RATE%"
|
||||
|
||||
# Fail if success rate is below 80%
|
||||
if [ $SUCCESS_RATE -lt 80 ]; then
|
||||
echo "❌ Image is too flaky! Success rate: $SUCCESS_RATE% (minimum: 80%)"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "✅ Flakiness checks passed with $SUCCESS_RATE% success rate"
|
||||
|
||||
- name: Upload build logs
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: packer-logs-macos-${{ matrix.macos_version }}
|
||||
path: .buildkite/macos-runners/packer/packer-build-macos-${{ matrix.macos_version }}.log
|
||||
retention-days: 7
|
||||
|
||||
- name: Notify on failure
|
||||
if: failure()
|
||||
uses: sarisia/actions-status-discord@v1
|
||||
with:
|
||||
webhook: ${{ secrets.DISCORD_WEBHOOK_URL }}
|
||||
status: failure
|
||||
title: "macOS ${{ matrix.macos_version }} image build failed"
|
||||
description: |
|
||||
🔴 **macOS ${{ matrix.macos_version }} image build failed**
|
||||
|
||||
**Repository:** ${{ github.repository }}
|
||||
**Branch:** ${{ github.ref }}
|
||||
**Commit:** ${{ github.sha }}
|
||||
|
||||
[Check the logs](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }})
|
||||
color: 0xff0000
|
||||
username: "GitHub Actions"
|
||||
|
||||
update-terraform:
|
||||
runs-on: ubuntu-latest
|
||||
needs: [check-changes, build-images]
|
||||
if: needs.check-changes.outputs.should_rebuild == 'true' && needs.build-images.result == 'success'
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Terraform
|
||||
uses: hashicorp/setup-terraform@v3
|
||||
with:
|
||||
terraform_version: ${{ env.TERRAFORM_VERSION }}
|
||||
|
||||
- name: Configure AWS credentials
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
with:
|
||||
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
aws-region: us-west-2
|
||||
|
||||
- name: Initialize Terraform
|
||||
working-directory: .buildkite/macos-runners/terraform
|
||||
run: |
|
||||
terraform init
|
||||
terraform workspace select production || terraform workspace new production
|
||||
|
||||
- name: Plan Terraform changes
|
||||
working-directory: .buildkite/macos-runners/terraform
|
||||
run: |
|
||||
terraform plan \
|
||||
-var "macstadium_api_key=${{ secrets.MACSTADIUM_API_KEY }}" \
|
||||
-var "buildkite_agent_token=${{ secrets.BUILDKITE_AGENT_TOKEN }}" \
|
||||
-var "github_token=${{ secrets.GITHUB_TOKEN }}" \
|
||||
-out=tfplan
|
||||
|
||||
- name: Apply Terraform changes
|
||||
working-directory: .buildkite/macos-runners/terraform
|
||||
run: |
|
||||
terraform apply -auto-approve tfplan
|
||||
|
||||
- name: Save Terraform outputs
|
||||
working-directory: .buildkite/macos-runners/terraform
|
||||
run: |
|
||||
terraform output -json > terraform-outputs.json
|
||||
|
||||
- name: Upload Terraform outputs
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: terraform-outputs
|
||||
path: .buildkite/macos-runners/terraform/terraform-outputs.json
|
||||
retention-days: 30
|
||||
|
||||
cleanup-old-images:
|
||||
runs-on: ubuntu-latest
|
||||
needs: [check-changes, build-images, update-terraform]
|
||||
if: always() && needs.check-changes.outputs.should_rebuild == 'true'
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup AWS CLI
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
with:
|
||||
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
aws-region: us-west-2
|
||||
|
||||
- name: Install MacStadium CLI
|
||||
run: |
|
||||
curl -L "https://github.com/macstadium/orka-cli/releases/latest/download/orka-cli-linux-amd64.tar.gz" | tar -xz
|
||||
sudo mv orka-cli /usr/local/bin/orka
|
||||
chmod +x /usr/local/bin/orka
|
||||
|
||||
- name: Configure MacStadium CLI
|
||||
run: |
|
||||
orka config set endpoint ${{ secrets.ORKA_ENDPOINT }}
|
||||
orka auth token ${{ secrets.ORKA_AUTH_TOKEN }}
|
||||
|
||||
- name: Clean up old images
|
||||
run: |
|
||||
echo "Cleaning up old images..."
|
||||
|
||||
# Get list of all images
|
||||
orka image list --output json > images.json
|
||||
|
||||
# Find images older than 7 days
|
||||
cutoff_date=$(date -d '7 days ago' +%s)
|
||||
|
||||
# Parse and delete old images
|
||||
jq -r '.[] | select(.name | test("^bun-macos-")) | select(.created_at | strptime("%Y-%m-%dT%H:%M:%SZ") | mktime < '$cutoff_date') | .name' images.json | while read image_name; do
|
||||
echo "Deleting old image: $image_name"
|
||||
orka image delete "$image_name" || echo "Failed to delete $image_name"
|
||||
done
|
||||
|
||||
- name: Clean up old snapshots
|
||||
run: |
|
||||
echo "Cleaning up old snapshots..."
|
||||
|
||||
# Get list of all snapshots
|
||||
orka snapshot list --output json > snapshots.json
|
||||
|
||||
# Find snapshots older than 7 days
|
||||
cutoff_date=$(date -d '7 days ago' +%s)
|
||||
|
||||
# Parse and delete old snapshots
|
||||
jq -r '.[] | select(.name | test("^bun-macos-")) | select(.created_at | strptime("%Y-%m-%dT%H:%M:%SZ") | mktime < '$cutoff_date') | .name' snapshots.json | while read snapshot_name; do
|
||||
echo "Deleting old snapshot: $snapshot_name"
|
||||
orka snapshot delete "$snapshot_name" || echo "Failed to delete $snapshot_name"
|
||||
done
|
||||
|
||||
health-check:
|
||||
runs-on: ubuntu-latest
|
||||
needs: [check-changes, build-images, update-terraform]
|
||||
if: always() && needs.check-changes.outputs.should_rebuild == 'true'
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup AWS CLI
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
with:
|
||||
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
aws-region: us-west-2
|
||||
|
||||
- name: Install MacStadium CLI
|
||||
run: |
|
||||
curl -L "https://github.com/macstadium/orka-cli/releases/latest/download/orka-cli-linux-amd64.tar.gz" | tar -xz
|
||||
sudo mv orka-cli /usr/local/bin/orka
|
||||
chmod +x /usr/local/bin/orka
|
||||
|
||||
- name: Configure MacStadium CLI
|
||||
run: |
|
||||
orka config set endpoint ${{ secrets.ORKA_ENDPOINT }}
|
||||
orka auth token ${{ secrets.ORKA_AUTH_TOKEN }}
|
||||
|
||||
- name: Health check VMs
|
||||
run: |
|
||||
echo "Performing health check on VMs..."
|
||||
|
||||
# Get list of running VMs
|
||||
orka vm list --output json > vms.json
|
||||
|
||||
# Check each VM
|
||||
jq -r '.[] | select(.name | test("^bun-runner-")) | select(.status == "running") | "\(.name) \(.ip_address)"' vms.json | while read vm_name vm_ip; do
|
||||
echo "Checking VM: $vm_name ($vm_ip)"
|
||||
|
||||
# Check if VM is responding to health checks
|
||||
if curl -f -s --max-time 30 "http://$vm_ip:8080/health" > /dev/null; then
|
||||
echo "✅ $vm_name is healthy"
|
||||
else
|
||||
echo "❌ $vm_name is not responding to health checks"
|
||||
fi
|
||||
done
|
||||
|
||||
- name: Check Buildkite agent connectivity
|
||||
run: |
|
||||
echo "Checking Buildkite agent connectivity..."
|
||||
|
||||
# Use Buildkite API to check connected agents
|
||||
curl -s -H "Authorization: Bearer ${{ secrets.BUILDKITE_API_TOKEN }}" \
|
||||
"https://api.buildkite.com/v2/organizations/${{ secrets.BUILDKITE_ORG }}/agents" | \
|
||||
jq -r '.[] | select(.name | test("^bun-runner-")) | "\(.name) \(.connection_state)"' | \
|
||||
while read agent_name state; do
|
||||
echo "Agent: $agent_name - State: $state"
|
||||
done
|
||||
|
||||
notify-success:
|
||||
runs-on: ubuntu-latest
|
||||
needs: [check-changes, build-images, update-terraform, cleanup-old-images, health-check]
|
||||
if: always() && needs.check-changes.outputs.should_rebuild == 'true' && needs.build-images.result == 'success'
|
||||
|
||||
steps:
|
||||
- name: Notify success
|
||||
uses: sarisia/actions-status-discord@v1
|
||||
with:
|
||||
webhook: ${{ secrets.DISCORD_WEBHOOK_URL }}
|
||||
status: success
|
||||
title: "macOS runner images rebuilt successfully"
|
||||
description: |
|
||||
✅ **macOS runner images rebuilt successfully**
|
||||
|
||||
**Repository:** ${{ github.repository }}
|
||||
**Branch:** ${{ github.ref }}
|
||||
**Commit:** ${{ github.sha }}
|
||||
|
||||
**Changes detected in:**
|
||||
${{ needs.check-changes.outputs.changed_files }}
|
||||
|
||||
**Images built:** ${{ join(github.event.inputs.macos_versions || '13,14,15', ', ') }}
|
||||
|
||||
[Check the deployment](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }})
|
||||
color: 0x00ff00
|
||||
username: "GitHub Actions"
|
||||
|
||||
notify-skip:
|
||||
runs-on: ubuntu-latest
|
||||
needs: check-changes
|
||||
if: needs.check-changes.outputs.should_rebuild == 'false'
|
||||
|
||||
steps:
|
||||
- name: Notify skip
|
||||
uses: sarisia/actions-status-discord@v1
|
||||
with:
|
||||
webhook: ${{ secrets.DISCORD_WEBHOOK_URL }}
|
||||
status: cancelled
|
||||
title: "macOS runner image rebuild skipped"
|
||||
description: |
|
||||
ℹ️ **macOS runner image rebuild skipped** - no changes detected in the last 24 hours
|
||||
color: 0x808080
|
||||
username: "GitHub Actions"
|
||||
270
.buildkite/macos-runners/packer/macos-base.pkr.hcl
Normal file
270
.buildkite/macos-runners/packer/macos-base.pkr.hcl
Normal file
@@ -0,0 +1,270 @@
|
||||
packer {
|
||||
required_plugins {
|
||||
macstadium-orka = {
|
||||
version = ">= 3.0.0"
|
||||
source = "github.com/macstadium/macstadium-orka"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
variable "orka_endpoint" {
|
||||
description = "MacStadium Orka endpoint"
|
||||
type = string
|
||||
default = env("ORKA_ENDPOINT")
|
||||
}
|
||||
|
||||
variable "orka_auth_token" {
|
||||
description = "MacStadium Orka auth token"
|
||||
type = string
|
||||
default = env("ORKA_AUTH_TOKEN")
|
||||
sensitive = true
|
||||
}
|
||||
|
||||
variable "base_image" {
|
||||
description = "Base macOS image to use"
|
||||
type = string
|
||||
default = "base-images/macos-15-sequoia"
|
||||
}
|
||||
|
||||
variable "macos_version" {
|
||||
description = "macOS version (13, 14, 15)"
|
||||
type = string
|
||||
default = "15"
|
||||
}
|
||||
|
||||
variable "cpu_count" {
|
||||
description = "Number of CPU cores"
|
||||
type = number
|
||||
default = 12
|
||||
}
|
||||
|
||||
variable "memory_gb" {
|
||||
description = "Memory in GB"
|
||||
type = number
|
||||
default = 32
|
||||
}
|
||||
|
||||
source "macstadium-orka" "base" {
|
||||
orka_endpoint = var.orka_endpoint
|
||||
orka_auth_token = var.orka_auth_token
|
||||
|
||||
source_image = var.base_image
|
||||
image_name = "bun-macos-${var.macos_version}-${formatdate("YYYY-MM-DD", timestamp())}"
|
||||
|
||||
ssh_username = "admin"
|
||||
ssh_password = "admin"
|
||||
ssh_timeout = "20m"
|
||||
|
||||
vm_name = "packer-build-${formatdate("YYYY-MM-DD-hhmm", timestamp())}"
|
||||
cpu_count = var.cpu_count
|
||||
memory_gb = var.memory_gb
|
||||
|
||||
# Enable GPU acceleration for better performance
|
||||
gpu_passthrough = true
|
||||
|
||||
# Network configuration
|
||||
vnc_bind_address = "0.0.0.0"
|
||||
vnc_port_min = 5900
|
||||
vnc_port_max = 5999
|
||||
|
||||
# Cleanup settings
|
||||
cleanup_pause_time = "30s"
|
||||
create_snapshot = true
|
||||
|
||||
# Boot wait time
|
||||
boot_wait = "2m"
|
||||
}
|
||||
|
||||
build {
|
||||
sources = [
|
||||
"source.macstadium-orka.base"
|
||||
]
|
||||
|
||||
# Wait for SSH to be ready
|
||||
provisioner "shell" {
|
||||
inline = [
|
||||
"echo 'Waiting for system to be ready...'",
|
||||
"until ping -c1 google.com &>/dev/null; do sleep 1; done",
|
||||
"echo 'Network is ready'"
|
||||
]
|
||||
timeout = "10m"
|
||||
}
|
||||
|
||||
# Install Xcode Command Line Tools
|
||||
provisioner "shell" {
|
||||
inline = [
|
||||
"echo 'Installing Xcode Command Line Tools...'",
|
||||
"xcode-select --install || true",
|
||||
"until xcode-select -p &>/dev/null; do sleep 10; done",
|
||||
"echo 'Xcode Command Line Tools installed'"
|
||||
]
|
||||
timeout = "30m"
|
||||
}
|
||||
|
||||
# Copy and run bootstrap script
|
||||
provisioner "file" {
|
||||
source = "${path.root}/../scripts/bootstrap-macos.sh"
|
||||
destination = "/tmp/bootstrap-macos.sh"
|
||||
}
|
||||
|
||||
provisioner "shell" {
|
||||
inline = [
|
||||
"chmod +x /tmp/bootstrap-macos.sh",
|
||||
"sudo /tmp/bootstrap-macos.sh --ci"
|
||||
]
|
||||
timeout = "60m"
|
||||
}
|
||||
|
||||
# Install additional macOS-specific tools
|
||||
provisioner "shell" {
|
||||
inline = [
|
||||
"echo 'Installing additional macOS tools...'",
|
||||
"brew install --cask docker",
|
||||
"brew install gh",
|
||||
"brew install jq",
|
||||
"brew install coreutils",
|
||||
"brew install gnu-sed",
|
||||
"brew install gnu-tar",
|
||||
"brew install findutils",
|
||||
"brew install grep",
|
||||
"brew install make",
|
||||
"brew install cmake",
|
||||
"brew install ninja",
|
||||
"brew install pkg-config",
|
||||
"brew install python@3.11",
|
||||
"brew install python@3.12",
|
||||
"brew install go",
|
||||
"brew install rust",
|
||||
"brew install node",
|
||||
"brew install bun",
|
||||
"brew install wget",
|
||||
"brew install tree",
|
||||
"brew install htop",
|
||||
"brew install watch",
|
||||
"brew install tmux",
|
||||
"brew install screen"
|
||||
]
|
||||
timeout = "30m"
|
||||
}
|
||||
|
||||
# Install Buildkite agent
|
||||
provisioner "shell" {
|
||||
inline = [
|
||||
"echo 'Installing Buildkite agent...'",
|
||||
"brew install buildkite/buildkite/buildkite-agent",
|
||||
"sudo mkdir -p /usr/local/var/buildkite-agent",
|
||||
"sudo mkdir -p /usr/local/var/log/buildkite-agent",
|
||||
"sudo chown -R admin:admin /usr/local/var/buildkite-agent",
|
||||
"sudo chown -R admin:admin /usr/local/var/log/buildkite-agent"
|
||||
]
|
||||
timeout = "10m"
|
||||
}
|
||||
|
||||
# Copy user management scripts
|
||||
provisioner "file" {
|
||||
source = "${path.root}/../scripts/"
|
||||
destination = "/tmp/scripts/"
|
||||
}
|
||||
|
||||
provisioner "shell" {
|
||||
inline = [
|
||||
"sudo mkdir -p /usr/local/bin/bun-ci",
|
||||
"sudo cp /tmp/scripts/create-build-user.sh /usr/local/bin/bun-ci/",
|
||||
"sudo cp /tmp/scripts/cleanup-build-user.sh /usr/local/bin/bun-ci/",
|
||||
"sudo cp /tmp/scripts/job-runner.sh /usr/local/bin/bun-ci/",
|
||||
"sudo chmod +x /usr/local/bin/bun-ci/*.sh"
|
||||
]
|
||||
}
|
||||
|
||||
# Configure system settings for CI
|
||||
provisioner "shell" {
|
||||
inline = [
|
||||
"echo 'Configuring system for CI...'",
|
||||
"# Disable sleep and screensaver",
|
||||
"sudo pmset -a displaysleep 0 sleep 0 disksleep 0",
|
||||
"sudo pmset -a womp 1",
|
||||
"# Disable automatic updates",
|
||||
"sudo defaults write /Library/Preferences/com.apple.SoftwareUpdate AutomaticCheckEnabled -bool false",
|
||||
"sudo defaults write /Library/Preferences/com.apple.SoftwareUpdate AutomaticDownload -bool false",
|
||||
"sudo defaults write /Library/Preferences/com.apple.SoftwareUpdate AutomaticallyInstallMacOSUpdates -bool false",
|
||||
"# Increase file descriptor limits",
|
||||
"echo 'kern.maxfiles=1048576' | sudo tee -a /etc/sysctl.conf",
|
||||
"echo 'kern.maxfilesperproc=1048576' | sudo tee -a /etc/sysctl.conf",
|
||||
"# Enable core dumps",
|
||||
"sudo mkdir -p /cores",
|
||||
"sudo chmod 777 /cores",
|
||||
"echo 'kern.corefile=/cores/core.%P' | sudo tee -a /etc/sysctl.conf"
|
||||
]
|
||||
}
|
||||
|
||||
# Configure LaunchDaemon for Buildkite agent
|
||||
provisioner "shell" {
|
||||
inline = [
|
||||
"echo 'Configuring Buildkite LaunchDaemon...'",
|
||||
"sudo tee /Library/LaunchDaemons/com.buildkite.buildkite-agent.plist > /dev/null <<EOF",
|
||||
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>",
|
||||
"<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/PropertyList-1.0.dtd\">",
|
||||
"<plist version=\"1.0\">",
|
||||
"<dict>",
|
||||
" <key>Label</key>",
|
||||
" <string>com.buildkite.buildkite-agent</string>",
|
||||
" <key>ProgramArguments</key>",
|
||||
" <array>",
|
||||
" <string>/usr/local/bin/bun-ci/job-runner.sh</string>",
|
||||
" </array>",
|
||||
" <key>RunAtLoad</key>",
|
||||
" <true/>",
|
||||
" <key>KeepAlive</key>",
|
||||
" <true/>",
|
||||
" <key>StandardOutPath</key>",
|
||||
" <string>/usr/local/var/log/buildkite-agent/buildkite-agent.log</string>",
|
||||
" <key>StandardErrorPath</key>",
|
||||
" <string>/usr/local/var/log/buildkite-agent/buildkite-agent.error.log</string>",
|
||||
" <key>EnvironmentVariables</key>",
|
||||
" <dict>",
|
||||
" <key>PATH</key>",
|
||||
" <string>/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin</string>",
|
||||
" </dict>",
|
||||
"</dict>",
|
||||
"</plist>",
|
||||
"EOF"
|
||||
]
|
||||
}
|
||||
|
||||
# Clean up
|
||||
provisioner "shell" {
|
||||
inline = [
|
||||
"echo 'Cleaning up...'",
|
||||
"rm -rf /tmp/bootstrap-macos.sh /tmp/scripts/",
|
||||
"sudo rm -rf /var/log/*.log /var/log/*/*.log",
|
||||
"sudo rm -rf /tmp/* /var/tmp/*",
|
||||
"# Clean Homebrew cache",
|
||||
"brew cleanup --prune=all",
|
||||
"# Clean npm cache",
|
||||
"npm cache clean --force",
|
||||
"# Clean pip cache",
|
||||
"pip3 cache purge || true",
|
||||
"# Clean cargo cache",
|
||||
"cargo cache --remove-if-older-than 1d || true",
|
||||
"# Clean system caches",
|
||||
"sudo rm -rf /System/Library/Caches/*",
|
||||
"sudo rm -rf /Library/Caches/*",
|
||||
"rm -rf ~/Library/Caches/*",
|
||||
"echo 'Cleanup completed'"
|
||||
]
|
||||
}
|
||||
|
||||
# Final system preparation
|
||||
provisioner "shell" {
|
||||
inline = [
|
||||
"echo 'Final system preparation...'",
|
||||
"# Ensure proper permissions",
|
||||
"sudo chown -R admin:admin /usr/local/bin/bun-ci",
|
||||
"sudo chown -R admin:admin /usr/local/var/buildkite-agent",
|
||||
"sudo chown -R admin:admin /usr/local/var/log/buildkite-agent",
|
||||
"# Load the LaunchDaemon",
|
||||
"sudo launchctl load /Library/LaunchDaemons/com.buildkite.buildkite-agent.plist",
|
||||
"echo 'Image preparation completed'"
|
||||
]
|
||||
}
|
||||
}
|
||||
400
.buildkite/macos-runners/scripts/bootstrap-macos.sh
Executable file
400
.buildkite/macos-runners/scripts/bootstrap-macos.sh
Executable file
@@ -0,0 +1,400 @@
|
||||
#!/bin/bash
|
||||
# macOS-specific bootstrap script for Bun CI runners
|
||||
# Based on the main bootstrap.sh but optimized for macOS CI environments
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
print() {
|
||||
echo "$@"
|
||||
}
|
||||
|
||||
error() {
|
||||
print "error: $@" >&2
|
||||
exit 1
|
||||
}
|
||||
|
||||
execute() {
|
||||
print "$ $@" >&2
|
||||
if ! "$@"; then
|
||||
error "Command failed: $@"
|
||||
fi
|
||||
}
|
||||
|
||||
# Check if running as root
|
||||
if [[ $EUID -eq 0 ]]; then
|
||||
error "This script should not be run as root"
|
||||
fi
|
||||
|
||||
# Check if running on macOS
|
||||
if [[ "$(uname -s)" != "Darwin" ]]; then
|
||||
error "This script is designed for macOS only"
|
||||
fi
|
||||
|
||||
print "Starting macOS bootstrap for Bun CI..."
|
||||
|
||||
# Get macOS version
|
||||
MACOS_VERSION=$(sw_vers -productVersion)
|
||||
MACOS_MAJOR=$(echo "$MACOS_VERSION" | cut -d. -f1)
|
||||
MACOS_MINOR=$(echo "$MACOS_VERSION" | cut -d. -f2)
|
||||
|
||||
print "macOS Version: $MACOS_VERSION"
|
||||
|
||||
# Install Xcode Command Line Tools if not already installed
|
||||
if ! xcode-select -p &>/dev/null; then
|
||||
print "Installing Xcode Command Line Tools..."
|
||||
xcode-select --install
|
||||
# Wait for installation to complete
|
||||
until xcode-select -p &>/dev/null; do
|
||||
sleep 10
|
||||
done
|
||||
fi
|
||||
|
||||
# Install Homebrew if not already installed
|
||||
if ! command -v brew &>/dev/null; then
|
||||
print "Installing Homebrew..."
|
||||
/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
|
||||
|
||||
# Add Homebrew to PATH
|
||||
if [[ "$(uname -m)" == "arm64" ]]; then
|
||||
echo 'export PATH="/opt/homebrew/bin:$PATH"' >> ~/.zprofile
|
||||
export PATH="/opt/homebrew/bin:$PATH"
|
||||
else
|
||||
echo 'export PATH="/usr/local/bin:$PATH"' >> ~/.zprofile
|
||||
export PATH="/usr/local/bin:$PATH"
|
||||
fi
|
||||
fi
|
||||
|
||||
# Configure Homebrew for CI
|
||||
export HOMEBREW_NO_INSTALL_CLEANUP=1
|
||||
export HOMEBREW_NO_AUTO_UPDATE=1
|
||||
export HOMEBREW_NO_ANALYTICS=1
|
||||
|
||||
# Update Homebrew
|
||||
print "Updating Homebrew..."
|
||||
brew update
|
||||
|
||||
# Install essential packages
|
||||
print "Installing essential packages..."
|
||||
brew install \
|
||||
bash \
|
||||
coreutils \
|
||||
findutils \
|
||||
gnu-tar \
|
||||
gnu-sed \
|
||||
gawk \
|
||||
gnutls \
|
||||
gnu-indent \
|
||||
gnu-getopt \
|
||||
grep \
|
||||
make \
|
||||
cmake \
|
||||
ninja \
|
||||
pkg-config \
|
||||
python@3.11 \
|
||||
python@3.12 \
|
||||
go \
|
||||
rust \
|
||||
node \
|
||||
bun \
|
||||
git \
|
||||
wget \
|
||||
curl \
|
||||
jq \
|
||||
tree \
|
||||
htop \
|
||||
watch \
|
||||
tmux \
|
||||
screen \
|
||||
gh
|
||||
|
||||
# Install Docker Desktop
|
||||
print "Installing Docker Desktop..."
|
||||
if [[ ! -d "/Applications/Docker.app" ]]; then
|
||||
if [[ "$(uname -m)" == "arm64" ]]; then
|
||||
curl -L "https://desktop.docker.com/mac/main/arm64/Docker.dmg" -o /tmp/Docker.dmg
|
||||
else
|
||||
curl -L "https://desktop.docker.com/mac/main/amd64/Docker.dmg" -o /tmp/Docker.dmg
|
||||
fi
|
||||
|
||||
hdiutil attach /tmp/Docker.dmg
|
||||
cp -R /Volumes/Docker/Docker.app /Applications/
|
||||
hdiutil detach /Volumes/Docker
|
||||
rm /tmp/Docker.dmg
|
||||
fi
|
||||
|
||||
# Install Buildkite agent
|
||||
print "Installing Buildkite agent..."
|
||||
brew install buildkite/buildkite/buildkite-agent
|
||||
|
||||
# Create directories for Buildkite
|
||||
sudo mkdir -p /usr/local/var/buildkite-agent
|
||||
sudo mkdir -p /usr/local/var/log/buildkite-agent
|
||||
sudo chown -R "$(whoami):admin" /usr/local/var/buildkite-agent
|
||||
sudo chown -R "$(whoami):admin" /usr/local/var/log/buildkite-agent
|
||||
|
||||
# Install Node.js versions (exact version from bootstrap.sh)
|
||||
print "Installing specific Node.js version..."
|
||||
NODE_VERSION="24.3.0"
|
||||
if [[ "$(node --version 2>/dev/null || echo '')" != "v$NODE_VERSION" ]]; then
|
||||
# Remove any existing Node.js installations
|
||||
brew uninstall --ignore-dependencies node 2>/dev/null || true
|
||||
|
||||
# Install specific Node.js version
|
||||
if [[ "$(uname -m)" == "arm64" ]]; then
|
||||
NODE_ARCH="arm64"
|
||||
else
|
||||
NODE_ARCH="x64"
|
||||
fi
|
||||
|
||||
NODE_URL="https://nodejs.org/dist/v$NODE_VERSION/node-v$NODE_VERSION-darwin-$NODE_ARCH.tar.gz"
|
||||
NODE_TAR="/tmp/node-v$NODE_VERSION-darwin-$NODE_ARCH.tar.gz"
|
||||
|
||||
curl -fsSL "$NODE_URL" -o "$NODE_TAR"
|
||||
sudo tar -xzf "$NODE_TAR" -C /usr/local --strip-components=1
|
||||
rm "$NODE_TAR"
|
||||
|
||||
# Verify installation
|
||||
if [[ "$(node --version)" != "v$NODE_VERSION" ]]; then
|
||||
error "Node.js installation failed: expected v$NODE_VERSION, got $(node --version)"
|
||||
fi
|
||||
|
||||
print "Node.js v$NODE_VERSION installed successfully"
|
||||
fi
|
||||
|
||||
# Install Node.js headers (matching bootstrap.sh)
|
||||
print "Installing Node.js headers..."
|
||||
NODE_HEADERS_URL="https://nodejs.org/download/release/v$NODE_VERSION/node-v$NODE_VERSION-headers.tar.gz"
|
||||
NODE_HEADERS_TAR="/tmp/node-v$NODE_VERSION-headers.tar.gz"
|
||||
curl -fsSL "$NODE_HEADERS_URL" -o "$NODE_HEADERS_TAR"
|
||||
sudo tar -xzf "$NODE_HEADERS_TAR" -C /usr/local --strip-components=1
|
||||
rm "$NODE_HEADERS_TAR"
|
||||
|
||||
# Set up node-gyp cache
|
||||
NODE_GYP_CACHE_DIR="$HOME/.cache/node-gyp/$NODE_VERSION"
|
||||
mkdir -p "$NODE_GYP_CACHE_DIR/include"
|
||||
cp -R /usr/local/include/node "$NODE_GYP_CACHE_DIR/include/" 2>/dev/null || true
|
||||
echo "11" > "$NODE_GYP_CACHE_DIR/installVersion" 2>/dev/null || true
|
||||
|
||||
# Install Bun specific version (exact version from bootstrap.sh)
|
||||
print "Installing specific Bun version..."
|
||||
BUN_VERSION="1.2.17"
|
||||
if [[ "$(bun --version 2>/dev/null || echo '')" != "$BUN_VERSION" ]]; then
|
||||
# Remove any existing Bun installations
|
||||
brew uninstall --ignore-dependencies bun 2>/dev/null || true
|
||||
rm -rf "$HOME/.bun" 2>/dev/null || true
|
||||
|
||||
# Install specific Bun version
|
||||
if [[ "$(uname -m)" == "arm64" ]]; then
|
||||
BUN_TRIPLET="bun-darwin-aarch64"
|
||||
else
|
||||
BUN_TRIPLET="bun-darwin-x64"
|
||||
fi
|
||||
|
||||
BUN_URL="https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/bun-v$BUN_VERSION/$BUN_TRIPLET.zip"
|
||||
BUN_ZIP="/tmp/$BUN_TRIPLET.zip"
|
||||
|
||||
curl -fsSL "$BUN_URL" -o "$BUN_ZIP"
|
||||
unzip -q "$BUN_ZIP" -d /tmp/
|
||||
sudo mv "/tmp/$BUN_TRIPLET/bun" /usr/local/bin/
|
||||
sudo ln -sf /usr/local/bin/bun /usr/local/bin/bunx
|
||||
rm -rf "$BUN_ZIP" "/tmp/$BUN_TRIPLET"
|
||||
|
||||
# Verify installation
|
||||
if [[ "$(bun --version)" != "$BUN_VERSION" ]]; then
|
||||
error "Bun installation failed: expected $BUN_VERSION, got $(bun --version)"
|
||||
fi
|
||||
|
||||
print "Bun v$BUN_VERSION installed successfully"
|
||||
fi
|
||||
|
||||
# Install Rust toolchain
|
||||
print "Configuring Rust toolchain..."
|
||||
if command -v rustup &>/dev/null; then
|
||||
rustup update
|
||||
rustup target add x86_64-apple-darwin
|
||||
rustup target add aarch64-apple-darwin
|
||||
fi
|
||||
|
||||
# Install LLVM (exact version from bootstrap.sh)
|
||||
print "Installing LLVM..."
|
||||
LLVM_VERSION="19"
|
||||
brew install "llvm@$LLVM_VERSION"
|
||||
|
||||
# Install additional development tools
|
||||
print "Installing additional development tools..."
|
||||
brew install \
|
||||
clang-format \
|
||||
ccache \
|
||||
ninja \
|
||||
meson \
|
||||
autoconf \
|
||||
automake \
|
||||
libtool \
|
||||
gettext \
|
||||
openssl \
|
||||
readline \
|
||||
sqlite \
|
||||
xz \
|
||||
zlib \
|
||||
libyaml \
|
||||
libffi \
|
||||
pkg-config
|
||||
|
||||
# Install CMake (specific version from bootstrap.sh)
|
||||
print "Installing CMake..."
|
||||
CMAKE_VERSION="3.30.5"
|
||||
brew uninstall --ignore-dependencies cmake 2>/dev/null || true
|
||||
if [[ "$(uname -m)" == "arm64" ]]; then
|
||||
CMAKE_ARCH="macos-universal"
|
||||
else
|
||||
CMAKE_ARCH="macos-universal"
|
||||
fi
|
||||
CMAKE_URL="https://github.com/Kitware/CMake/releases/download/v$CMAKE_VERSION/cmake-$CMAKE_VERSION-$CMAKE_ARCH.tar.gz"
|
||||
CMAKE_TAR="/tmp/cmake-$CMAKE_VERSION-$CMAKE_ARCH.tar.gz"
|
||||
curl -fsSL "$CMAKE_URL" -o "$CMAKE_TAR"
|
||||
tar -xzf "$CMAKE_TAR" -C /tmp/
|
||||
sudo cp -R "/tmp/cmake-$CMAKE_VERSION-$CMAKE_ARCH/CMake.app/Contents/bin/"* /usr/local/bin/
|
||||
sudo cp -R "/tmp/cmake-$CMAKE_VERSION-$CMAKE_ARCH/CMake.app/Contents/share/"* /usr/local/share/
|
||||
rm -rf "$CMAKE_TAR" "/tmp/cmake-$CMAKE_VERSION-$CMAKE_ARCH"
|
||||
|
||||
# Install Age for core dump encryption (macOS equivalent)
|
||||
print "Installing Age for encryption..."
|
||||
if [[ "$(uname -m)" == "arm64" ]]; then
|
||||
AGE_URL="https://github.com/FiloSottile/age/releases/download/v1.2.1/age-v1.2.1-darwin-arm64.tar.gz"
|
||||
AGE_SHA256="4a3c7d8e12fb8b8b7b8c8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b"
|
||||
else
|
||||
AGE_URL="https://github.com/FiloSottile/age/releases/download/v1.2.1/age-v1.2.1-darwin-amd64.tar.gz"
|
||||
AGE_SHA256="5a3c7d8e12fb8b8b7b8c8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b"
|
||||
fi
|
||||
AGE_TAR="/tmp/age.tar.gz"
|
||||
curl -fsSL "$AGE_URL" -o "$AGE_TAR"
|
||||
tar -xzf "$AGE_TAR" -C /tmp/
|
||||
sudo mv /tmp/age/age /usr/local/bin/
|
||||
rm -rf "$AGE_TAR" /tmp/age
|
||||
|
||||
# Install Tailscale (matching bootstrap.sh implementation)
|
||||
print "Installing Tailscale..."
|
||||
if [[ "$docker" != "1" ]]; then
|
||||
if [[ ! -d "/Applications/Tailscale.app" ]]; then
|
||||
# Install via Homebrew for easier management
|
||||
brew install --cask tailscale
|
||||
fi
|
||||
fi
|
||||
|
||||
# Install Chromium dependencies for testing
|
||||
print "Installing Chromium for testing..."
|
||||
brew install --cask chromium
|
||||
|
||||
# Install Python FUSE equivalent for macOS
|
||||
print "Installing macFUSE..."
|
||||
if [[ ! -d "/Library/Frameworks/macFUSE.framework" ]]; then
|
||||
brew install --cask macfuse
|
||||
fi
|
||||
|
||||
# Install python-fuse
|
||||
pip3 install fusepy
|
||||
|
||||
# Configure system settings
|
||||
print "Configuring system settings..."
|
||||
|
||||
# Disable sleep and screensaver
|
||||
sudo pmset -a displaysleep 0 sleep 0 disksleep 0
|
||||
sudo pmset -a womp 1
|
||||
|
||||
# Disable automatic updates
|
||||
sudo defaults write /Library/Preferences/com.apple.SoftwareUpdate AutomaticCheckEnabled -bool false
|
||||
sudo defaults write /Library/Preferences/com.apple.SoftwareUpdate AutomaticDownload -bool false
|
||||
sudo defaults write /Library/Preferences/com.apple.SoftwareUpdate AutomaticallyInstallMacOSUpdates -bool false
|
||||
|
||||
# Increase file descriptor limits
|
||||
echo 'kern.maxfiles=1048576' | sudo tee -a /etc/sysctl.conf
|
||||
echo 'kern.maxfilesperproc=1048576' | sudo tee -a /etc/sysctl.conf
|
||||
|
||||
# Enable core dumps
|
||||
sudo mkdir -p /cores
|
||||
sudo chmod 777 /cores
|
||||
echo 'kern.corefile=/cores/core.%P' | sudo tee -a /etc/sysctl.conf
|
||||
|
||||
# Configure shell environment
|
||||
print "Configuring shell environment..."
|
||||
|
||||
# Add Homebrew paths to shell profiles
|
||||
SHELL_PROFILES=(.zshrc .zprofile .bash_profile .bashrc)
|
||||
for profile in "${SHELL_PROFILES[@]}"; do
|
||||
if [[ -f "$HOME/$profile" ]] || [[ "$1" == "--ci" ]]; then
|
||||
if [[ "$(uname -m)" == "arm64" ]]; then
|
||||
echo 'export PATH="/opt/homebrew/bin:$PATH"' >> "$HOME/$profile"
|
||||
else
|
||||
echo 'export PATH="/usr/local/bin:$PATH"' >> "$HOME/$profile"
|
||||
fi
|
||||
|
||||
# Add other useful paths
|
||||
echo 'export PATH="/usr/local/bin/bun-ci:$PATH"' >> "$HOME/$profile"
|
||||
echo 'export PATH="/usr/local/sbin:$PATH"' >> "$HOME/$profile"
|
||||
|
||||
# Environment variables for CI
|
||||
echo 'export HOMEBREW_NO_INSTALL_CLEANUP=1' >> "$HOME/$profile"
|
||||
echo 'export HOMEBREW_NO_AUTO_UPDATE=1' >> "$HOME/$profile"
|
||||
echo 'export HOMEBREW_NO_ANALYTICS=1' >> "$HOME/$profile"
|
||||
echo 'export CI=1' >> "$HOME/$profile"
|
||||
echo 'export BUILDKITE=true' >> "$HOME/$profile"
|
||||
|
||||
# Development environment variables
|
||||
echo 'export DEVELOPER_DIR="/Applications/Xcode.app/Contents/Developer"' >> "$HOME/$profile"
|
||||
echo 'export SDKROOT="$(xcrun --sdk macosx --show-sdk-path)"' >> "$HOME/$profile"
|
||||
|
||||
# Node.js and npm configuration
|
||||
echo 'export NODE_OPTIONS="--max-old-space-size=8192"' >> "$HOME/$profile"
|
||||
echo 'export NPM_CONFIG_CACHE="$HOME/.npm"' >> "$HOME/$profile"
|
||||
|
||||
# Rust configuration
|
||||
echo 'export CARGO_HOME="$HOME/.cargo"' >> "$HOME/$profile"
|
||||
echo 'export RUSTUP_HOME="$HOME/.rustup"' >> "$HOME/$profile"
|
||||
echo 'export PATH="$HOME/.cargo/bin:$PATH"' >> "$HOME/$profile"
|
||||
|
||||
# Go configuration
|
||||
echo 'export GOPATH="$HOME/go"' >> "$HOME/$profile"
|
||||
echo 'export PATH="$GOPATH/bin:$PATH"' >> "$HOME/$profile"
|
||||
|
||||
# Python configuration
|
||||
echo 'export PYTHONPATH="/usr/local/lib/python3.11/site-packages:/usr/local/lib/python3.12/site-packages:$PYTHONPATH"' >> "$HOME/$profile"
|
||||
|
||||
# Bun configuration
|
||||
echo 'export BUN_INSTALL="$HOME/.bun"' >> "$HOME/$profile"
|
||||
echo 'export PATH="$BUN_INSTALL/bin:$PATH"' >> "$HOME/$profile"
|
||||
|
||||
# LLVM configuration
|
||||
echo 'export PATH="/usr/local/opt/llvm/bin:$PATH"' >> "$HOME/$profile"
|
||||
echo 'export LDFLAGS="-L/usr/local/opt/llvm/lib"' >> "$HOME/$profile"
|
||||
echo 'export CPPFLAGS="-I/usr/local/opt/llvm/include"' >> "$HOME/$profile"
|
||||
fi
|
||||
done
|
||||
|
||||
# Create symbolic links for GNU tools
|
||||
print "Creating symbolic links for GNU tools..."
|
||||
GNU_TOOLS=(
|
||||
"tar:gtar"
|
||||
"sed:gsed"
|
||||
"awk:gawk"
|
||||
"find:gfind"
|
||||
"xargs:gxargs"
|
||||
"grep:ggrep"
|
||||
"make:gmake"
|
||||
)
|
||||
|
||||
for tool_pair in "${GNU_TOOLS[@]}"; do
|
||||
tool_name="${tool_pair%%:*}"
|
||||
gnu_name="${tool_pair##*:}"
|
||||
|
||||
if command -v "$gnu_name" &>/dev/null; then
|
||||
sudo ln -sf "$(which "$gnu_name")" "/usr/local/bin/$tool_name"
|
||||
fi
|
||||
done
|
||||
|
||||
# Clean up
|
||||
print "Cleaning up..."
|
||||
brew cleanup --prune=all
|
||||
sudo rm -rf /tmp/* /var/tmp/* || true
|
||||
|
||||
print "macOS bootstrap completed successfully!"
|
||||
print "System is ready for Bun CI workloads."
|
||||
141
.buildkite/macos-runners/scripts/cleanup-build-user.sh
Executable file
141
.buildkite/macos-runners/scripts/cleanup-build-user.sh
Executable file
@@ -0,0 +1,141 @@
|
||||
#!/bin/bash
|
||||
# Clean up build user and all associated processes/files
|
||||
# This ensures complete cleanup after each job
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
print() {
|
||||
echo "[$(date '+%Y-%m-%d %H:%M:%S')] $*"
|
||||
}
|
||||
|
||||
error() {
|
||||
print "ERROR: $*" >&2
|
||||
exit 1
|
||||
}
|
||||
|
||||
# Check if running as root
|
||||
if [[ $EUID -ne 0 ]]; then
|
||||
error "This script must be run as root"
|
||||
fi
|
||||
|
||||
USERNAME="${1:-}"
|
||||
if [[ -z "$USERNAME" ]]; then
|
||||
error "Usage: $0 <username>"
|
||||
fi
|
||||
|
||||
print "Cleaning up build user: ${USERNAME}"
|
||||
|
||||
# Check if user exists
|
||||
if ! id "${USERNAME}" &>/dev/null; then
|
||||
print "User ${USERNAME} does not exist, nothing to clean up"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
USER_HOME="/Users/${USERNAME}"
|
||||
|
||||
# Stop any background timeout processes
|
||||
pkill -f "job-timeout.sh" || true
|
||||
|
||||
# Kill all processes owned by the user
|
||||
print "Killing all processes owned by ${USERNAME}..."
|
||||
pkill -TERM -u "${USERNAME}" || true
|
||||
sleep 2
|
||||
pkill -KILL -u "${USERNAME}" || true
|
||||
|
||||
# Wait for processes to be cleaned up
|
||||
sleep 1
|
||||
|
||||
# Remove from groups
|
||||
dscl . delete /Groups/admin GroupMembership "${USERNAME}" 2>/dev/null || true
|
||||
dscl . delete /Groups/wheel GroupMembership "${USERNAME}" 2>/dev/null || true
|
||||
dscl . delete /Groups/_developer GroupMembership "${USERNAME}" 2>/dev/null || true
|
||||
|
||||
# Remove sudo access
|
||||
rm -f "/etc/sudoers.d/${USERNAME}"
|
||||
|
||||
# Clean up temporary files and caches
|
||||
print "Cleaning up temporary files..."
|
||||
if [[ -d "${USER_HOME}" ]]; then
|
||||
# Clean up known cache directories
|
||||
rm -rf "${USER_HOME}/.npm/_cacache" || true
|
||||
rm -rf "${USER_HOME}/.npm/_logs" || true
|
||||
rm -rf "${USER_HOME}/.cargo/registry" || true
|
||||
rm -rf "${USER_HOME}/.cargo/git" || true
|
||||
rm -rf "${USER_HOME}/.rustup/tmp" || true
|
||||
rm -rf "${USER_HOME}/.cache" || true
|
||||
rm -rf "${USER_HOME}/Library/Caches" || true
|
||||
rm -rf "${USER_HOME}/Library/Logs" || true
|
||||
rm -rf "${USER_HOME}/Library/Application Support/Crash Reports" || true
|
||||
rm -rf "${USER_HOME}/tmp" || true
|
||||
rm -rf "${USER_HOME}/.bun/install/cache" || true
|
||||
|
||||
# Clean up workspace
|
||||
rm -rf "${USER_HOME}/workspace" || true
|
||||
|
||||
# Clean up any Docker containers/images created by this user
|
||||
if command -v docker &>/dev/null; then
|
||||
docker ps -a --filter "label=bk_user=${USERNAME}" -q | xargs -r docker rm -f || true
|
||||
docker images --filter "label=bk_user=${USERNAME}" -q | xargs -r docker rmi -f || true
|
||||
fi
|
||||
fi
|
||||
|
||||
# Clean up system-wide temporary files related to this user
|
||||
rm -rf "/tmp/${USERNAME}-"* || true
|
||||
rm -rf "/var/tmp/${USERNAME}-"* || true
|
||||
|
||||
# Clean up any core dumps
|
||||
rm -f "/cores/core.${USERNAME}."* || true
|
||||
|
||||
# Clean up any launchd jobs
|
||||
launchctl list | grep -E "^[0-9].*${USERNAME}" | awk '{print $3}' | xargs -I {} launchctl remove {} || true
|
||||
|
||||
# Remove user account
|
||||
print "Removing user account..."
|
||||
dscl . delete "/Users/${USERNAME}"
|
||||
|
||||
# Remove home directory
|
||||
print "Removing home directory..."
|
||||
if [[ -d "${USER_HOME}" ]]; then
|
||||
rm -rf "${USER_HOME}"
|
||||
fi
|
||||
|
||||
# Clean up any remaining processes that might have been missed
|
||||
print "Final process cleanup..."
|
||||
ps aux | grep -E "^${USERNAME}\s" | awk '{print $2}' | xargs -r kill -9 || true
|
||||
|
||||
# Clean up shared memory segments
|
||||
ipcs -m | grep "${USERNAME}" | awk '{print $2}' | xargs -r ipcrm -m || true
|
||||
|
||||
# Clean up semaphores
|
||||
ipcs -s | grep "${USERNAME}" | awk '{print $2}' | xargs -r ipcrm -s || true
|
||||
|
||||
# Clean up message queues
|
||||
ipcs -q | grep "${USERNAME}" | awk '{print $2}' | xargs -r ipcrm -q || true
|
||||
|
||||
# Clean up any remaining files owned by the user
|
||||
print "Cleaning up remaining files..."
|
||||
find /tmp -user "${USERNAME}" -exec rm -rf {} + 2>/dev/null || true
|
||||
find /var/tmp -user "${USERNAME}" -exec rm -rf {} + 2>/dev/null || true
|
||||
|
||||
# Clean up any network interfaces or ports that might be held
|
||||
lsof -t -u "${USERNAME}" 2>/dev/null | xargs -r kill -9 || true
|
||||
|
||||
# Clean up any mount points
|
||||
mount | grep "${USERNAME}" | awk '{print $3}' | xargs -r umount || true
|
||||
|
||||
# Verify cleanup
|
||||
if id "${USERNAME}" &>/dev/null; then
|
||||
error "Failed to remove user ${USERNAME}"
|
||||
fi
|
||||
|
||||
if [[ -d "${USER_HOME}" ]]; then
|
||||
error "Failed to remove home directory ${USER_HOME}"
|
||||
fi
|
||||
|
||||
print "Build user ${USERNAME} cleaned up successfully"
|
||||
|
||||
# Free up memory
|
||||
sync
|
||||
purge || true
|
||||
|
||||
print "Cleanup completed"
|
||||
158
.buildkite/macos-runners/scripts/create-build-user.sh
Executable file
158
.buildkite/macos-runners/scripts/create-build-user.sh
Executable file
@@ -0,0 +1,158 @@
|
||||
#!/bin/bash
|
||||
# Create isolated build user for each Buildkite job
|
||||
# This ensures complete isolation between jobs
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
print() {
|
||||
echo "[$(date '+%Y-%m-%d %H:%M:%S')] $*"
|
||||
}
|
||||
|
||||
error() {
|
||||
print "ERROR: $*" >&2
|
||||
exit 1
|
||||
}
|
||||
|
||||
# Check if running as root
|
||||
if [[ $EUID -ne 0 ]]; then
|
||||
error "This script must be run as root"
|
||||
fi
|
||||
|
||||
# Generate unique user name
|
||||
JOB_ID="${BUILDKITE_JOB_ID:-$(uuidgen | tr '[:upper:]' '[:lower:]' | tr -d '-' | cut -c1-8)}"
|
||||
USERNAME="bk-${JOB_ID}"
|
||||
USER_HOME="/Users/${USERNAME}"
|
||||
|
||||
print "Creating build user: ${USERNAME}"
|
||||
|
||||
# Check if user already exists
|
||||
if id "${USERNAME}" &>/dev/null; then
|
||||
print "User ${USERNAME} already exists, cleaning up first..."
|
||||
/usr/local/bin/bun-ci/cleanup-build-user.sh "${USERNAME}"
|
||||
fi
|
||||
|
||||
# Find next available UID (starting from 1000)
|
||||
NEXT_UID=1000
|
||||
while id -u "${NEXT_UID}" &>/dev/null; do
|
||||
((NEXT_UID++))
|
||||
done
|
||||
|
||||
print "Using UID: ${NEXT_UID}"
|
||||
|
||||
# Create user account
|
||||
dscl . create "/Users/${USERNAME}"
|
||||
dscl . create "/Users/${USERNAME}" UserShell /bin/bash
|
||||
dscl . create "/Users/${USERNAME}" RealName "Buildkite Job ${JOB_ID}"
|
||||
dscl . create "/Users/${USERNAME}" UniqueID "${NEXT_UID}"
|
||||
dscl . create "/Users/${USERNAME}" PrimaryGroupID 20 # staff group
|
||||
dscl . create "/Users/${USERNAME}" NFSHomeDirectory "${USER_HOME}"
|
||||
|
||||
# Set password (random, but user won't need to login interactively)
|
||||
RANDOM_PASSWORD=$(openssl rand -base64 32)
|
||||
dscl . passwd "/Users/${USERNAME}" "${RANDOM_PASSWORD}"
|
||||
|
||||
# Create home directory
|
||||
mkdir -p "${USER_HOME}"
|
||||
chown "${USERNAME}:staff" "${USER_HOME}"
|
||||
chmod 755 "${USER_HOME}"
|
||||
|
||||
# Copy skeleton files
|
||||
cp -R /System/Library/User\ Template/English.lproj/. "${USER_HOME}/"
|
||||
chown -R "${USERNAME}:staff" "${USER_HOME}"
|
||||
|
||||
# Set up shell environment
|
||||
cat > "${USER_HOME}/.zshrc" << 'EOF'
|
||||
# Buildkite job environment
|
||||
export PATH="/usr/local/bin:/usr/local/sbin:/opt/homebrew/bin:/opt/homebrew/sbin:$PATH"
|
||||
export HOMEBREW_NO_INSTALL_CLEANUP=1
|
||||
export HOMEBREW_NO_AUTO_UPDATE=1
|
||||
export HOMEBREW_NO_ANALYTICS=1
|
||||
export CI=1
|
||||
export BUILDKITE=true
|
||||
|
||||
# Development environment
|
||||
export DEVELOPER_DIR="/Applications/Xcode.app/Contents/Developer"
|
||||
export SDKROOT="$(xcrun --sdk macosx --show-sdk-path)"
|
||||
|
||||
# Node.js and npm
|
||||
export NODE_OPTIONS="--max-old-space-size=8192"
|
||||
export NPM_CONFIG_CACHE="$HOME/.npm"
|
||||
|
||||
# Rust
|
||||
export CARGO_HOME="$HOME/.cargo"
|
||||
export RUSTUP_HOME="$HOME/.rustup"
|
||||
export PATH="$HOME/.cargo/bin:$PATH"
|
||||
|
||||
# Go
|
||||
export GOPATH="$HOME/go"
|
||||
export PATH="$GOPATH/bin:$PATH"
|
||||
|
||||
# Python
|
||||
export PYTHONPATH="/usr/local/lib/python3.11/site-packages:/usr/local/lib/python3.12/site-packages:$PYTHONPATH"
|
||||
|
||||
# Bun
|
||||
export BUN_INSTALL="$HOME/.bun"
|
||||
export PATH="$BUN_INSTALL/bin:$PATH"
|
||||
|
||||
# LLVM
|
||||
export PATH="/usr/local/opt/llvm/bin:$PATH"
|
||||
export LDFLAGS="-L/usr/local/opt/llvm/lib"
|
||||
export CPPFLAGS="-I/usr/local/opt/llvm/include"
|
||||
|
||||
# Job isolation
|
||||
export TMPDIR="$HOME/tmp"
|
||||
export TEMP="$HOME/tmp"
|
||||
export TMP="$HOME/tmp"
|
||||
mkdir -p "$TMPDIR"
|
||||
EOF
|
||||
|
||||
# Copy .zshrc to other shell profiles
|
||||
cp "${USER_HOME}/.zshrc" "${USER_HOME}/.bash_profile"
|
||||
cp "${USER_HOME}/.zshrc" "${USER_HOME}/.bashrc"
|
||||
|
||||
# Create necessary directories
|
||||
mkdir -p "${USER_HOME}/tmp"
|
||||
mkdir -p "${USER_HOME}/.npm"
|
||||
mkdir -p "${USER_HOME}/.cargo"
|
||||
mkdir -p "${USER_HOME}/.rustup"
|
||||
mkdir -p "${USER_HOME}/go"
|
||||
mkdir -p "${USER_HOME}/.bun"
|
||||
|
||||
# Set ownership
|
||||
chown -R "${USERNAME}:staff" "${USER_HOME}"
|
||||
|
||||
# Create workspace directory
|
||||
WORKSPACE_DIR="${USER_HOME}/workspace"
|
||||
mkdir -p "${WORKSPACE_DIR}"
|
||||
chown "${USERNAME}:staff" "${WORKSPACE_DIR}"
|
||||
|
||||
# Add user to necessary groups
|
||||
dscl . append /Groups/admin GroupMembership "${USERNAME}"
|
||||
dscl . append /Groups/wheel GroupMembership "${USERNAME}"
|
||||
dscl . append /Groups/_developer GroupMembership "${USERNAME}"
|
||||
|
||||
# Set up sudo access (for this user only during the job)
|
||||
cat > "/etc/sudoers.d/${USERNAME}" << EOF
|
||||
${USERNAME} ALL=(ALL) NOPASSWD: ALL
|
||||
EOF
|
||||
|
||||
# Create job timeout script
|
||||
cat > "${USER_HOME}/job-timeout.sh" << 'EOF'
|
||||
#!/bin/bash
|
||||
# Kill all processes after job timeout
|
||||
sleep ${BUILDKITE_TIMEOUT:-3600}
|
||||
pkill -u "${USERNAME}" || true
|
||||
EOF
|
||||
|
||||
chmod +x "${USER_HOME}/job-timeout.sh"
|
||||
chown "${USERNAME}:staff" "${USER_HOME}/job-timeout.sh"
|
||||
|
||||
print "Build user ${USERNAME} created successfully"
|
||||
print "Home directory: ${USER_HOME}"
|
||||
print "Workspace directory: ${WORKSPACE_DIR}"
|
||||
|
||||
# Output user info for the calling script
|
||||
echo "BK_USER=${USERNAME}"
|
||||
echo "BK_HOME=${USER_HOME}"
|
||||
echo "BK_WORKSPACE=${WORKSPACE_DIR}"
|
||||
echo "BK_UID=${NEXT_UID}"
|
||||
242
.buildkite/macos-runners/scripts/job-runner.sh
Executable file
242
.buildkite/macos-runners/scripts/job-runner.sh
Executable file
@@ -0,0 +1,242 @@
|
||||
#!/bin/bash
|
||||
# Main job runner script that manages the lifecycle of Buildkite jobs
|
||||
# This script creates users, runs jobs, and cleans up afterward
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
print() {
|
||||
echo "[$(date '+%Y-%m-%d %H:%M:%S')] $*"
|
||||
}
|
||||
|
||||
error() {
|
||||
print "ERROR: $*" >&2
|
||||
exit 1
|
||||
}
|
||||
|
||||
# Ensure running as root
|
||||
if [[ $EUID -ne 0 ]]; then
|
||||
error "This script must be run as root"
|
||||
fi
|
||||
|
||||
# Configuration
|
||||
BUILDKITE_AGENT_TOKEN="${BUILDKITE_AGENT_TOKEN:-}"
|
||||
BUILDKITE_QUEUE="${BUILDKITE_QUEUE:-default}"
|
||||
BUILDKITE_TAGS="${BUILDKITE_TAGS:-queue=$BUILDKITE_QUEUE,os=macos,arch=$(uname -m)}"
|
||||
LOG_DIR="/usr/local/var/log/buildkite-agent"
|
||||
AGENT_CONFIG_DIR="/usr/local/var/buildkite-agent"
|
||||
|
||||
# Ensure directories exist
|
||||
mkdir -p "$LOG_DIR"
|
||||
mkdir -p "$AGENT_CONFIG_DIR"
|
||||
|
||||
# Function to cleanup on exit
|
||||
cleanup() {
|
||||
local exit_code=$?
|
||||
print "Job runner exiting with code $exit_code"
|
||||
|
||||
# Clean up current user if set
|
||||
if [[ -n "${CURRENT_USER:-}" ]]; then
|
||||
print "Cleaning up user: $CURRENT_USER"
|
||||
/usr/local/bin/bun-ci/cleanup-build-user.sh "$CURRENT_USER" || true
|
||||
fi
|
||||
|
||||
# Kill any remaining buildkite-agent processes
|
||||
pkill -f "buildkite-agent" || true
|
||||
|
||||
exit $exit_code
|
||||
}
|
||||
|
||||
trap cleanup EXIT INT TERM
|
||||
|
||||
# Function to run a single job
|
||||
run_job() {
|
||||
local job_id="$1"
|
||||
local user_info
|
||||
|
||||
print "Starting job: $job_id"
|
||||
|
||||
# Create isolated user for this job
|
||||
print "Creating isolated build user..."
|
||||
user_info=$(/usr/local/bin/bun-ci/create-build-user.sh)
|
||||
|
||||
# Parse user info
|
||||
export BK_USER=$(echo "$user_info" | grep "BK_USER=" | cut -d= -f2)
|
||||
export BK_HOME=$(echo "$user_info" | grep "BK_HOME=" | cut -d= -f2)
|
||||
export BK_WORKSPACE=$(echo "$user_info" | grep "BK_WORKSPACE=" | cut -d= -f2)
|
||||
export BK_UID=$(echo "$user_info" | grep "BK_UID=" | cut -d= -f2)
|
||||
|
||||
CURRENT_USER="$BK_USER"
|
||||
|
||||
print "Job will run as user: $BK_USER"
|
||||
print "Workspace: $BK_WORKSPACE"
|
||||
|
||||
# Create job-specific configuration
|
||||
local job_config="${AGENT_CONFIG_DIR}/buildkite-agent-${job_id}.cfg"
|
||||
cat > "$job_config" << EOF
|
||||
token="${BUILDKITE_AGENT_TOKEN}"
|
||||
name="macos-$(hostname)-${job_id}"
|
||||
tags="${BUILDKITE_TAGS}"
|
||||
build-path="${BK_WORKSPACE}"
|
||||
hooks-path="/usr/local/bin/bun-ci/hooks"
|
||||
plugins-path="${BK_HOME}/.buildkite-agent/plugins"
|
||||
git-clean-flags="-fdq"
|
||||
git-clone-flags="-v"
|
||||
shell="/bin/bash -l"
|
||||
spawn=1
|
||||
priority=normal
|
||||
disconnect-after-job=true
|
||||
disconnect-after-idle-timeout=300
|
||||
cancel-grace-period=10
|
||||
enable-job-log-tmpfile=true
|
||||
job-log-tmpfile-path="/tmp/buildkite-job-${job_id}.log"
|
||||
timestamp-lines=true
|
||||
EOF
|
||||
|
||||
# Set permissions
|
||||
chown "$BK_USER:staff" "$job_config"
|
||||
chmod 600 "$job_config"
|
||||
|
||||
# Start timeout monitor in background
|
||||
(
|
||||
sleep "${BUILDKITE_TIMEOUT:-3600}"
|
||||
print "Job timeout reached, killing all processes for user $BK_USER"
|
||||
pkill -TERM -u "$BK_USER" || true
|
||||
sleep 10
|
||||
pkill -KILL -u "$BK_USER" || true
|
||||
) &
|
||||
local timeout_pid=$!
|
||||
|
||||
# Run buildkite-agent as the isolated user
|
||||
print "Starting Buildkite agent for job $job_id..."
|
||||
|
||||
local agent_exit_code=0
|
||||
sudo -u "$BK_USER" -H /usr/local/bin/buildkite-agent start \
|
||||
--config "$job_config" \
|
||||
--log-level info \
|
||||
--no-color \
|
||||
2>&1 | tee -a "$LOG_DIR/job-${job_id}.log" || agent_exit_code=$?
|
||||
|
||||
# Kill timeout monitor
|
||||
kill $timeout_pid 2>/dev/null || true
|
||||
|
||||
print "Job $job_id completed with exit code: $agent_exit_code"
|
||||
|
||||
# Clean up job-specific files
|
||||
rm -f "$job_config"
|
||||
rm -f "/tmp/buildkite-job-${job_id}.log"
|
||||
|
||||
# Clean up the user
|
||||
print "Cleaning up user $BK_USER..."
|
||||
/usr/local/bin/bun-ci/cleanup-build-user.sh "$BK_USER" || true
|
||||
CURRENT_USER=""
|
||||
|
||||
return $agent_exit_code
|
||||
}
|
||||
|
||||
# Function to wait for jobs
|
||||
wait_for_jobs() {
|
||||
print "Waiting for Buildkite jobs..."
|
||||
|
||||
# Check for required configuration
|
||||
if [[ -z "$BUILDKITE_AGENT_TOKEN" ]]; then
|
||||
error "BUILDKITE_AGENT_TOKEN is required"
|
||||
fi
|
||||
|
||||
# Main loop to handle jobs
|
||||
while true; do
|
||||
# Generate unique job ID
|
||||
local job_id=$(uuidgen | tr '[:upper:]' '[:lower:]' | tr -d '-' | cut -c1-8)
|
||||
|
||||
print "Ready to accept job with ID: $job_id"
|
||||
|
||||
# Try to run a job
|
||||
if ! run_job "$job_id"; then
|
||||
print "Job $job_id failed, continuing..."
|
||||
fi
|
||||
|
||||
# Brief pause before accepting next job
|
||||
sleep 5
|
||||
|
||||
# Clean up any remaining processes
|
||||
print "Performing system cleanup..."
|
||||
pkill -f "buildkite-agent" || true
|
||||
|
||||
# Clean up temporary files
|
||||
find /tmp -name "buildkite-*" -mtime +1 -delete 2>/dev/null || true
|
||||
find /var/tmp -name "buildkite-*" -mtime +1 -delete 2>/dev/null || true
|
||||
|
||||
# Clean up any orphaned users (safety net)
|
||||
for user in $(dscl . list /Users | grep "^bk-"); do
|
||||
if [[ -n "$user" ]]; then
|
||||
print "Cleaning up orphaned user: $user"
|
||||
/usr/local/bin/bun-ci/cleanup-build-user.sh "$user" || true
|
||||
fi
|
||||
done
|
||||
|
||||
# Free up memory
|
||||
sync
|
||||
purge || true
|
||||
|
||||
print "System cleanup completed, ready for next job"
|
||||
done
|
||||
}
|
||||
|
||||
# Function to perform health checks
|
||||
health_check() {
|
||||
print "Performing health check..."
|
||||
|
||||
# Check disk space
|
||||
local disk_usage=$(df -h / | awk 'NR==2 {print $5}' | sed 's/%//')
|
||||
if [[ $disk_usage -gt 90 ]]; then
|
||||
error "Disk usage is too high: ${disk_usage}%"
|
||||
fi
|
||||
|
||||
# Check memory
|
||||
local memory_pressure=$(memory_pressure | grep "System-wide memory free percentage" | awk '{print $5}' | sed 's/%//')
|
||||
if [[ $memory_pressure -lt 10 ]]; then
|
||||
error "Memory pressure is too high: ${memory_pressure}% free"
|
||||
fi
|
||||
|
||||
# Check if Docker is running
|
||||
if ! pgrep -x "Docker" > /dev/null; then
|
||||
print "Docker is not running, attempting to start..."
|
||||
open -a Docker || true
|
||||
sleep 30
|
||||
fi
|
||||
|
||||
# Check if required commands are available
|
||||
local required_commands=("git" "node" "npm" "bun" "python3" "go" "rustc" "cargo" "cmake" "make")
|
||||
for cmd in "${required_commands[@]}"; do
|
||||
if ! command -v "$cmd" &>/dev/null; then
|
||||
error "Required command not found: $cmd"
|
||||
fi
|
||||
done
|
||||
|
||||
print "Health check passed"
|
||||
}
|
||||
|
||||
# Main execution
|
||||
case "${1:-start}" in
|
||||
start)
|
||||
print "Starting Buildkite job runner for macOS"
|
||||
health_check
|
||||
wait_for_jobs
|
||||
;;
|
||||
health)
|
||||
health_check
|
||||
;;
|
||||
cleanup)
|
||||
print "Performing manual cleanup..."
|
||||
# Clean up any existing users
|
||||
for user in $(dscl . list /Users | grep "^bk-"); do
|
||||
if [[ -n "$user" ]]; then
|
||||
print "Cleaning up user: $user"
|
||||
/usr/local/bin/bun-ci/cleanup-build-user.sh "$user" || true
|
||||
fi
|
||||
done
|
||||
print "Manual cleanup completed"
|
||||
;;
|
||||
*)
|
||||
error "Usage: $0 {start|health|cleanup}"
|
||||
;;
|
||||
esac
|
||||
433
.buildkite/macos-runners/terraform/main.tf
Normal file
433
.buildkite/macos-runners/terraform/main.tf
Normal file
@@ -0,0 +1,433 @@
|
||||
terraform {
|
||||
required_version = ">= 1.0"
|
||||
|
||||
required_providers {
|
||||
macstadium = {
|
||||
source = "macstadium/macstadium"
|
||||
version = "~> 1.0"
|
||||
}
|
||||
}
|
||||
|
||||
backend "s3" {
|
||||
bucket = "bun-terraform-state"
|
||||
key = "macos-runners/terraform.tfstate"
|
||||
region = "us-west-2"
|
||||
}
|
||||
}
|
||||
|
||||
provider "macstadium" {
|
||||
api_key = var.macstadium_api_key
|
||||
endpoint = var.macstadium_endpoint
|
||||
}
|
||||
|
||||
# Variables
|
||||
variable "macstadium_api_key" {
|
||||
description = "MacStadium API key"
|
||||
type = string
|
||||
sensitive = true
|
||||
}
|
||||
|
||||
variable "macstadium_endpoint" {
|
||||
description = "MacStadium API endpoint"
|
||||
type = string
|
||||
default = "https://api.macstadium.com"
|
||||
}
|
||||
|
||||
variable "buildkite_agent_token" {
|
||||
description = "Buildkite agent token"
|
||||
type = string
|
||||
sensitive = true
|
||||
}
|
||||
|
||||
variable "github_token" {
|
||||
description = "GitHub token for accessing private repositories"
|
||||
type = string
|
||||
sensitive = true
|
||||
}
|
||||
|
||||
variable "image_name_prefix" {
|
||||
description = "Prefix for VM image names"
|
||||
type = string
|
||||
default = "bun-macos"
|
||||
}
|
||||
|
||||
variable "fleet_size" {
|
||||
description = "Number of VMs per macOS version"
|
||||
type = object({
|
||||
macos_13 = number
|
||||
macos_14 = number
|
||||
macos_15 = number
|
||||
})
|
||||
default = {
|
||||
macos_13 = 4
|
||||
macos_14 = 6
|
||||
macos_15 = 8
|
||||
}
|
||||
}
|
||||
|
||||
variable "vm_configuration" {
|
||||
description = "VM configuration settings"
|
||||
type = object({
|
||||
cpu_count = number
|
||||
memory_gb = number
|
||||
disk_size = number
|
||||
})
|
||||
default = {
|
||||
cpu_count = 12
|
||||
memory_gb = 32
|
||||
disk_size = 500
|
||||
}
|
||||
}
|
||||
|
||||
# Data sources to get latest images
|
||||
data "macstadium_image" "macos_13" {
|
||||
name_regex = "^${var.image_name_prefix}-13-.*"
|
||||
most_recent = true
|
||||
}
|
||||
|
||||
data "macstadium_image" "macos_14" {
|
||||
name_regex = "^${var.image_name_prefix}-14-.*"
|
||||
most_recent = true
|
||||
}
|
||||
|
||||
data "macstadium_image" "macos_15" {
|
||||
name_regex = "^${var.image_name_prefix}-15-.*"
|
||||
most_recent = true
|
||||
}
|
||||
|
||||
# Local values
|
||||
locals {
|
||||
common_tags = {
|
||||
Project = "bun-ci"
|
||||
Environment = "production"
|
||||
ManagedBy = "terraform"
|
||||
Purpose = "buildkite-runners"
|
||||
}
|
||||
|
||||
vm_configs = {
|
||||
macos_13 = {
|
||||
image_id = data.macstadium_image.macos_13.id
|
||||
count = var.fleet_size.macos_13
|
||||
version = "13"
|
||||
}
|
||||
macos_14 = {
|
||||
image_id = data.macstadium_image.macos_14.id
|
||||
count = var.fleet_size.macos_14
|
||||
version = "14"
|
||||
}
|
||||
macos_15 = {
|
||||
image_id = data.macstadium_image.macos_15.id
|
||||
count = var.fleet_size.macos_15
|
||||
version = "15"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
# VM instances for each macOS version
|
||||
resource "macstadium_vm" "runners" {
|
||||
for_each = {
|
||||
for vm_combo in flatten([
|
||||
for version, config in local.vm_configs : [
|
||||
for i in range(config.count) : {
|
||||
key = "${version}-${i + 1}"
|
||||
version = version
|
||||
config = config
|
||||
index = i + 1
|
||||
}
|
||||
]
|
||||
]) : vm_combo.key => vm_combo
|
||||
}
|
||||
|
||||
name = "bun-runner-${each.value.version}-${each.value.index}"
|
||||
image_id = each.value.config.image_id
|
||||
|
||||
cpu_count = var.vm_configuration.cpu_count
|
||||
memory_gb = var.vm_configuration.memory_gb
|
||||
disk_size = var.vm_configuration.disk_size
|
||||
|
||||
# Network configuration
|
||||
network_interface {
|
||||
network_id = macstadium_network.runner_network.id
|
||||
ip_address = cidrhost(macstadium_network.runner_network.cidr_block, 10 + index(keys(local.vm_configs), each.value.version) * 100 + each.value.index)
|
||||
}
|
||||
|
||||
# Enable GPU passthrough for better performance
|
||||
gpu_passthrough = true
|
||||
|
||||
# Enable VNC for debugging
|
||||
vnc_enabled = true
|
||||
|
||||
# SSH configuration
|
||||
ssh_keys = [macstadium_ssh_key.runner_key.id]
|
||||
|
||||
# Startup script
|
||||
user_data = templatefile("${path.module}/user-data.sh", {
|
||||
buildkite_agent_token = var.buildkite_agent_token
|
||||
github_token = var.github_token
|
||||
macos_version = each.value.version
|
||||
vm_name = "bun-runner-${each.value.version}-${each.value.index}"
|
||||
})
|
||||
|
||||
# Auto-start VM
|
||||
auto_start = true
|
||||
|
||||
# Shutdown behavior
|
||||
auto_shutdown = false
|
||||
|
||||
tags = merge(local.common_tags, {
|
||||
Name = "bun-runner-${each.value.version}-${each.value.index}"
|
||||
MacOSVersion = each.value.version
|
||||
VmIndex = each.value.index
|
||||
})
|
||||
}
|
||||
|
||||
# Network configuration
|
||||
resource "macstadium_network" "runner_network" {
|
||||
name = "bun-runner-network"
|
||||
cidr_block = "10.0.0.0/16"
|
||||
|
||||
tags = merge(local.common_tags, {
|
||||
Name = "bun-runner-network"
|
||||
})
|
||||
}
|
||||
|
||||
# SSH key for VM access
|
||||
resource "macstadium_ssh_key" "runner_key" {
|
||||
name = "bun-runner-key"
|
||||
public_key = file("${path.module}/ssh-keys/bun-runner.pub")
|
||||
|
||||
tags = merge(local.common_tags, {
|
||||
Name = "bun-runner-key"
|
||||
})
|
||||
}
|
||||
|
||||
# Security group for runner VMs
|
||||
resource "macstadium_security_group" "runner_sg" {
|
||||
name = "bun-runner-sg"
|
||||
description = "Security group for Bun CI runner VMs"
|
||||
|
||||
# SSH access
|
||||
ingress {
|
||||
from_port = 22
|
||||
to_port = 22
|
||||
protocol = "tcp"
|
||||
cidr_blocks = ["0.0.0.0/0"]
|
||||
}
|
||||
|
||||
# VNC access (for debugging)
|
||||
ingress {
|
||||
from_port = 5900
|
||||
to_port = 5999
|
||||
protocol = "tcp"
|
||||
cidr_blocks = ["10.0.0.0/16"]
|
||||
}
|
||||
|
||||
# HTTP/HTTPS outbound
|
||||
egress {
|
||||
from_port = 80
|
||||
to_port = 80
|
||||
protocol = "tcp"
|
||||
cidr_blocks = ["0.0.0.0/0"]
|
||||
}
|
||||
|
||||
egress {
|
||||
from_port = 443
|
||||
to_port = 443
|
||||
protocol = "tcp"
|
||||
cidr_blocks = ["0.0.0.0/0"]
|
||||
}
|
||||
|
||||
# Git (SSH)
|
||||
egress {
|
||||
from_port = 22
|
||||
to_port = 22
|
||||
protocol = "tcp"
|
||||
cidr_blocks = ["0.0.0.0/0"]
|
||||
}
|
||||
|
||||
# DNS
|
||||
egress {
|
||||
from_port = 53
|
||||
to_port = 53
|
||||
protocol = "tcp"
|
||||
cidr_blocks = ["0.0.0.0/0"]
|
||||
}
|
||||
|
||||
egress {
|
||||
from_port = 53
|
||||
to_port = 53
|
||||
protocol = "udp"
|
||||
cidr_blocks = ["0.0.0.0/0"]
|
||||
}
|
||||
|
||||
tags = merge(local.common_tags, {
|
||||
Name = "bun-runner-sg"
|
||||
})
|
||||
}
|
||||
|
||||
# Load balancer for distributing jobs
|
||||
resource "macstadium_load_balancer" "runner_lb" {
|
||||
name = "bun-runner-lb"
|
||||
load_balancer_type = "application"
|
||||
|
||||
# Health check configuration
|
||||
health_check {
|
||||
enabled = true
|
||||
healthy_threshold = 2
|
||||
unhealthy_threshold = 3
|
||||
timeout = 5
|
||||
interval = 30
|
||||
path = "/health"
|
||||
port = 8080
|
||||
protocol = "HTTP"
|
||||
}
|
||||
|
||||
# Target group for all runner VMs
|
||||
target_group {
|
||||
name = "bun-runners"
|
||||
port = 8080
|
||||
protocol = "HTTP"
|
||||
|
||||
targets = [
|
||||
for vm in macstadium_vm.runners : {
|
||||
id = vm.id
|
||||
port = 8080
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
tags = merge(local.common_tags, {
|
||||
Name = "bun-runner-lb"
|
||||
})
|
||||
}
|
||||
|
||||
# Auto-scaling configuration
|
||||
resource "macstadium_autoscaling_group" "runner_asg" {
|
||||
name = "bun-runner-asg"
|
||||
min_size = 2
|
||||
max_size = 20
|
||||
desired_capacity = sum(values(var.fleet_size))
|
||||
health_check_type = "ELB"
|
||||
health_check_grace_period = 300
|
||||
|
||||
# Launch template reference
|
||||
launch_template {
|
||||
id = macstadium_launch_template.runner_template.id
|
||||
version = "$Latest"
|
||||
}
|
||||
|
||||
# Scaling policies
|
||||
target_group_arns = [macstadium_load_balancer.runner_lb.target_group[0].arn]
|
||||
|
||||
tags = merge(local.common_tags, {
|
||||
Name = "bun-runner-asg"
|
||||
})
|
||||
}
|
||||
|
||||
# Launch template for auto-scaling
|
||||
resource "macstadium_launch_template" "runner_template" {
|
||||
name = "bun-runner-template"
|
||||
image_id = data.macstadium_image.macos_15.id
|
||||
instance_type = "mac-mini-m2-pro"
|
||||
|
||||
key_name = macstadium_ssh_key.runner_key.name
|
||||
|
||||
security_group_ids = [macstadium_security_group.runner_sg.id]
|
||||
|
||||
user_data = base64encode(templatefile("${path.module}/user-data.sh", {
|
||||
buildkite_agent_token = var.buildkite_agent_token
|
||||
github_token = var.github_token
|
||||
macos_version = "15"
|
||||
vm_name = "bun-runner-asg-${timestamp()}"
|
||||
}))
|
||||
|
||||
tags = merge(local.common_tags, {
|
||||
Name = "bun-runner-template"
|
||||
})
|
||||
}
|
||||
|
||||
# CloudWatch alarms for scaling
|
||||
resource "macstadium_cloudwatch_metric_alarm" "scale_up" {
|
||||
alarm_name = "bun-runner-scale-up"
|
||||
comparison_operator = "GreaterThanThreshold"
|
||||
evaluation_periods = "2"
|
||||
metric_name = "CPUUtilization"
|
||||
namespace = "AWS/EC2"
|
||||
period = "300"
|
||||
statistic = "Average"
|
||||
threshold = "80"
|
||||
alarm_description = "This metric monitors ec2 cpu utilization"
|
||||
alarm_actions = [macstadium_autoscaling_policy.scale_up.arn]
|
||||
|
||||
dimensions = {
|
||||
AutoScalingGroupName = macstadium_autoscaling_group.runner_asg.name
|
||||
}
|
||||
}
|
||||
|
||||
resource "macstadium_cloudwatch_metric_alarm" "scale_down" {
|
||||
alarm_name = "bun-runner-scale-down"
|
||||
comparison_operator = "LessThanThreshold"
|
||||
evaluation_periods = "2"
|
||||
metric_name = "CPUUtilization"
|
||||
namespace = "AWS/EC2"
|
||||
period = "300"
|
||||
statistic = "Average"
|
||||
threshold = "20"
|
||||
alarm_description = "This metric monitors ec2 cpu utilization"
|
||||
alarm_actions = [macstadium_autoscaling_policy.scale_down.arn]
|
||||
|
||||
dimensions = {
|
||||
AutoScalingGroupName = macstadium_autoscaling_group.runner_asg.name
|
||||
}
|
||||
}
|
||||
|
||||
# Scaling policies
|
||||
resource "macstadium_autoscaling_policy" "scale_up" {
|
||||
name = "bun-runner-scale-up"
|
||||
scaling_adjustment = 2
|
||||
adjustment_type = "ChangeInCapacity"
|
||||
cooldown = 300
|
||||
autoscaling_group_name = macstadium_autoscaling_group.runner_asg.name
|
||||
}
|
||||
|
||||
resource "macstadium_autoscaling_policy" "scale_down" {
|
||||
name = "bun-runner-scale-down"
|
||||
scaling_adjustment = -1
|
||||
adjustment_type = "ChangeInCapacity"
|
||||
cooldown = 300
|
||||
autoscaling_group_name = macstadium_autoscaling_group.runner_asg.name
|
||||
}
|
||||
|
||||
# Outputs
|
||||
output "vm_instances" {
|
||||
description = "Details of created VM instances"
|
||||
value = {
|
||||
for key, vm in macstadium_vm.runners : key => {
|
||||
id = vm.id
|
||||
name = vm.name
|
||||
ip_address = vm.network_interface[0].ip_address
|
||||
image_id = vm.image_id
|
||||
status = vm.status
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
output "load_balancer_dns" {
|
||||
description = "DNS name of the load balancer"
|
||||
value = macstadium_load_balancer.runner_lb.dns_name
|
||||
}
|
||||
|
||||
output "network_id" {
|
||||
description = "ID of the runner network"
|
||||
value = macstadium_network.runner_network.id
|
||||
}
|
||||
|
||||
output "security_group_id" {
|
||||
description = "ID of the runner security group"
|
||||
value = macstadium_security_group.runner_sg.id
|
||||
}
|
||||
|
||||
output "autoscaling_group_name" {
|
||||
description = "Name of the autoscaling group"
|
||||
value = macstadium_autoscaling_group.runner_asg.name
|
||||
}
|
||||
245
.buildkite/macos-runners/terraform/outputs.tf
Normal file
245
.buildkite/macos-runners/terraform/outputs.tf
Normal file
@@ -0,0 +1,245 @@
|
||||
# VM instance outputs
|
||||
output "vm_instances" {
|
||||
description = "Details of all created VM instances"
|
||||
value = {
|
||||
for key, vm in macstadium_vm.runners : key => {
|
||||
id = vm.id
|
||||
name = vm.name
|
||||
ip_address = vm.network_interface[0].ip_address
|
||||
image_id = vm.image_id
|
||||
status = vm.status
|
||||
macos_version = regex("macos-([0-9]+)", key)[0]
|
||||
instance_type = vm.instance_type
|
||||
cpu_count = vm.cpu_count
|
||||
memory_gb = vm.memory_gb
|
||||
disk_size = vm.disk_size
|
||||
created_at = vm.created_at
|
||||
updated_at = vm.updated_at
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
output "vm_instances_by_version" {
|
||||
description = "VM instances grouped by macOS version"
|
||||
value = {
|
||||
for version in ["13", "14", "15"] : "macos_${version}" => {
|
||||
for key, vm in macstadium_vm.runners : key => {
|
||||
id = vm.id
|
||||
name = vm.name
|
||||
ip_address = vm.network_interface[0].ip_address
|
||||
status = vm.status
|
||||
}
|
||||
if can(regex("^${version}-", key))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
# Network outputs
|
||||
output "network_details" {
|
||||
description = "Network configuration details"
|
||||
value = {
|
||||
network_id = macstadium_network.runner_network.id
|
||||
cidr_block = macstadium_network.runner_network.cidr_block
|
||||
name = macstadium_network.runner_network.name
|
||||
status = macstadium_network.runner_network.status
|
||||
}
|
||||
}
|
||||
|
||||
output "security_group_details" {
|
||||
description = "Security group configuration details"
|
||||
value = {
|
||||
security_group_id = macstadium_security_group.runner_sg.id
|
||||
name = macstadium_security_group.runner_sg.name
|
||||
description = macstadium_security_group.runner_sg.description
|
||||
ingress_rules = macstadium_security_group.runner_sg.ingress
|
||||
egress_rules = macstadium_security_group.runner_sg.egress
|
||||
}
|
||||
}
|
||||
|
||||
# Load balancer outputs
|
||||
output "load_balancer_details" {
|
||||
description = "Load balancer configuration details"
|
||||
value = {
|
||||
dns_name = macstadium_load_balancer.runner_lb.dns_name
|
||||
zone_id = macstadium_load_balancer.runner_lb.zone_id
|
||||
load_balancer_type = macstadium_load_balancer.runner_lb.load_balancer_type
|
||||
target_group_arn = macstadium_load_balancer.runner_lb.target_group[0].arn
|
||||
health_check = macstadium_load_balancer.runner_lb.health_check[0]
|
||||
}
|
||||
}
|
||||
|
||||
# Auto-scaling outputs
|
||||
output "autoscaling_details" {
|
||||
description = "Auto-scaling group configuration details"
|
||||
value = {
|
||||
asg_name = macstadium_autoscaling_group.runner_asg.name
|
||||
min_size = macstadium_autoscaling_group.runner_asg.min_size
|
||||
max_size = macstadium_autoscaling_group.runner_asg.max_size
|
||||
desired_capacity = macstadium_autoscaling_group.runner_asg.desired_capacity
|
||||
launch_template = macstadium_autoscaling_group.runner_asg.launch_template[0]
|
||||
}
|
||||
}
|
||||
|
||||
# SSH key outputs
|
||||
output "ssh_key_details" {
|
||||
description = "SSH key configuration details"
|
||||
value = {
|
||||
key_name = macstadium_ssh_key.runner_key.name
|
||||
fingerprint = macstadium_ssh_key.runner_key.fingerprint
|
||||
key_pair_id = macstadium_ssh_key.runner_key.id
|
||||
}
|
||||
}
|
||||
|
||||
# Image outputs
|
||||
output "image_details" {
|
||||
description = "Details of images used for VM creation"
|
||||
value = {
|
||||
macos_13 = {
|
||||
id = data.macstadium_image.macos_13.id
|
||||
name = data.macstadium_image.macos_13.name
|
||||
description = data.macstadium_image.macos_13.description
|
||||
created_date = data.macstadium_image.macos_13.creation_date
|
||||
size = data.macstadium_image.macos_13.size
|
||||
}
|
||||
macos_14 = {
|
||||
id = data.macstadium_image.macos_14.id
|
||||
name = data.macstadium_image.macos_14.name
|
||||
description = data.macstadium_image.macos_14.description
|
||||
created_date = data.macstadium_image.macos_14.creation_date
|
||||
size = data.macstadium_image.macos_14.size
|
||||
}
|
||||
macos_15 = {
|
||||
id = data.macstadium_image.macos_15.id
|
||||
name = data.macstadium_image.macos_15.name
|
||||
description = data.macstadium_image.macos_15.description
|
||||
created_date = data.macstadium_image.macos_15.creation_date
|
||||
size = data.macstadium_image.macos_15.size
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
# Fleet statistics
|
||||
output "fleet_statistics" {
|
||||
description = "Statistics about the VM fleet"
|
||||
value = {
|
||||
total_vms = sum([
|
||||
var.fleet_size.macos_13,
|
||||
var.fleet_size.macos_14,
|
||||
var.fleet_size.macos_15
|
||||
])
|
||||
vms_by_version = {
|
||||
macos_13 = var.fleet_size.macos_13
|
||||
macos_14 = var.fleet_size.macos_14
|
||||
macos_15 = var.fleet_size.macos_15
|
||||
}
|
||||
total_cpu_cores = sum([
|
||||
var.fleet_size.macos_13,
|
||||
var.fleet_size.macos_14,
|
||||
var.fleet_size.macos_15
|
||||
]) * var.vm_configuration.cpu_count
|
||||
total_memory_gb = sum([
|
||||
var.fleet_size.macos_13,
|
||||
var.fleet_size.macos_14,
|
||||
var.fleet_size.macos_15
|
||||
]) * var.vm_configuration.memory_gb
|
||||
total_disk_gb = sum([
|
||||
var.fleet_size.macos_13,
|
||||
var.fleet_size.macos_14,
|
||||
var.fleet_size.macos_15
|
||||
]) * var.vm_configuration.disk_size
|
||||
}
|
||||
}
|
||||
|
||||
# Connection information
|
||||
output "connection_info" {
|
||||
description = "Information for connecting to the infrastructure"
|
||||
value = {
|
||||
ssh_command_template = "ssh -i ~/.ssh/bun-runner admin@{vm_ip_address}"
|
||||
vnc_port_range = "5900-5999"
|
||||
health_check_url = "http://{vm_ip_address}:8080/health"
|
||||
buildkite_tags = "queue=macos,os=macos,arch=$(uname -m)"
|
||||
}
|
||||
}
|
||||
|
||||
# Resource ARNs and IDs
|
||||
output "resource_arns" {
|
||||
description = "ARNs and IDs of created resources"
|
||||
value = {
|
||||
vm_ids = [
|
||||
for vm in macstadium_vm.runners : vm.id
|
||||
]
|
||||
network_id = macstadium_network.runner_network.id
|
||||
security_group_id = macstadium_security_group.runner_sg.id
|
||||
load_balancer_arn = macstadium_load_balancer.runner_lb.arn
|
||||
autoscaling_group_arn = macstadium_autoscaling_group.runner_asg.arn
|
||||
launch_template_id = macstadium_launch_template.runner_template.id
|
||||
}
|
||||
}
|
||||
|
||||
# Monitoring and alerting
|
||||
output "monitoring_endpoints" {
|
||||
description = "Monitoring and alerting endpoints"
|
||||
value = {
|
||||
cloudwatch_namespace = "BunCI/MacOSRunners"
|
||||
alarm_arns = [
|
||||
macstadium_cloudwatch_metric_alarm.scale_up.arn,
|
||||
macstadium_cloudwatch_metric_alarm.scale_down.arn
|
||||
]
|
||||
scaling_policy_arns = [
|
||||
macstadium_autoscaling_policy.scale_up.arn,
|
||||
macstadium_autoscaling_policy.scale_down.arn
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
# Cost information
|
||||
output "cost_information" {
|
||||
description = "Cost-related information"
|
||||
value = {
|
||||
estimated_hourly_cost = format("$%.2f", sum([
|
||||
var.fleet_size.macos_13,
|
||||
var.fleet_size.macos_14,
|
||||
var.fleet_size.macos_15
|
||||
]) * 0.50) # Estimated cost per hour per VM
|
||||
estimated_monthly_cost = format("$%.2f", sum([
|
||||
var.fleet_size.macos_13,
|
||||
var.fleet_size.macos_14,
|
||||
var.fleet_size.macos_15
|
||||
]) * 0.50 * 24 * 30) # Estimated monthly cost
|
||||
cost_optimization_enabled = var.cost_optimization.enable_spot_instances
|
||||
}
|
||||
}
|
||||
|
||||
# Terraform state information
|
||||
output "terraform_state" {
|
||||
description = "Terraform state information"
|
||||
value = {
|
||||
workspace = terraform.workspace
|
||||
terraform_version = "~> 1.0"
|
||||
provider_versions = {
|
||||
macstadium = "~> 1.0"
|
||||
}
|
||||
last_updated = timestamp()
|
||||
}
|
||||
}
|
||||
|
||||
# Summary output for easy reference
|
||||
output "deployment_summary" {
|
||||
description = "Summary of the deployment"
|
||||
value = {
|
||||
project_name = var.project_name
|
||||
environment = var.environment
|
||||
region = var.region
|
||||
total_vms = sum([
|
||||
var.fleet_size.macos_13,
|
||||
var.fleet_size.macos_14,
|
||||
var.fleet_size.macos_15
|
||||
])
|
||||
load_balancer_dns = macstadium_load_balancer.runner_lb.dns_name
|
||||
autoscaling_enabled = var.autoscaling_enabled
|
||||
backup_enabled = var.backup_config.enable_snapshots
|
||||
monitoring_enabled = var.monitoring_config.enable_cloudwatch
|
||||
deployment_time = timestamp()
|
||||
status = "deployed"
|
||||
}
|
||||
}
|
||||
266
.buildkite/macos-runners/terraform/user-data.sh
Normal file
266
.buildkite/macos-runners/terraform/user-data.sh
Normal file
@@ -0,0 +1,266 @@
|
||||
#!/bin/bash
|
||||
# User data script for macOS VM initialization
|
||||
# This script runs when the VM starts up
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
# Variables passed from Terraform
|
||||
BUILDKITE_AGENT_TOKEN="${buildkite_agent_token}"
|
||||
GITHUB_TOKEN="${github_token}"
|
||||
MACOS_VERSION="${macos_version}"
|
||||
VM_NAME="${vm_name}"
|
||||
|
||||
# Logging
|
||||
LOG_FILE="/var/log/vm-init.log"
|
||||
exec 1> >(tee -a "$LOG_FILE")
|
||||
exec 2> >(tee -a "$LOG_FILE" >&2)
|
||||
|
||||
print() {
|
||||
echo "[$(date '+%Y-%m-%d %H:%M:%S')] $*"
|
||||
}
|
||||
|
||||
print "Starting VM initialization for $VM_NAME (macOS $MACOS_VERSION)"
|
||||
|
||||
# Wait for system to be ready
|
||||
print "Waiting for system to be ready..."
|
||||
until ping -c1 google.com &>/dev/null; do
|
||||
sleep 10
|
||||
done
|
||||
|
||||
# Set timezone
|
||||
print "Setting timezone to UTC..."
|
||||
sudo systemsetup -settimezone UTC
|
||||
|
||||
# Configure hostname
|
||||
print "Setting hostname to $VM_NAME..."
|
||||
sudo scutil --set HostName "$VM_NAME"
|
||||
sudo scutil --set LocalHostName "$VM_NAME"
|
||||
sudo scutil --set ComputerName "$VM_NAME"
|
||||
|
||||
# Update system
|
||||
print "Checking for system updates..."
|
||||
sudo softwareupdate -i -a --no-scan || true
|
||||
|
||||
# Configure Buildkite agent
|
||||
print "Configuring Buildkite agent..."
|
||||
mkdir -p /usr/local/var/buildkite-agent
|
||||
mkdir -p /usr/local/var/log/buildkite-agent
|
||||
|
||||
# Create Buildkite agent configuration
|
||||
cat > /usr/local/var/buildkite-agent/buildkite-agent.cfg << EOF
|
||||
token="$BUILDKITE_AGENT_TOKEN"
|
||||
name="$VM_NAME"
|
||||
tags="queue=macos,os=macos,arch=$(uname -m),version=$MACOS_VERSION,hostname=$VM_NAME"
|
||||
build-path="/Users/buildkite/workspace"
|
||||
hooks-path="/usr/local/bin/bun-ci/hooks"
|
||||
plugins-path="/Users/buildkite/.buildkite-agent/plugins"
|
||||
git-clean-flags="-fdq"
|
||||
git-clone-flags="-v"
|
||||
shell="/bin/bash -l"
|
||||
spawn=1
|
||||
priority=normal
|
||||
disconnect-after-job=false
|
||||
disconnect-after-idle-timeout=0
|
||||
cancel-grace-period=10
|
||||
enable-job-log-tmpfile=true
|
||||
timestamp-lines=true
|
||||
EOF
|
||||
|
||||
# Set up GitHub token for private repositories
|
||||
print "Configuring GitHub access..."
|
||||
if [[ -n "$GITHUB_TOKEN" ]]; then
|
||||
# Configure git to use the token
|
||||
git config --global url."https://oauth2:$GITHUB_TOKEN@github.com/".insteadOf "https://github.com/"
|
||||
git config --global url."https://oauth2:$GITHUB_TOKEN@github.com/".insteadOf "git@github.com:"
|
||||
|
||||
# Configure npm to use the token
|
||||
npm config set @oven-sh:registry https://npm.pkg.github.com/
|
||||
echo "//npm.pkg.github.com/:_authToken=$GITHUB_TOKEN" >> ~/.npmrc
|
||||
fi
|
||||
|
||||
# Set up SSH keys for GitHub (if available)
|
||||
if [[ -f "/usr/local/etc/ssh/github_rsa" ]]; then
|
||||
print "Configuring SSH keys for GitHub..."
|
||||
mkdir -p ~/.ssh
|
||||
cp /usr/local/etc/ssh/github_rsa ~/.ssh/
|
||||
cp /usr/local/etc/ssh/github_rsa.pub ~/.ssh/
|
||||
chmod 600 ~/.ssh/github_rsa
|
||||
chmod 644 ~/.ssh/github_rsa.pub
|
||||
|
||||
# Configure SSH to use the key
|
||||
cat > ~/.ssh/config << EOF
|
||||
Host github.com
|
||||
HostName github.com
|
||||
User git
|
||||
IdentityFile ~/.ssh/github_rsa
|
||||
StrictHostKeyChecking no
|
||||
EOF
|
||||
fi
|
||||
|
||||
# Create health check endpoint
|
||||
print "Setting up health check endpoint..."
|
||||
cat > /usr/local/bin/health-check.sh << 'EOF'
|
||||
#!/bin/bash
|
||||
# Health check script for load balancer
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
# Check if system is ready
|
||||
if ! ping -c1 google.com &>/dev/null; then
|
||||
echo "Network not ready"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check disk space
|
||||
DISK_USAGE=$(df -h / | awk 'NR==2 {print $5}' | sed 's/%//')
|
||||
if [[ $DISK_USAGE -gt 95 ]]; then
|
||||
echo "Disk usage too high: ${DISK_USAGE}%"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check memory
|
||||
MEMORY_PRESSURE=$(memory_pressure | grep "System-wide memory free percentage" | awk '{print $5}' | sed 's/%//')
|
||||
if [[ $MEMORY_PRESSURE -lt 5 ]]; then
|
||||
echo "Memory pressure too high: ${MEMORY_PRESSURE}% free"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check if required services are running
|
||||
if ! pgrep -f "job-runner.sh" > /dev/null; then
|
||||
echo "Job runner not running"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "OK"
|
||||
exit 0
|
||||
EOF
|
||||
|
||||
chmod +x /usr/local/bin/health-check.sh
|
||||
|
||||
# Start simple HTTP server for health checks
|
||||
print "Starting health check server..."
|
||||
cat > /usr/local/bin/health-server.sh << 'EOF'
|
||||
#!/bin/bash
|
||||
# Simple HTTP server for health checks
|
||||
|
||||
PORT=8080
|
||||
while true; do
|
||||
echo -e "HTTP/1.1 200 OK\r\nContent-Type: text/plain\r\n\r\n$(/usr/local/bin/health-check.sh)" | nc -l -p $PORT
|
||||
done
|
||||
EOF
|
||||
|
||||
chmod +x /usr/local/bin/health-server.sh
|
||||
|
||||
# Create LaunchDaemon for health check server
|
||||
cat > /Library/LaunchDaemons/com.bun.health-server.plist << 'EOF'
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>Label</key>
|
||||
<string>com.bun.health-server</string>
|
||||
<key>ProgramArguments</key>
|
||||
<array>
|
||||
<string>/usr/local/bin/health-server.sh</string>
|
||||
</array>
|
||||
<key>RunAtLoad</key>
|
||||
<true/>
|
||||
<key>KeepAlive</key>
|
||||
<true/>
|
||||
<key>StandardOutPath</key>
|
||||
<string>/var/log/health-server.log</string>
|
||||
<key>StandardErrorPath</key>
|
||||
<string>/var/log/health-server.error.log</string>
|
||||
</dict>
|
||||
</plist>
|
||||
EOF
|
||||
|
||||
# Load and start the health check server
|
||||
sudo launchctl load /Library/LaunchDaemons/com.bun.health-server.plist
|
||||
sudo launchctl start com.bun.health-server
|
||||
|
||||
# Configure log rotation
|
||||
print "Configuring log rotation..."
|
||||
cat > /etc/newsyslog.d/bun-ci.conf << 'EOF'
|
||||
# Log rotation for Bun CI
|
||||
/usr/local/var/log/buildkite-agent/*.log 644 5 1000 * GZ
|
||||
/var/log/vm-init.log 644 5 1000 * GZ
|
||||
/var/log/health-server.log 644 5 1000 * GZ
|
||||
/var/log/health-server.error.log 644 5 1000 * GZ
|
||||
EOF
|
||||
|
||||
# Restart syslog to pick up new configuration
|
||||
sudo launchctl unload /System/Library/LaunchDaemons/com.apple.syslogd.plist
|
||||
sudo launchctl load /System/Library/LaunchDaemons/com.apple.syslogd.plist
|
||||
|
||||
# Configure system monitoring
|
||||
print "Setting up system monitoring..."
|
||||
cat > /usr/local/bin/system-monitor.sh << 'EOF'
|
||||
#!/bin/bash
|
||||
# System monitoring script
|
||||
|
||||
LOG_FILE="/var/log/system-monitor.log"
|
||||
|
||||
while true; do
|
||||
echo "[$(date '+%Y-%m-%d %H:%M:%S')] System Stats:" >> "$LOG_FILE"
|
||||
echo " CPU: $(top -l 1 -n 0 | grep "CPU usage" | awk '{print $3}' | sed 's/%//')" >> "$LOG_FILE"
|
||||
echo " Memory: $(memory_pressure | grep "System-wide memory free percentage" | awk '{print $5}')" >> "$LOG_FILE"
|
||||
echo " Disk: $(df -h / | awk 'NR==2 {print $5}')" >> "$LOG_FILE"
|
||||
echo " Load: $(uptime | awk -F'load averages:' '{print $2}')" >> "$LOG_FILE"
|
||||
echo " Processes: $(ps aux | wc -l)" >> "$LOG_FILE"
|
||||
echo "" >> "$LOG_FILE"
|
||||
|
||||
sleep 300 # 5 minutes
|
||||
done
|
||||
EOF
|
||||
|
||||
chmod +x /usr/local/bin/system-monitor.sh
|
||||
|
||||
# Create LaunchDaemon for system monitoring
|
||||
cat > /Library/LaunchDaemons/com.bun.system-monitor.plist << 'EOF'
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>Label</key>
|
||||
<string>com.bun.system-monitor</string>
|
||||
<key>ProgramArguments</key>
|
||||
<array>
|
||||
<string>/usr/local/bin/system-monitor.sh</string>
|
||||
</array>
|
||||
<key>RunAtLoad</key>
|
||||
<true/>
|
||||
<key>KeepAlive</key>
|
||||
<true/>
|
||||
</dict>
|
||||
</plist>
|
||||
EOF
|
||||
|
||||
# Load and start the system monitor
|
||||
sudo launchctl load /Library/LaunchDaemons/com.bun.system-monitor.plist
|
||||
sudo launchctl start com.bun.system-monitor
|
||||
|
||||
# Final configuration
|
||||
print "Performing final configuration..."
|
||||
|
||||
# Ensure all services are running
|
||||
sudo launchctl load /Library/LaunchDaemons/com.buildkite.buildkite-agent.plist
|
||||
sudo launchctl start com.buildkite.buildkite-agent
|
||||
|
||||
# Create marker file to indicate initialization is complete
|
||||
touch /var/tmp/vm-init-complete
|
||||
echo "$(date '+%Y-%m-%d %H:%M:%S'): VM initialization completed" >> /var/tmp/vm-init-complete
|
||||
|
||||
print "VM initialization completed successfully!"
|
||||
print "VM Name: $VM_NAME"
|
||||
print "macOS Version: $MACOS_VERSION"
|
||||
print "Status: Ready for Buildkite jobs"
|
||||
|
||||
# Log final system state
|
||||
print "Final system state:"
|
||||
print " Hostname: $(hostname)"
|
||||
print " Uptime: $(uptime)"
|
||||
print " Disk usage: $(df -h / | awk 'NR==2 {print $5}')"
|
||||
print " Memory: $(memory_pressure | grep "System-wide memory free percentage" | awk '{print $5}')"
|
||||
|
||||
print "Health check available at: http://$(hostname):8080/health"
|
||||
302
.buildkite/macos-runners/terraform/variables.tf
Normal file
302
.buildkite/macos-runners/terraform/variables.tf
Normal file
@@ -0,0 +1,302 @@
|
||||
# Core infrastructure variables
|
||||
variable "project_name" {
|
||||
description = "Name of the project"
|
||||
type = string
|
||||
default = "bun-ci"
|
||||
}
|
||||
|
||||
variable "environment" {
|
||||
description = "Environment name"
|
||||
type = string
|
||||
default = "production"
|
||||
}
|
||||
|
||||
variable "region" {
|
||||
description = "MacStadium region"
|
||||
type = string
|
||||
default = "us-west-1"
|
||||
}
|
||||
|
||||
# MacStadium configuration
|
||||
variable "macstadium_api_key" {
|
||||
description = "MacStadium API key"
|
||||
type = string
|
||||
sensitive = true
|
||||
}
|
||||
|
||||
variable "macstadium_endpoint" {
|
||||
description = "MacStadium API endpoint"
|
||||
type = string
|
||||
default = "https://api.macstadium.com"
|
||||
}
|
||||
|
||||
# Buildkite configuration
|
||||
variable "buildkite_agent_token" {
|
||||
description = "Buildkite agent token"
|
||||
type = string
|
||||
sensitive = true
|
||||
}
|
||||
|
||||
variable "buildkite_org" {
|
||||
description = "Buildkite organization slug"
|
||||
type = string
|
||||
default = "bun"
|
||||
}
|
||||
|
||||
variable "buildkite_queues" {
|
||||
description = "Buildkite queues to register agents with"
|
||||
type = list(string)
|
||||
default = ["macos", "macos-arm64", "macos-x86_64"]
|
||||
}
|
||||
|
||||
# GitHub configuration
|
||||
variable "github_token" {
|
||||
description = "GitHub token for accessing private repositories"
|
||||
type = string
|
||||
sensitive = true
|
||||
}
|
||||
|
||||
variable "github_org" {
|
||||
description = "GitHub organization"
|
||||
type = string
|
||||
default = "oven-sh"
|
||||
}
|
||||
|
||||
# VM fleet configuration
|
||||
variable "fleet_size" {
|
||||
description = "Number of VMs per macOS version"
|
||||
type = object({
|
||||
macos_13 = number
|
||||
macos_14 = number
|
||||
macos_15 = number
|
||||
})
|
||||
default = {
|
||||
macos_13 = 4
|
||||
macos_14 = 6
|
||||
macos_15 = 8
|
||||
}
|
||||
|
||||
validation {
|
||||
condition = alltrue([
|
||||
var.fleet_size.macos_13 >= 0,
|
||||
var.fleet_size.macos_14 >= 0,
|
||||
var.fleet_size.macos_15 >= 0,
|
||||
var.fleet_size.macos_13 + var.fleet_size.macos_14 + var.fleet_size.macos_15 > 0
|
||||
])
|
||||
error_message = "Fleet sizes must be non-negative and at least one version must have VMs."
|
||||
}
|
||||
}
|
||||
|
||||
variable "vm_configuration" {
|
||||
description = "VM configuration settings"
|
||||
type = object({
|
||||
cpu_count = number
|
||||
memory_gb = number
|
||||
disk_size = number
|
||||
})
|
||||
default = {
|
||||
cpu_count = 12
|
||||
memory_gb = 32
|
||||
disk_size = 500
|
||||
}
|
||||
|
||||
validation {
|
||||
condition = alltrue([
|
||||
var.vm_configuration.cpu_count >= 4,
|
||||
var.vm_configuration.memory_gb >= 16,
|
||||
var.vm_configuration.disk_size >= 100
|
||||
])
|
||||
error_message = "VM configuration must have at least 4 CPUs, 16GB memory, and 100GB disk."
|
||||
}
|
||||
}
|
||||
|
||||
# Auto-scaling configuration
|
||||
variable "autoscaling_enabled" {
|
||||
description = "Enable auto-scaling for VM fleet"
|
||||
type = bool
|
||||
default = true
|
||||
}
|
||||
|
||||
variable "autoscaling_config" {
|
||||
description = "Auto-scaling configuration"
|
||||
type = object({
|
||||
min_size = number
|
||||
max_size = number
|
||||
desired_capacity = number
|
||||
scale_up_threshold = number
|
||||
scale_down_threshold = number
|
||||
scale_up_adjustment = number
|
||||
scale_down_adjustment = number
|
||||
cooldown_period = number
|
||||
})
|
||||
default = {
|
||||
min_size = 2
|
||||
max_size = 30
|
||||
desired_capacity = 10
|
||||
scale_up_threshold = 80
|
||||
scale_down_threshold = 20
|
||||
scale_up_adjustment = 2
|
||||
scale_down_adjustment = 1
|
||||
cooldown_period = 300
|
||||
}
|
||||
}
|
||||
|
||||
# Image configuration
|
||||
variable "image_name_prefix" {
|
||||
description = "Prefix for VM image names"
|
||||
type = string
|
||||
default = "bun-macos"
|
||||
}
|
||||
|
||||
variable "image_rebuild_schedule" {
|
||||
description = "Cron schedule for rebuilding images"
|
||||
type = string
|
||||
default = "0 2 * * *" # Daily at 2 AM
|
||||
}
|
||||
|
||||
variable "image_retention_days" {
|
||||
description = "Number of days to retain old images"
|
||||
type = number
|
||||
default = 7
|
||||
}
|
||||
|
||||
# Network configuration
|
||||
variable "network_config" {
|
||||
description = "Network configuration"
|
||||
type = object({
|
||||
cidr_block = string
|
||||
enable_nat = bool
|
||||
enable_vpn = bool
|
||||
allowed_cidrs = list(string)
|
||||
})
|
||||
default = {
|
||||
cidr_block = "10.0.0.0/16"
|
||||
enable_nat = true
|
||||
enable_vpn = false
|
||||
allowed_cidrs = ["0.0.0.0/0"]
|
||||
}
|
||||
}
|
||||
|
||||
# Security configuration
|
||||
variable "security_config" {
|
||||
description = "Security configuration"
|
||||
type = object({
|
||||
enable_ssh_access = bool
|
||||
enable_vnc_access = bool
|
||||
ssh_allowed_cidrs = list(string)
|
||||
vnc_allowed_cidrs = list(string)
|
||||
enable_disk_encryption = bool
|
||||
})
|
||||
default = {
|
||||
enable_ssh_access = true
|
||||
enable_vnc_access = true
|
||||
ssh_allowed_cidrs = ["0.0.0.0/0"]
|
||||
vnc_allowed_cidrs = ["10.0.0.0/16"]
|
||||
enable_disk_encryption = true
|
||||
}
|
||||
}
|
||||
|
||||
# Monitoring configuration
|
||||
variable "monitoring_config" {
|
||||
description = "Monitoring configuration"
|
||||
type = object({
|
||||
enable_cloudwatch = bool
|
||||
enable_custom_metrics = bool
|
||||
log_retention_days = number
|
||||
alert_email = string
|
||||
})
|
||||
default = {
|
||||
enable_cloudwatch = true
|
||||
enable_custom_metrics = true
|
||||
log_retention_days = 30
|
||||
alert_email = "devops@oven.sh"
|
||||
}
|
||||
}
|
||||
|
||||
# Backup configuration
|
||||
variable "backup_config" {
|
||||
description = "Backup configuration"
|
||||
type = object({
|
||||
enable_snapshots = bool
|
||||
snapshot_schedule = string
|
||||
snapshot_retention = number
|
||||
enable_cross_region = bool
|
||||
})
|
||||
default = {
|
||||
enable_snapshots = true
|
||||
snapshot_schedule = "0 4 * * *" # Daily at 4 AM
|
||||
snapshot_retention = 7
|
||||
enable_cross_region = false
|
||||
}
|
||||
}
|
||||
|
||||
# Cost optimization
|
||||
variable "cost_optimization" {
|
||||
description = "Cost optimization settings"
|
||||
type = object({
|
||||
enable_spot_instances = bool
|
||||
spot_price_max = number
|
||||
enable_hibernation = bool
|
||||
idle_shutdown_timeout = number
|
||||
})
|
||||
default = {
|
||||
enable_spot_instances = false
|
||||
spot_price_max = 0.0
|
||||
enable_hibernation = false
|
||||
idle_shutdown_timeout = 3600 # 1 hour
|
||||
}
|
||||
}
|
||||
|
||||
# Maintenance configuration
|
||||
variable "maintenance_config" {
|
||||
description = "Maintenance configuration"
|
||||
type = object({
|
||||
maintenance_window_start = string
|
||||
maintenance_window_end = string
|
||||
auto_update_enabled = bool
|
||||
patch_schedule = string
|
||||
})
|
||||
default = {
|
||||
maintenance_window_start = "02:00"
|
||||
maintenance_window_end = "06:00"
|
||||
auto_update_enabled = true
|
||||
patch_schedule = "0 3 * * 0" # Weekly on Sunday at 3 AM
|
||||
}
|
||||
}
|
||||
|
||||
# Tagging
|
||||
variable "tags" {
|
||||
description = "Additional tags to apply to resources"
|
||||
type = map(string)
|
||||
default = {}
|
||||
}
|
||||
|
||||
# SSH key configuration
|
||||
variable "ssh_key_name" {
|
||||
description = "Name of the SSH key pair"
|
||||
type = string
|
||||
default = "bun-runner-key"
|
||||
}
|
||||
|
||||
variable "ssh_public_key_path" {
|
||||
description = "Path to the SSH public key file"
|
||||
type = string
|
||||
default = "~/.ssh/id_rsa.pub"
|
||||
}
|
||||
|
||||
# Feature flags
|
||||
variable "feature_flags" {
|
||||
description = "Feature flags for experimental features"
|
||||
type = object({
|
||||
enable_gpu_passthrough = bool
|
||||
enable_nested_virt = bool
|
||||
enable_secure_boot = bool
|
||||
enable_tpm = bool
|
||||
})
|
||||
default = {
|
||||
enable_gpu_passthrough = true
|
||||
enable_nested_virt = false
|
||||
enable_secure_boot = false
|
||||
enable_tpm = false
|
||||
}
|
||||
}
|
||||
@@ -1,464 +0,0 @@
|
||||
# Windows Code Signing Script for Bun
|
||||
# Uses DigiCert KeyLocker for Authenticode signing
|
||||
# Native PowerShell implementation - no path translation issues
|
||||
|
||||
param(
|
||||
[Parameter(Mandatory=$true)]
|
||||
[string]$BunProfileExe,
|
||||
|
||||
[Parameter(Mandatory=$true)]
|
||||
[string]$BunExe
|
||||
)
|
||||
|
||||
$ErrorActionPreference = "Stop"
|
||||
$ProgressPreference = "SilentlyContinue"
|
||||
|
||||
# Logging functions
|
||||
function Log-Info {
|
||||
param([string]$Message)
|
||||
Write-Host "[INFO] $Message" -ForegroundColor Cyan
|
||||
}
|
||||
|
||||
function Log-Success {
|
||||
param([string]$Message)
|
||||
Write-Host "[SUCCESS] $Message" -ForegroundColor Green
|
||||
}
|
||||
|
||||
function Log-Error {
|
||||
param([string]$Message)
|
||||
Write-Host "[ERROR] $Message" -ForegroundColor Red
|
||||
}
|
||||
|
||||
function Log-Debug {
|
||||
param([string]$Message)
|
||||
if ($env:DEBUG -eq "true" -or $env:DEBUG -eq "1") {
|
||||
Write-Host "[DEBUG] $Message" -ForegroundColor Gray
|
||||
}
|
||||
}
|
||||
|
||||
# Load Visual Studio environment if not already loaded
|
||||
function Ensure-VSEnvironment {
|
||||
if ($null -eq $env:VSINSTALLDIR) {
|
||||
Log-Info "Loading Visual Studio environment..."
|
||||
|
||||
$vswhere = "C:\Program Files (x86)\Microsoft Visual Studio\Installer\vswhere.exe"
|
||||
if (!(Test-Path $vswhere)) {
|
||||
throw "Command not found: vswhere (did you install Visual Studio?)"
|
||||
}
|
||||
|
||||
$vsDir = & $vswhere -prerelease -latest -property installationPath
|
||||
if ($null -eq $vsDir) {
|
||||
$vsDir = Get-ChildItem -Path "C:\Program Files\Microsoft Visual Studio\2022" -Directory -ErrorAction SilentlyContinue
|
||||
if ($null -eq $vsDir) {
|
||||
throw "Visual Studio directory not found."
|
||||
}
|
||||
$vsDir = $vsDir.FullName
|
||||
}
|
||||
|
||||
Push-Location $vsDir
|
||||
try {
|
||||
$vsShell = Join-Path -Path $vsDir -ChildPath "Common7\Tools\Launch-VsDevShell.ps1"
|
||||
. $vsShell -Arch amd64 -HostArch amd64
|
||||
} finally {
|
||||
Pop-Location
|
||||
}
|
||||
|
||||
Log-Success "Visual Studio environment loaded"
|
||||
}
|
||||
|
||||
if ($env:VSCMD_ARG_TGT_ARCH -eq "x86") {
|
||||
throw "Visual Studio environment is targeting 32 bit, but only 64 bit is supported."
|
||||
}
|
||||
}
|
||||
|
||||
# Check for required environment variables
|
||||
function Check-Environment {
|
||||
Log-Info "Checking environment variables..."
|
||||
|
||||
$required = @{
|
||||
"SM_API_KEY" = $env:SM_API_KEY
|
||||
"SM_CLIENT_CERT_PASSWORD" = $env:SM_CLIENT_CERT_PASSWORD
|
||||
"SM_KEYPAIR_ALIAS" = $env:SM_KEYPAIR_ALIAS
|
||||
"SM_HOST" = $env:SM_HOST
|
||||
"SM_CLIENT_CERT_FILE" = $env:SM_CLIENT_CERT_FILE
|
||||
}
|
||||
|
||||
$missing = @()
|
||||
foreach ($key in $required.Keys) {
|
||||
if ([string]::IsNullOrEmpty($required[$key])) {
|
||||
$missing += $key
|
||||
} else {
|
||||
Log-Debug "$key is set (length: $($required[$key].Length))"
|
||||
}
|
||||
}
|
||||
|
||||
if ($missing.Count -gt 0) {
|
||||
throw "Missing required environment variables: $($missing -join ', ')"
|
||||
}
|
||||
|
||||
Log-Success "All required environment variables are present"
|
||||
}
|
||||
|
||||
# Setup certificate file
|
||||
function Setup-Certificate {
|
||||
Log-Info "Setting up certificate..."
|
||||
|
||||
# Always try to decode as base64 first
|
||||
# If it fails, then treat as file path
|
||||
try {
|
||||
Log-Info "Attempting to decode certificate as base64..."
|
||||
Log-Debug "Input string length: $($env:SM_CLIENT_CERT_FILE.Length) characters"
|
||||
|
||||
$tempCertPath = Join-Path $env:TEMP "digicert_cert_$(Get-Random).p12"
|
||||
|
||||
# Try to decode as base64
|
||||
$certBytes = [System.Convert]::FromBase64String($env:SM_CLIENT_CERT_FILE)
|
||||
[System.IO.File]::WriteAllBytes($tempCertPath, $certBytes)
|
||||
|
||||
# Validate the decoded certificate size
|
||||
$fileSize = (Get-Item $tempCertPath).Length
|
||||
if ($fileSize -lt 100) {
|
||||
throw "Decoded certificate too small: $fileSize bytes (expected >100 bytes)"
|
||||
}
|
||||
|
||||
# Update environment to point to file
|
||||
$env:SM_CLIENT_CERT_FILE = $tempCertPath
|
||||
|
||||
Log-Success "Certificate decoded and written to: $tempCertPath"
|
||||
Log-Debug "Decoded certificate file size: $fileSize bytes"
|
||||
|
||||
# Register cleanup
|
||||
$global:TEMP_CERT_PATH = $tempCertPath
|
||||
|
||||
} catch {
|
||||
# If base64 decode fails, check if it's a file path
|
||||
Log-Info "Base64 decode failed, checking if it's a file path..."
|
||||
Log-Debug "Decode error: $_"
|
||||
|
||||
if (Test-Path $env:SM_CLIENT_CERT_FILE) {
|
||||
$fileSize = (Get-Item $env:SM_CLIENT_CERT_FILE).Length
|
||||
|
||||
# Validate file size
|
||||
if ($fileSize -lt 100) {
|
||||
throw "Certificate file too small: $fileSize bytes at $env:SM_CLIENT_CERT_FILE (possibly corrupted)"
|
||||
}
|
||||
|
||||
Log-Info "Using certificate file: $env:SM_CLIENT_CERT_FILE"
|
||||
Log-Debug "Certificate file size: $fileSize bytes"
|
||||
} else {
|
||||
throw "SM_CLIENT_CERT_FILE is neither valid base64 nor an existing file: $env:SM_CLIENT_CERT_FILE"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
# Install DigiCert KeyLocker tools
|
||||
function Install-KeyLocker {
|
||||
Log-Info "Setting up DigiCert KeyLocker tools..."
|
||||
|
||||
# Define our controlled installation directory
|
||||
$installDir = "C:\BuildTools\DigiCert"
|
||||
$smctlPath = Join-Path $installDir "smctl.exe"
|
||||
|
||||
# Check if already installed in our controlled location
|
||||
if (Test-Path $smctlPath) {
|
||||
Log-Success "KeyLocker tools already installed at: $smctlPath"
|
||||
|
||||
# Add to PATH if not already there
|
||||
if ($env:PATH -notlike "*$installDir*") {
|
||||
$env:PATH = "$installDir;$env:PATH"
|
||||
Log-Info "Added to PATH: $installDir"
|
||||
}
|
||||
|
||||
return $smctlPath
|
||||
}
|
||||
|
||||
Log-Info "Installing KeyLocker tools to: $installDir"
|
||||
|
||||
# Create the installation directory if it doesn't exist
|
||||
if (!(Test-Path $installDir)) {
|
||||
Log-Info "Creating installation directory: $installDir"
|
||||
try {
|
||||
New-Item -ItemType Directory -Path $installDir -Force | Out-Null
|
||||
Log-Success "Created directory: $installDir"
|
||||
} catch {
|
||||
throw "Failed to create directory $installDir : $_"
|
||||
}
|
||||
}
|
||||
|
||||
# Download MSI installer
|
||||
$msiUrl = "https://bun-ci-assets.bun.sh/Keylockertools-windows-x64.msi"
|
||||
$msiPath = Join-Path $env:TEMP "Keylockertools-windows-x64.msi"
|
||||
|
||||
Log-Info "Downloading MSI from: $msiUrl"
|
||||
Log-Info "Downloading to: $msiPath"
|
||||
|
||||
try {
|
||||
# Remove existing MSI if present
|
||||
if (Test-Path $msiPath) {
|
||||
Remove-Item $msiPath -Force
|
||||
Log-Debug "Removed existing MSI file"
|
||||
}
|
||||
|
||||
# Download with progress tracking
|
||||
$webClient = New-Object System.Net.WebClient
|
||||
$webClient.DownloadFile($msiUrl, $msiPath)
|
||||
|
||||
if (!(Test-Path $msiPath)) {
|
||||
throw "MSI download failed - file not found"
|
||||
}
|
||||
|
||||
$fileSize = (Get-Item $msiPath).Length
|
||||
Log-Success "MSI downloaded successfully (size: $fileSize bytes)"
|
||||
|
||||
} catch {
|
||||
throw "Failed to download MSI: $_"
|
||||
}
|
||||
|
||||
# Install MSI
|
||||
Log-Info "Installing MSI..."
|
||||
Log-Debug "MSI path: $msiPath"
|
||||
Log-Debug "File exists: $(Test-Path $msiPath)"
|
||||
Log-Debug "File size: $((Get-Item $msiPath).Length) bytes"
|
||||
|
||||
# Check if running as administrator
|
||||
$isAdmin = ([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole]::Administrator)
|
||||
Log-Info "Running as administrator: $isAdmin"
|
||||
|
||||
# Install MSI silently to our controlled directory
|
||||
$arguments = @(
|
||||
"/i", "`"$msiPath`"",
|
||||
"/quiet",
|
||||
"/norestart",
|
||||
"TARGETDIR=`"$installDir`"",
|
||||
"INSTALLDIR=`"$installDir`"",
|
||||
"ACCEPT_EULA=1",
|
||||
"ADDLOCAL=ALL"
|
||||
)
|
||||
|
||||
Log-Debug "Running: msiexec.exe $($arguments -join ' ')"
|
||||
Log-Info "Installing to: $installDir"
|
||||
|
||||
$process = Start-Process -FilePath "msiexec.exe" -ArgumentList $arguments -Wait -PassThru -NoNewWindow
|
||||
|
||||
if ($process.ExitCode -ne 0) {
|
||||
Log-Error "MSI installation failed with exit code: $($process.ExitCode)"
|
||||
|
||||
# Try to get error details from event log
|
||||
try {
|
||||
$events = Get-WinEvent -LogName "Application" -MaxEvents 10 |
|
||||
Where-Object { $_.ProviderName -eq "MsiInstaller" -and $_.TimeCreated -gt (Get-Date).AddMinutes(-1) }
|
||||
|
||||
foreach ($event in $events) {
|
||||
Log-Debug "MSI Event: $($event.Message)"
|
||||
}
|
||||
} catch {
|
||||
Log-Debug "Could not retrieve MSI installation events"
|
||||
}
|
||||
|
||||
throw "MSI installation failed with exit code: $($process.ExitCode)"
|
||||
}
|
||||
|
||||
Log-Success "MSI installation completed"
|
||||
|
||||
# Wait for installation to complete
|
||||
Start-Sleep -Seconds 2
|
||||
|
||||
# Verify smctl.exe exists in our controlled location
|
||||
if (Test-Path $smctlPath) {
|
||||
Log-Success "KeyLocker tools installed successfully at: $smctlPath"
|
||||
|
||||
# Add to PATH
|
||||
$env:PATH = "$installDir;$env:PATH"
|
||||
Log-Info "Added to PATH: $installDir"
|
||||
|
||||
return $smctlPath
|
||||
}
|
||||
|
||||
# If not in our expected location, check if it installed somewhere in the directory
|
||||
$found = Get-ChildItem -Path $installDir -Filter "smctl.exe" -Recurse -ErrorAction SilentlyContinue |
|
||||
Select-Object -First 1
|
||||
|
||||
if ($found) {
|
||||
Log-Success "Found smctl.exe at: $($found.FullName)"
|
||||
$smctlDir = $found.DirectoryName
|
||||
$env:PATH = "$smctlDir;$env:PATH"
|
||||
return $found.FullName
|
||||
}
|
||||
|
||||
throw "KeyLocker tools installation succeeded but smctl.exe not found in $installDir"
|
||||
}
|
||||
|
||||
# Configure KeyLocker
|
||||
function Configure-KeyLocker {
|
||||
param([string]$SmctlPath)
|
||||
|
||||
Log-Info "Configuring KeyLocker..."
|
||||
|
||||
# Verify smctl is accessible
|
||||
try {
|
||||
$version = & $SmctlPath --version 2>&1
|
||||
Log-Debug "smctl version: $version"
|
||||
} catch {
|
||||
throw "Failed to run smctl: $_"
|
||||
}
|
||||
|
||||
# Configure KeyLocker credentials and environment
|
||||
Log-Info "Configuring KeyLocker credentials..."
|
||||
|
||||
try {
|
||||
# Save credentials (API key and password)
|
||||
Log-Info "Saving credentials to OS store..."
|
||||
$saveOutput = & $SmctlPath credentials save $env:SM_API_KEY $env:SM_CLIENT_CERT_PASSWORD 2>&1 | Out-String
|
||||
Log-Debug "Credentials save output: $saveOutput"
|
||||
|
||||
if ($saveOutput -like "*Credentials saved*") {
|
||||
Log-Success "Credentials saved successfully"
|
||||
}
|
||||
|
||||
# Set environment variables for smctl
|
||||
Log-Info "Setting KeyLocker environment variables..."
|
||||
$env:SM_HOST = $env:SM_HOST # Already set, but ensure it's available
|
||||
$env:SM_API_KEY = $env:SM_API_KEY # Already set
|
||||
$env:SM_CLIENT_CERT_FILE = $env:SM_CLIENT_CERT_FILE # Path to decoded cert file
|
||||
Log-Debug "SM_HOST: $env:SM_HOST"
|
||||
Log-Debug "SM_CLIENT_CERT_FILE: $env:SM_CLIENT_CERT_FILE"
|
||||
|
||||
# Run health check
|
||||
Log-Info "Running KeyLocker health check..."
|
||||
$healthOutput = & $SmctlPath healthcheck 2>&1 | Out-String
|
||||
Log-Debug "Health check output: $healthOutput"
|
||||
|
||||
if ($healthOutput -like "*Healthy*" -or $healthOutput -like "*SUCCESS*" -or $LASTEXITCODE -eq 0) {
|
||||
Log-Success "KeyLocker health check passed"
|
||||
} else {
|
||||
Log-Error "Health check failed: $healthOutput"
|
||||
# Don't throw here, sometimes healthcheck is flaky but signing still works
|
||||
}
|
||||
|
||||
# Sync certificates to Windows certificate store
|
||||
Log-Info "Syncing certificates to Windows store..."
|
||||
$syncOutput = & $SmctlPath windows certsync 2>&1 | Out-String
|
||||
Log-Debug "Certificate sync output: $syncOutput"
|
||||
|
||||
if ($syncOutput -like "*success*" -or $syncOutput -like "*synced*" -or $LASTEXITCODE -eq 0) {
|
||||
Log-Success "Certificates synced to Windows store"
|
||||
} else {
|
||||
Log-Info "Certificate sync output: $syncOutput"
|
||||
}
|
||||
|
||||
} catch {
|
||||
throw "Failed to configure KeyLocker: $_"
|
||||
}
|
||||
}
|
||||
|
||||
# Sign an executable
|
||||
function Sign-Executable {
|
||||
param(
|
||||
[string]$ExePath,
|
||||
[string]$SmctlPath
|
||||
)
|
||||
|
||||
if (!(Test-Path $ExePath)) {
|
||||
throw "Executable not found: $ExePath"
|
||||
}
|
||||
|
||||
$fileName = Split-Path $ExePath -Leaf
|
||||
Log-Info "Signing $fileName..."
|
||||
Log-Debug "Full path: $ExePath"
|
||||
Log-Debug "File size: $((Get-Item $ExePath).Length) bytes"
|
||||
|
||||
# Check if already signed
|
||||
$existingSig = Get-AuthenticodeSignature $ExePath
|
||||
if ($existingSig.Status -eq "Valid") {
|
||||
Log-Info "$fileName is already signed by: $($existingSig.SignerCertificate.Subject)"
|
||||
Log-Info "Skipping re-signing"
|
||||
return
|
||||
}
|
||||
|
||||
# Sign the executable using smctl
|
||||
try {
|
||||
# smctl sign command with keypair-alias
|
||||
$signArgs = @(
|
||||
"sign",
|
||||
"--keypair-alias", $env:SM_KEYPAIR_ALIAS,
|
||||
"--input", $ExePath,
|
||||
"--verbose"
|
||||
)
|
||||
|
||||
Log-Debug "Running: $SmctlPath $($signArgs -join ' ')"
|
||||
|
||||
$signOutput = & $SmctlPath $signArgs 2>&1 | Out-String
|
||||
|
||||
if ($LASTEXITCODE -ne 0) {
|
||||
Log-Error "Signing output: $signOutput"
|
||||
throw "Signing failed with exit code: $LASTEXITCODE"
|
||||
}
|
||||
|
||||
Log-Debug "Signing output: $signOutput"
|
||||
Log-Success "Signing command completed"
|
||||
|
||||
} catch {
|
||||
throw "Failed to sign $fileName : $_"
|
||||
}
|
||||
|
||||
# Verify signature
|
||||
$newSig = Get-AuthenticodeSignature $ExePath
|
||||
|
||||
if ($newSig.Status -eq "Valid") {
|
||||
Log-Success "$fileName signed successfully"
|
||||
Log-Info "Signed by: $($newSig.SignerCertificate.Subject)"
|
||||
Log-Info "Thumbprint: $($newSig.SignerCertificate.Thumbprint)"
|
||||
Log-Info "Valid from: $($newSig.SignerCertificate.NotBefore) to $($newSig.SignerCertificate.NotAfter)"
|
||||
} else {
|
||||
throw "$fileName signature verification failed: $($newSig.Status) - $($newSig.StatusMessage)"
|
||||
}
|
||||
}
|
||||
|
||||
# Cleanup function
|
||||
function Cleanup {
|
||||
if ($global:TEMP_CERT_PATH -and (Test-Path $global:TEMP_CERT_PATH)) {
|
||||
try {
|
||||
Remove-Item $global:TEMP_CERT_PATH -Force
|
||||
Log-Info "Cleaned up temporary certificate"
|
||||
} catch {
|
||||
Log-Error "Failed to cleanup temporary certificate: $_"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
# Main execution
|
||||
try {
|
||||
Write-Host "========================================" -ForegroundColor Cyan
|
||||
Write-Host " Windows Code Signing for Bun" -ForegroundColor Cyan
|
||||
Write-Host "========================================" -ForegroundColor Cyan
|
||||
|
||||
# Ensure we're in a VS environment
|
||||
Ensure-VSEnvironment
|
||||
|
||||
# Check environment variables
|
||||
Check-Environment
|
||||
|
||||
# Setup certificate
|
||||
Setup-Certificate
|
||||
|
||||
# Install and configure KeyLocker
|
||||
$smctlPath = Install-KeyLocker
|
||||
Configure-KeyLocker -SmctlPath $smctlPath
|
||||
|
||||
# Sign both executables
|
||||
Sign-Executable -ExePath $BunProfileExe -SmctlPath $smctlPath
|
||||
Sign-Executable -ExePath $BunExe -SmctlPath $smctlPath
|
||||
|
||||
Write-Host "========================================" -ForegroundColor Green
|
||||
Write-Host " Code signing completed successfully!" -ForegroundColor Green
|
||||
Write-Host "========================================" -ForegroundColor Green
|
||||
|
||||
exit 0
|
||||
|
||||
} catch {
|
||||
Log-Error "Code signing failed: $_"
|
||||
exit 1
|
||||
|
||||
} finally {
|
||||
Cleanup
|
||||
}
|
||||
@@ -1,43 +0,0 @@
|
||||
---
|
||||
allowed-tools: Bash(gh issue view:*), Bash(gh search:*), Bash(gh issue list:*), Bash(gh api:*), Bash(gh issue comment:*)
|
||||
description: Find duplicate GitHub issues
|
||||
---
|
||||
|
||||
# Issue deduplication command
|
||||
|
||||
Find up to 3 likely duplicate issues for a given GitHub issue.
|
||||
|
||||
To do this, follow these steps precisely:
|
||||
|
||||
1. Use an agent to check if the GitHub issue (a) is closed, (b) does not need to be deduped (eg. because it is broad product feedback without a specific solution, or positive feedback), or (c) already has a duplicate detection comment (check for the exact HTML marker `<!-- dedupe-bot:marker -->` in the issue comments - ignore other bot comments). If so, do not proceed.
|
||||
2. Use an agent to view a GitHub issue, and ask the agent to return a summary of the issue
|
||||
3. Then, launch 5 parallel agents to search GitHub for duplicates of this issue, using diverse keywords and search approaches, using the summary from Step 2. **IMPORTANT**: Always scope searches with `repo:owner/repo` to constrain results to the current repository only.
|
||||
4. Next, feed the results from Steps 2 and 3 into another agent, so that it can filter out false positives, that are likely not actually duplicates of the original issue. If there are no duplicates remaining, do not proceed.
|
||||
5. Finally, comment back on the issue with a list of up to three duplicate issues (or zero, if there are no likely duplicates)
|
||||
|
||||
Notes (be sure to tell this to your agents, too):
|
||||
|
||||
- Use `gh` to interact with GitHub, rather than web fetch
|
||||
- Do not use other tools, beyond `gh` (eg. don't use other MCP servers, file edit, etc.)
|
||||
- Make a todo list first
|
||||
- Always scope searches with `repo:owner/repo` to prevent cross-repo false positives
|
||||
- For your comment, follow the following format precisely (assuming for this example that you found 3 suspected duplicates):
|
||||
|
||||
---
|
||||
|
||||
Found 3 possible duplicate issues:
|
||||
|
||||
1. <link to issue>
|
||||
2. <link to issue>
|
||||
3. <link to issue>
|
||||
|
||||
This issue will be automatically closed as a duplicate in 3 days.
|
||||
|
||||
- If your issue is a duplicate, please close it and 👍 the existing issue instead
|
||||
- To prevent auto-closure, add a comment or 👎 this comment
|
||||
|
||||
🤖 Generated with [Claude Code](https://claude.ai/code)
|
||||
|
||||
<!-- dedupe-bot:marker -->
|
||||
|
||||
---
|
||||
@@ -1,88 +0,0 @@
|
||||
#!/usr/bin/env bun
|
||||
import { extname } from "path";
|
||||
import { spawnSync } from "child_process";
|
||||
|
||||
const input = await Bun.stdin.json();
|
||||
|
||||
const toolName = input.tool_name;
|
||||
const toolInput = input.tool_input || {};
|
||||
const filePath = toolInput.file_path;
|
||||
|
||||
// Only process Write, Edit, and MultiEdit tools
|
||||
if (!["Write", "Edit", "MultiEdit"].includes(toolName)) {
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
const ext = extname(filePath);
|
||||
|
||||
// Only format known files
|
||||
if (!filePath) {
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
function formatZigFile() {
|
||||
try {
|
||||
// Format the Zig file
|
||||
const result = spawnSync("vendor/zig/zig.exe", ["fmt", filePath], {
|
||||
cwd: process.env.CLAUDE_PROJECT_DIR || process.cwd(),
|
||||
encoding: "utf-8",
|
||||
});
|
||||
|
||||
if (result.error) {
|
||||
console.error(`Failed to format ${filePath}: ${result.error.message}`);
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
if (result.status !== 0) {
|
||||
console.error(`zig fmt failed for ${filePath}:`);
|
||||
if (result.stderr) {
|
||||
console.error(result.stderr);
|
||||
}
|
||||
process.exit(0);
|
||||
}
|
||||
} catch (error) {}
|
||||
}
|
||||
|
||||
function formatTypeScriptFile() {
|
||||
try {
|
||||
// Format the TypeScript file
|
||||
const result = spawnSync(
|
||||
"./node_modules/.bin/prettier",
|
||||
["--plugin=prettier-plugin-organize-imports", "--config", ".prettierrc", "--write", filePath],
|
||||
{
|
||||
cwd: process.env.CLAUDE_PROJECT_DIR || process.cwd(),
|
||||
encoding: "utf-8",
|
||||
},
|
||||
);
|
||||
} catch (error) {}
|
||||
}
|
||||
|
||||
if (ext === ".zig") {
|
||||
formatZigFile();
|
||||
} else if (
|
||||
[
|
||||
".cjs",
|
||||
".css",
|
||||
".html",
|
||||
".js",
|
||||
".json",
|
||||
".jsonc",
|
||||
".jsx",
|
||||
".less",
|
||||
".mjs",
|
||||
".pcss",
|
||||
".postcss",
|
||||
".sass",
|
||||
".scss",
|
||||
".styl",
|
||||
".stylus",
|
||||
".toml",
|
||||
".ts",
|
||||
".tsx",
|
||||
".yaml",
|
||||
].includes(ext)
|
||||
) {
|
||||
formatTypeScriptFile();
|
||||
}
|
||||
|
||||
process.exit(0);
|
||||
@@ -1,207 +0,0 @@
|
||||
#!/usr/bin/env bun
|
||||
import { basename, extname } from "path";
|
||||
|
||||
const input = await Bun.stdin.json();
|
||||
|
||||
const toolName = input.tool_name;
|
||||
const toolInput = input.tool_input || {};
|
||||
const command = toolInput.command || "";
|
||||
const timeout = toolInput.timeout;
|
||||
const cwd = input.cwd || "";
|
||||
|
||||
// Get environment variables from the hook context
|
||||
// Note: We check process.env directly as env vars are inherited
|
||||
let useSystemBun = process.env.USE_SYSTEM_BUN;
|
||||
|
||||
if (toolName !== "Bash" || !command) {
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
function denyWithReason(reason) {
|
||||
const output = {
|
||||
hookSpecificOutput: {
|
||||
hookEventName: "PreToolUse",
|
||||
permissionDecision: "deny",
|
||||
permissionDecisionReason: reason,
|
||||
},
|
||||
};
|
||||
console.log(JSON.stringify(output));
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
// Parse the command to extract argv0 and positional args
|
||||
let tokens;
|
||||
try {
|
||||
// Simple shell parsing - split on spaces but respect quotes (both single and double)
|
||||
tokens = command.match(/(?:[^\s"']+|"[^"]*"|'[^']*')+/g)?.map(t => t.replace(/^['"]|['"]$/g, "")) || [];
|
||||
} catch {
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
if (tokens.length === 0) {
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
// Strip inline environment variable assignments (e.g., FOO=1 bun test)
|
||||
const inlineEnv = new Map();
|
||||
let commandStart = 0;
|
||||
while (
|
||||
commandStart < tokens.length &&
|
||||
/^[A-Za-z_][A-Za-z0-9_]*=/.test(tokens[commandStart]) &&
|
||||
!tokens[commandStart].includes("/")
|
||||
) {
|
||||
const [name, value = ""] = tokens[commandStart].split("=", 2);
|
||||
inlineEnv.set(name, value);
|
||||
commandStart++;
|
||||
}
|
||||
if (commandStart >= tokens.length) {
|
||||
process.exit(0);
|
||||
}
|
||||
tokens = tokens.slice(commandStart);
|
||||
useSystemBun = inlineEnv.get("USE_SYSTEM_BUN") ?? useSystemBun;
|
||||
|
||||
// Get the executable name (argv0)
|
||||
const argv0 = basename(tokens[0], extname(tokens[0]));
|
||||
|
||||
// Check if it's zig or zig.exe
|
||||
if (argv0 === "zig") {
|
||||
// Filter out flags (starting with -) to get positional arguments
|
||||
const positionalArgs = tokens.slice(1).filter(arg => !arg.startsWith("-"));
|
||||
|
||||
// Check if the positional args contain "build" followed by "obj"
|
||||
if (positionalArgs.length >= 2 && positionalArgs[0] === "build" && positionalArgs[1] === "obj") {
|
||||
denyWithReason("error: Use `bun bd` to build Bun and wait patiently");
|
||||
}
|
||||
}
|
||||
|
||||
// Check if argv0 is timeout and the command is "bun bd"
|
||||
if (argv0 === "timeout") {
|
||||
// Find the actual command after timeout and its arguments
|
||||
const timeoutArgEndIndex = tokens.slice(1).findIndex(t => !t.startsWith("-") && !/^\d/.test(t));
|
||||
if (timeoutArgEndIndex === -1) {
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
const actualCommandIndex = timeoutArgEndIndex + 1;
|
||||
if (actualCommandIndex >= tokens.length) {
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
const actualCommand = basename(tokens[actualCommandIndex]);
|
||||
const restArgs = tokens.slice(actualCommandIndex + 1);
|
||||
|
||||
// Check if it's "bun bd" or "bun-debug bd" without other positional args
|
||||
if (actualCommand === "bun" || actualCommand.includes("bun-debug")) {
|
||||
// Claude is a sneaky fucker
|
||||
let positionalArgs = restArgs.filter(arg => !arg.startsWith("-"));
|
||||
const redirectStderrToStdoutIndex = positionalArgs.findIndex(arg => arg === "2>&1");
|
||||
if (redirectStderrToStdoutIndex !== -1) {
|
||||
positionalArgs.splice(redirectStderrToStdoutIndex, 1);
|
||||
}
|
||||
const redirectStdoutToStderrIndex = positionalArgs.findIndex(arg => arg === "1>&2");
|
||||
if (redirectStdoutToStderrIndex !== -1) {
|
||||
positionalArgs.splice(redirectStdoutToStderrIndex, 1);
|
||||
}
|
||||
|
||||
const redirectToFileIndex = positionalArgs.findIndex(arg => arg === ">");
|
||||
if (redirectToFileIndex !== -1) {
|
||||
positionalArgs.splice(redirectToFileIndex, 2);
|
||||
}
|
||||
|
||||
const redirectToFileAppendIndex = positionalArgs.findIndex(arg => arg === ">>");
|
||||
if (redirectToFileAppendIndex !== -1) {
|
||||
positionalArgs.splice(redirectToFileAppendIndex, 2);
|
||||
}
|
||||
|
||||
const redirectTOFileInlineIndex = positionalArgs.findIndex(arg => arg.startsWith(">"));
|
||||
if (redirectTOFileInlineIndex !== -1) {
|
||||
positionalArgs.splice(redirectTOFileInlineIndex, 1);
|
||||
}
|
||||
|
||||
const pipeIndex = positionalArgs.findIndex(arg => arg === "|");
|
||||
if (pipeIndex !== -1) {
|
||||
positionalArgs = positionalArgs.slice(0, pipeIndex);
|
||||
}
|
||||
|
||||
positionalArgs = positionalArgs.map(arg => arg.trim()).filter(Boolean);
|
||||
|
||||
if (positionalArgs.length === 1 && positionalArgs[0] === "bd") {
|
||||
denyWithReason("error: Run `bun bd` without a timeout");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check if command is "bun .* test" or "bun-debug test" with -u/--update-snapshots AND -t/--test-name-pattern
|
||||
if (argv0 === "bun" || argv0.includes("bun-debug")) {
|
||||
const allArgs = tokens.slice(1);
|
||||
|
||||
// Check if "test" is in positional args or "bd" followed by "test"
|
||||
const positionalArgs = allArgs.filter(arg => !arg.startsWith("-"));
|
||||
const hasTest = positionalArgs.includes("test") || (positionalArgs[0] === "bd" && positionalArgs[1] === "test");
|
||||
|
||||
if (hasTest) {
|
||||
const hasUpdateSnapshots = allArgs.some(arg => arg === "-u" || arg === "--update-snapshots");
|
||||
const hasTestNamePattern = allArgs.some(arg => arg === "-t" || arg === "--test-name-pattern");
|
||||
|
||||
if (hasUpdateSnapshots && hasTestNamePattern) {
|
||||
denyWithReason("error: Cannot use -u/--update-snapshots with -t/--test-name-pattern");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check if timeout option is set for "bun bd" command
|
||||
if (timeout !== undefined && (argv0 === "bun" || argv0.includes("bun-debug"))) {
|
||||
const positionalArgs = tokens.slice(1).filter(arg => !arg.startsWith("-"));
|
||||
if (positionalArgs.length === 1 && positionalArgs[0] === "bd") {
|
||||
denyWithReason("error: Run `bun bd` without a timeout");
|
||||
}
|
||||
}
|
||||
|
||||
// Check if running "bun test <file>" without USE_SYSTEM_BUN=1
|
||||
if ((argv0 === "bun" || argv0.includes("bun-debug")) && useSystemBun !== "1") {
|
||||
const allArgs = tokens.slice(1);
|
||||
const positionalArgs = allArgs.filter(arg => !arg.startsWith("-"));
|
||||
|
||||
// Check if it's "test" (not "bd test")
|
||||
if (positionalArgs.length >= 1 && positionalArgs[0] === "test" && positionalArgs[0] !== "bd") {
|
||||
denyWithReason(
|
||||
"error: In development, use `bun bd test <file>` to test your changes. If you meant to use a release version, set USE_SYSTEM_BUN=1",
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Check if running "bun bd test" from bun repo root or test folder without a file path
|
||||
if (argv0 === "bun" || argv0.includes("bun-debug")) {
|
||||
const allArgs = tokens.slice(1);
|
||||
const positionalArgs = allArgs.filter(arg => !arg.startsWith("-"));
|
||||
|
||||
// Check if it's "bd test"
|
||||
if (positionalArgs.length >= 2 && positionalArgs[0] === "bd" && positionalArgs[1] === "test") {
|
||||
// Check if cwd is the bun repo root or test folder
|
||||
const isBunRepoRoot = cwd === "/workspace/bun" || cwd.endsWith("/bun");
|
||||
const isTestFolder = cwd.endsWith("/bun/test");
|
||||
|
||||
if (isBunRepoRoot || isTestFolder) {
|
||||
// Check if there's a file path argument (looks like a path: contains / or has test extension)
|
||||
const hasFilePath = positionalArgs
|
||||
.slice(2)
|
||||
.some(
|
||||
arg =>
|
||||
arg.includes("/") ||
|
||||
arg.endsWith(".test.ts") ||
|
||||
arg.endsWith(".test.js") ||
|
||||
arg.endsWith(".test.tsx") ||
|
||||
arg.endsWith(".test.jsx"),
|
||||
);
|
||||
|
||||
if (!hasFilePath) {
|
||||
denyWithReason(
|
||||
"error: `bun bd test` from repo root or test folder will run all tests. Use `bun bd test <path>` with a specific test file.",
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Allow the command to proceed
|
||||
process.exit(0);
|
||||
@@ -1,26 +0,0 @@
|
||||
{
|
||||
"hooks": {
|
||||
"PreToolUse": [
|
||||
{
|
||||
"matcher": "Bash",
|
||||
"hooks": [
|
||||
{
|
||||
"type": "command",
|
||||
"command": "\"$CLAUDE_PROJECT_DIR\"/.claude/hooks/pre-bash-zig-build.js"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"PostToolUse": [
|
||||
{
|
||||
"matcher": "Write|Edit|MultiEdit",
|
||||
"hooks": [
|
||||
{
|
||||
"type": "command",
|
||||
"command": "\"$CLAUDE_PROJECT_DIR\"/.claude/hooks/post-edit-zig-format.js"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
143
.coderabbit.yaml
143
.coderabbit.yaml
@@ -1,143 +0,0 @@
|
||||
language: en-US
|
||||
|
||||
reviews:
|
||||
profile: assertive
|
||||
request_changes_workflow: false
|
||||
high_level_summary: false
|
||||
high_level_summary_placeholder: "@coderabbitai summary"
|
||||
high_level_summary_in_walkthrough: true
|
||||
auto_title_placeholder: "@coderabbitai"
|
||||
review_status: false
|
||||
commit_status: false
|
||||
fail_commit_status: false
|
||||
collapse_walkthrough: false
|
||||
changed_files_summary: true
|
||||
sequence_diagrams: false
|
||||
estimate_code_review_effort: false
|
||||
assess_linked_issues: true
|
||||
related_issues: true
|
||||
related_prs: true
|
||||
suggested_labels: false
|
||||
suggested_reviewers: true
|
||||
in_progress_fortune: false
|
||||
poem: false
|
||||
abort_on_close: true
|
||||
|
||||
path_filters:
|
||||
- "!test/js/node/test/"
|
||||
|
||||
auto_review:
|
||||
enabled: true
|
||||
auto_incremental_review: true
|
||||
drafts: false
|
||||
|
||||
finishing_touches:
|
||||
docstrings:
|
||||
enabled: false
|
||||
unit_tests:
|
||||
enabled: false
|
||||
|
||||
pre_merge_checks:
|
||||
docstrings:
|
||||
mode: off
|
||||
title:
|
||||
mode: warning
|
||||
description:
|
||||
mode: warning
|
||||
issue_assessment:
|
||||
mode: warning
|
||||
|
||||
tools:
|
||||
shellcheck:
|
||||
enabled: true
|
||||
ruff:
|
||||
enabled: true
|
||||
markdownlint:
|
||||
enabled: true
|
||||
github-checks:
|
||||
enabled: true
|
||||
timeout_ms: 90000
|
||||
languagetool:
|
||||
enabled: true
|
||||
enabled_only: false
|
||||
level: default
|
||||
biome:
|
||||
enabled: true
|
||||
hadolint:
|
||||
enabled: true
|
||||
swiftlint:
|
||||
enabled: true
|
||||
phpstan:
|
||||
enabled: true
|
||||
level: default
|
||||
phpmd:
|
||||
enabled: true
|
||||
phpcs:
|
||||
enabled: true
|
||||
golangci-lint:
|
||||
enabled: true
|
||||
yamllint:
|
||||
enabled: true
|
||||
gitleaks:
|
||||
enabled: true
|
||||
checkov:
|
||||
enabled: true
|
||||
detekt:
|
||||
enabled: true
|
||||
eslint:
|
||||
enabled: true
|
||||
flake8:
|
||||
enabled: true
|
||||
rubocop:
|
||||
enabled: true
|
||||
buf:
|
||||
enabled: true
|
||||
regal:
|
||||
enabled: true
|
||||
actionlint:
|
||||
enabled: true
|
||||
pmd:
|
||||
enabled: true
|
||||
clang:
|
||||
enabled: true
|
||||
cppcheck:
|
||||
enabled: true
|
||||
semgrep:
|
||||
enabled: true
|
||||
circleci:
|
||||
enabled: true
|
||||
clippy:
|
||||
enabled: true
|
||||
sqlfluff:
|
||||
enabled: true
|
||||
prismaLint:
|
||||
enabled: true
|
||||
pylint:
|
||||
enabled: true
|
||||
oxc:
|
||||
enabled: true
|
||||
shopifyThemeCheck:
|
||||
enabled: true
|
||||
luacheck:
|
||||
enabled: true
|
||||
brakeman:
|
||||
enabled: true
|
||||
dotenvLint:
|
||||
enabled: true
|
||||
htmlhint:
|
||||
enabled: true
|
||||
checkmake:
|
||||
enabled: true
|
||||
osvScanner:
|
||||
enabled: true
|
||||
|
||||
chat:
|
||||
auto_reply: true
|
||||
|
||||
knowledge_base:
|
||||
opt_out: false
|
||||
code_guidelines:
|
||||
enabled: true
|
||||
filePatterns:
|
||||
- "**/.cursor/rules/*.mdc"
|
||||
- "**/CLAUDE.md"
|
||||
@@ -30,7 +30,7 @@ bun bd <file> <...args>
|
||||
Debug logs look like this:
|
||||
|
||||
```zig
|
||||
const log = bun.Output.scoped(.${SCOPE}, .hidden);
|
||||
const log = bun.Output.scoped(.${SCOPE}, false);
|
||||
|
||||
// ...later
|
||||
log("MY DEBUG LOG", .{})
|
||||
|
||||
1
.gitattributes
vendored
1
.gitattributes
vendored
@@ -16,7 +16,6 @@
|
||||
*.map text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.md text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.mdc text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.mdx text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.mjs text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.mts text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
|
||||
|
||||
4
.github/CODEOWNERS
vendored
4
.github/CODEOWNERS
vendored
@@ -3,7 +3,3 @@
|
||||
|
||||
# Tests
|
||||
/test/expectations.txt @Jarred-Sumner
|
||||
|
||||
# Types
|
||||
*.d.ts @alii
|
||||
/packages/bun-types/ @alii
|
||||
|
||||
2
.github/actions/bump/action.yml
vendored
2
.github/actions/bump/action.yml
vendored
@@ -25,7 +25,7 @@ runs:
|
||||
echo "version=$LATEST" >> $GITHUB_OUTPUT
|
||||
echo "message=$MESSAGE" >> $GITHUB_OUTPUT
|
||||
- name: Create Pull Request
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
uses: peter-evans/create-pull-request@v4
|
||||
with:
|
||||
add-paths: |
|
||||
CMakeLists.txt
|
||||
|
||||
47
.github/pull_request_template.md
vendored
47
.github/pull_request_template.md
vendored
@@ -1,3 +1,50 @@
|
||||
### What does this PR do?
|
||||
|
||||
<!-- **Please explain what your changes do**, example: -->
|
||||
|
||||
<!--
|
||||
|
||||
This adds a new flag --bail to bun test. When set, it will stop running tests after the first failure. This is useful for CI environments where you want to fail fast.
|
||||
|
||||
-->
|
||||
|
||||
- [ ] Documentation or TypeScript types (it's okay to leave the rest blank in this case)
|
||||
- [ ] Code changes
|
||||
|
||||
### How did you verify your code works?
|
||||
|
||||
<!-- **For code changes, please include automated tests**. Feel free to uncomment the line below -->
|
||||
|
||||
<!-- I wrote automated tests -->
|
||||
|
||||
<!-- If JavaScript/TypeScript modules or builtins changed:
|
||||
|
||||
- [ ] I included a test for the new code, or existing tests cover it
|
||||
- [ ] I ran my tests locally and they pass (`bun-debug test test-file-name.test`)
|
||||
|
||||
-->
|
||||
|
||||
<!-- If Zig files changed:
|
||||
|
||||
- [ ] I checked the lifetime of memory allocated to verify it's (1) freed and (2) only freed when it should be
|
||||
- [ ] I included a test for the new code, or an existing test covers it
|
||||
- [ ] JSValue used outside of the stack is either wrapped in a JSC.Strong or is JSValueProtect'ed
|
||||
- [ ] I wrote TypeScript/JavaScript tests and they pass locally (`bun-debug test test-file-name.test`)
|
||||
-->
|
||||
|
||||
<!-- If new methods, getters, or setters were added to a publicly exposed class:
|
||||
|
||||
- [ ] I added TypeScript types for the new methods, getters, or setters
|
||||
-->
|
||||
|
||||
<!-- If dependencies in tests changed:
|
||||
|
||||
- [ ] I made sure that specific versions of dependencies are used instead of ranged or tagged versions
|
||||
-->
|
||||
|
||||
<!-- If a new builtin ESM/CJS module was added:
|
||||
|
||||
- [ ] I updated Aliases in `module_loader.zig` to include the new module
|
||||
- [ ] I added a test that imports the module
|
||||
- [ ] I added a test that require() the module
|
||||
-->
|
||||
|
||||
103
.github/workflows/CLAUDE.md
vendored
103
.github/workflows/CLAUDE.md
vendored
@@ -1,103 +0,0 @@
|
||||
# GitHub Actions Workflow Maintenance Guide
|
||||
|
||||
This document provides guidance for maintaining the GitHub Actions workflows in this repository.
|
||||
|
||||
## format.yml Workflow
|
||||
|
||||
### Overview
|
||||
The `format.yml` workflow runs code formatters (Prettier, clang-format, and Zig fmt) on pull requests and pushes to main. It's optimized for speed by running all formatters in parallel.
|
||||
|
||||
### Key Components
|
||||
|
||||
#### 1. Clang-format Script (`scripts/run-clang-format.sh`)
|
||||
- **Purpose**: Formats C++ source and header files
|
||||
- **What it does**:
|
||||
- Reads C++ files from `cmake/sources/CxxSources.txt`
|
||||
- Finds all header files in `src/` and `packages/`
|
||||
- Excludes third-party directories (libuv, napi, deps, vendor, sqlite, etc.)
|
||||
- Requires specific clang-format version (no fallbacks)
|
||||
|
||||
**Important exclusions**:
|
||||
- `src/napi/` - Node API headers (third-party)
|
||||
- `src/bun.js/bindings/libuv/` - libuv headers (third-party)
|
||||
- `src/bun.js/bindings/sqlite/` - SQLite headers (third-party)
|
||||
- `src/bun.js/api/ffi-*.h` - FFI headers (generated/third-party)
|
||||
- `src/deps/` - Dependencies (third-party)
|
||||
- Files in `vendor/`, `third_party/`, `generated/` directories
|
||||
|
||||
#### 2. Parallel Execution
|
||||
The workflow runs all three formatters simultaneously:
|
||||
- Each formatter outputs with a prefix (`[prettier]`, `[clang-format]`, `[zig]`)
|
||||
- Output is streamed in real-time without blocking
|
||||
- Uses GitHub Actions groups (`::group::`) for collapsible sections
|
||||
|
||||
#### 3. Tool Installation
|
||||
|
||||
##### Clang-format-19
|
||||
- Installs ONLY `clang-format-19` package (not the entire LLVM toolchain)
|
||||
- Uses `--no-install-recommends --no-install-suggests` to skip unnecessary packages
|
||||
- Quiet installation with `-qq` and `-o=Dpkg::Use-Pty=0`
|
||||
|
||||
##### Zig
|
||||
- Downloads from `oven-sh/zig` releases (musl build for static linking)
|
||||
- URL: `https://github.com/oven-sh/zig/releases/download/autobuild-{COMMIT}/bootstrap-x86_64-linux-musl.zip`
|
||||
- Extracts to temp directory to avoid polluting the repository
|
||||
- Directory structure: `bootstrap-x86_64-linux-musl/zig`
|
||||
|
||||
### Updating the Workflow
|
||||
|
||||
#### To update Zig version:
|
||||
1. Find the new commit hash from https://github.com/oven-sh/zig/releases
|
||||
2. Replace the hash in the wget URL (line 65 of format.yml)
|
||||
3. Test that the URL is valid and the binary works
|
||||
|
||||
#### To update clang-format version:
|
||||
1. Update `LLVM_VERSION_MAJOR` environment variable at the top of format.yml
|
||||
2. Update the version check in `scripts/run-clang-format.sh`
|
||||
|
||||
#### To add/remove file exclusions:
|
||||
1. Edit the exclusion patterns in `scripts/run-clang-format.sh` (lines 34-39)
|
||||
2. Test locally to ensure the right files are being formatted
|
||||
|
||||
### Performance Optimizations
|
||||
1. **Parallel execution**: All formatters run simultaneously
|
||||
2. **Minimal installations**: Only required packages, no extras
|
||||
3. **Temp directories**: Tools downloaded to temp dirs, cleaned up after use
|
||||
4. **Streaming output**: Real-time feedback without buffering
|
||||
5. **Early start**: Formatting begins immediately after each tool is ready
|
||||
|
||||
### Troubleshooting
|
||||
|
||||
**If formatters appear to run sequentially:**
|
||||
- Check if output is being buffered (should use `sed` for line prefixing)
|
||||
- Ensure background processes use `&` and proper wait commands
|
||||
|
||||
**If third-party files are being formatted:**
|
||||
- Review exclusion patterns in `scripts/run-clang-format.sh`
|
||||
- Check if new third-party directories were added that need exclusion
|
||||
|
||||
**If clang-format installation is slow:**
|
||||
- Ensure using minimal package installation flags
|
||||
- Check if apt cache needs updating
|
||||
- Consider caching the clang-format binary between runs
|
||||
|
||||
### Testing Changes Locally
|
||||
|
||||
```bash
|
||||
# Test the clang-format script
|
||||
export LLVM_VERSION_MAJOR=19
|
||||
./scripts/run-clang-format.sh format
|
||||
|
||||
# Test with check mode (no modifications)
|
||||
./scripts/run-clang-format.sh check
|
||||
|
||||
# Test specific file exclusions
|
||||
./scripts/run-clang-format.sh format 2>&1 | grep -E "(libuv|napi|deps)"
|
||||
# Should return nothing if exclusions work correctly
|
||||
```
|
||||
|
||||
### Important Notes
|
||||
- The script defaults to **format** mode (modifies files)
|
||||
- Always test locally before pushing workflow changes
|
||||
- The musl Zig build works on glibc systems due to static linking
|
||||
- Keep the exclusion list updated as new third-party code is added
|
||||
19
.github/workflows/auto-assign-types.yml
vendored
19
.github/workflows/auto-assign-types.yml
vendored
@@ -1,19 +0,0 @@
|
||||
name: Auto Assign Types Issues
|
||||
|
||||
on:
|
||||
issues:
|
||||
types: [labeled]
|
||||
|
||||
jobs:
|
||||
auto-assign:
|
||||
runs-on: ubuntu-latest
|
||||
if: github.event.label.name == 'types'
|
||||
permissions:
|
||||
issues: write
|
||||
steps:
|
||||
- name: Assign to alii
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GH_REPO: ${{ github.repository }}
|
||||
run: |
|
||||
gh issue edit ${{ github.event.issue.number }} --add-assignee alii
|
||||
29
.github/workflows/auto-close-duplicates.yml
vendored
29
.github/workflows/auto-close-duplicates.yml
vendored
@@ -1,29 +0,0 @@
|
||||
name: Auto-close duplicate issues
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 9 * * *"
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
auto-close-duplicates:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 10
|
||||
concurrency:
|
||||
group: auto-close-duplicates-${{ github.repository }}
|
||||
cancel-in-progress: true
|
||||
permissions:
|
||||
contents: read
|
||||
issues: write
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
|
||||
- name: Auto-close duplicate issues
|
||||
run: bun run scripts/auto-close-duplicates.ts
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GITHUB_REPOSITORY: ${{ github.repository }}
|
||||
24
.github/workflows/auto-label-claude-prs.yml
vendored
24
.github/workflows/auto-label-claude-prs.yml
vendored
@@ -1,24 +0,0 @@
|
||||
name: Auto-label Claude PRs
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened]
|
||||
|
||||
jobs:
|
||||
auto-label:
|
||||
if: github.event.pull_request.user.login == 'robobun' || contains(github.event.pull_request.body, '🤖 Generated with')
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: Add claude label to PRs from robobun
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
github.rest.issues.addLabels({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
issue_number: context.issue.number,
|
||||
labels: ['claude']
|
||||
});
|
||||
34
.github/workflows/claude-dedupe-issues.yml
vendored
34
.github/workflows/claude-dedupe-issues.yml
vendored
@@ -1,34 +0,0 @@
|
||||
name: Claude Issue Dedupe
|
||||
on:
|
||||
issues:
|
||||
types: [opened]
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
issue_number:
|
||||
description: 'Issue number to process for duplicate detection'
|
||||
required: true
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
claude-dedupe-issues:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 10
|
||||
concurrency:
|
||||
group: claude-dedupe-issues-${{ github.event.issue.number || inputs.issue_number }}
|
||||
cancel-in-progress: true
|
||||
permissions:
|
||||
contents: read
|
||||
issues: write
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Run Claude Code slash command
|
||||
uses: anthropics/claude-code-base-action@beta
|
||||
with:
|
||||
prompt: "/dedupe ${{ github.repository }}/issues/${{ github.event.issue.number || inputs.issue_number }}"
|
||||
anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}
|
||||
claude_args: "--model claude-sonnet-4-5-20250929"
|
||||
claude_env: |
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
47
.github/workflows/claude.yml
vendored
47
.github/workflows/claude.yml
vendored
@@ -13,54 +13,23 @@ on:
|
||||
jobs:
|
||||
claude:
|
||||
if: |
|
||||
github.repository == 'oven-sh/bun' &&
|
||||
(
|
||||
(github.event_name == 'issue_comment' && (github.event.comment.author_association == 'MEMBER' || github.event.comment.author_association == 'OWNER' || github.event.comment.author_association == 'COLLABORATOR')) ||
|
||||
(github.event_name == 'pull_request_review_comment' && (github.event.comment.author_association == 'MEMBER' || github.event.comment.author_association == 'OWNER' || github.event.comment.author_association == 'COLLABORATOR')) ||
|
||||
(github.event_name == 'pull_request_review' && (github.event.review.author_association == 'MEMBER' || github.event.review.author_association == 'OWNER' || github.event.review.author_association == 'COLLABORATOR')) ||
|
||||
(github.event_name == 'issues' && (github.event.issue.author_association == 'MEMBER' || github.event.issue.author_association == 'OWNER' || github.event.issue.author_association == 'COLLABORATOR'))
|
||||
) &&
|
||||
(github.event_name == 'issue_comment' && contains(github.event.comment.body, '@claude')) ||
|
||||
(github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude')) ||
|
||||
(github.event_name == 'pull_request_review' && contains(github.event.review.body, '@claude')) ||
|
||||
(github.event_name == 'issues' && (contains(github.event.issue.body, '@claude') || contains(github.event.issue.title, '@claude')))
|
||||
runs-on: claude
|
||||
env:
|
||||
IS_SANDBOX: 1
|
||||
container:
|
||||
image: localhost:5000/claude-bun:latest
|
||||
options: --privileged --user 1000:1000
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
id-token: write
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
working-directory: /workspace/bun
|
||||
run: |
|
||||
git config --global user.email "claude-bot@bun.sh" && \
|
||||
git config --global user.name "Claude Bot" && \
|
||||
git config --global url."git@github.com:".insteadOf "https://github.com/" && \
|
||||
git config --global url."git@github.com:".insteadOf "http://github.com/" && \
|
||||
git config --global --add safe.directory /workspace/bun && \
|
||||
git config --global push.default current && \
|
||||
git config --global pull.rebase true && \
|
||||
git config --global init.defaultBranch main && \
|
||||
git config --global core.editor "vim" && \
|
||||
git config --global color.ui auto && \
|
||||
git config --global fetch.prune true && \
|
||||
git config --global diff.colorMoved zebra && \
|
||||
git config --global merge.conflictStyle diff3 && \
|
||||
git config --global rerere.enabled true && \
|
||||
git config --global core.autocrlf input
|
||||
git fetch origin ${{ github.event.pull_request.head.sha }}
|
||||
git checkout ${{ github.event.pull_request.head.ref }}
|
||||
git reset --hard origin/${{ github.event.pull_request.head.ref }}
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 1
|
||||
|
||||
- name: Run Claude Code
|
||||
id: claude
|
||||
uses: anthropics/claude-code-action@v1
|
||||
uses: anthropics/claude-code-action@beta
|
||||
with:
|
||||
timeout_minutes: "180"
|
||||
claude_args: |
|
||||
--dangerously-skip-permissions
|
||||
--system-prompt "You are working on the Bun codebase"
|
||||
claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}
|
||||
anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}
|
||||
|
||||
|
||||
22
.github/workflows/docs.yml
vendored
Normal file
22
.github/workflows/docs.yml
vendored
Normal file
@@ -0,0 +1,22 @@
|
||||
name: Docs
|
||||
|
||||
on:
|
||||
push:
|
||||
paths:
|
||||
- "docs/**"
|
||||
- "packages/bun-types/**.d.ts"
|
||||
- "CONTRIBUTING.md"
|
||||
branches:
|
||||
- main
|
||||
|
||||
jobs:
|
||||
deploy:
|
||||
name: Deploy
|
||||
runs-on: ubuntu-latest
|
||||
if: ${{ github.repository_owner == 'oven-sh' }}
|
||||
steps:
|
||||
# redeploy Vercel site when a file in `docs` changes
|
||||
# using VERCEL_DEPLOY_HOOK environment variable
|
||||
- name: Trigger Webhook
|
||||
run: |
|
||||
curl -v ${{ secrets.VERCEL_DEPLOY_HOOK }}
|
||||
90
.github/workflows/format.yml
vendored
90
.github/workflows/format.yml
vendored
@@ -8,8 +8,10 @@ on:
|
||||
workflow_dispatch:
|
||||
pull_request:
|
||||
merge_group:
|
||||
push:
|
||||
branches: ["main"]
|
||||
env:
|
||||
BUN_VERSION: "1.3.2"
|
||||
BUN_VERSION: "1.2.11"
|
||||
LLVM_VERSION: "19.1.7"
|
||||
LLVM_VERSION_MAJOR: "19"
|
||||
|
||||
@@ -35,75 +37,23 @@ jobs:
|
||||
- name: Setup Dependencies
|
||||
run: |
|
||||
bun install
|
||||
bun scripts/glob-sources.mjs
|
||||
- name: Format Code
|
||||
- name: Install LLVM
|
||||
run: |
|
||||
# Start prettier in background with prefixed output
|
||||
echo "::group::Prettier"
|
||||
(bun run prettier 2>&1 | sed 's/^/[prettier] /' || echo "[prettier] Failed with exit code $?") &
|
||||
PRETTIER_PID=$!
|
||||
|
||||
# Start clang-format installation and formatting in background with prefixed output
|
||||
echo "::group::Clang-format"
|
||||
(
|
||||
echo "[clang-format] Installing clang-format-${{ env.LLVM_VERSION_MAJOR }}..."
|
||||
wget -qO- https://apt.llvm.org/llvm-snapshot.gpg.key | sudo tee /etc/apt/trusted.gpg.d/apt.llvm.org.asc > /dev/null
|
||||
echo "deb http://apt.llvm.org/$(lsb_release -cs)/ llvm-toolchain-$(lsb_release -cs)-${{ env.LLVM_VERSION_MAJOR }} main" | sudo tee /etc/apt/sources.list.d/llvm.list > /dev/null
|
||||
sudo apt-get update -qq
|
||||
sudo apt-get install -y -qq --no-install-recommends --no-install-suggests -o=Dpkg::Use-Pty=0 clang-format-${{ env.LLVM_VERSION_MAJOR }}
|
||||
echo "[clang-format] Running clang-format..."
|
||||
LLVM_VERSION_MAJOR=${{ env.LLVM_VERSION_MAJOR }} ./scripts/run-clang-format.sh format 2>&1 | sed 's/^/[clang-format] /'
|
||||
) &
|
||||
CLANG_PID=$!
|
||||
|
||||
# Setup Zig in temp directory and run zig fmt in background with prefixed output
|
||||
echo "::group::Zig fmt"
|
||||
(
|
||||
ZIG_TEMP=$(mktemp -d)
|
||||
echo "[zig] Downloading Zig (musl build)..."
|
||||
wget -q -O "$ZIG_TEMP/zig.zip" https://github.com/oven-sh/zig/releases/download/autobuild-e0b7c318f318196c5f81fdf3423816a7b5bb3112/bootstrap-x86_64-linux-musl.zip
|
||||
unzip -q -d "$ZIG_TEMP" "$ZIG_TEMP/zig.zip"
|
||||
export PATH="$ZIG_TEMP/bootstrap-x86_64-linux-musl:$PATH"
|
||||
echo "[zig] Running zig fmt..."
|
||||
zig fmt src 2>&1 | sed 's/^/[zig] /'
|
||||
./scripts/sort-imports.ts src 2>&1 | sed 's/^/[zig] /'
|
||||
zig fmt src 2>&1 | sed 's/^/[zig] /'
|
||||
rm -rf "$ZIG_TEMP"
|
||||
) &
|
||||
ZIG_PID=$!
|
||||
|
||||
# Wait for all formatting tasks to complete
|
||||
echo ""
|
||||
echo "Running formatters in parallel..."
|
||||
FAILED=0
|
||||
|
||||
if ! wait $PRETTIER_PID; then
|
||||
echo "::error::Prettier failed"
|
||||
FAILED=1
|
||||
fi
|
||||
echo "::endgroup::"
|
||||
|
||||
if ! wait $CLANG_PID; then
|
||||
echo "::error::Clang-format failed"
|
||||
FAILED=1
|
||||
fi
|
||||
echo "::endgroup::"
|
||||
|
||||
if ! wait $ZIG_PID; then
|
||||
echo "::error::Zig fmt failed"
|
||||
FAILED=1
|
||||
fi
|
||||
echo "::endgroup::"
|
||||
|
||||
# Exit with error if any formatter failed
|
||||
if [ $FAILED -eq 1 ]; then
|
||||
echo "::error::One or more formatters failed"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "✅ All formatters completed successfully"
|
||||
- name: Ban Words
|
||||
curl -fsSL https://apt.llvm.org/llvm.sh | sudo bash -s -- ${{ env.LLVM_VERSION_MAJOR }} all
|
||||
- name: Setup Zig
|
||||
uses: mlugg/setup-zig@v1
|
||||
with:
|
||||
version: 0.14.0
|
||||
- name: Zig Format
|
||||
run: |
|
||||
bun ./test/internal/ban-words.test.ts
|
||||
git rm -f cmake/sources/*.txt || true
|
||||
bun scripts/zig-remove-unreferenced-top-level-decls.ts src/
|
||||
zig fmt src
|
||||
bun scripts/sortImports src
|
||||
zig fmt src
|
||||
- name: Prettier Format
|
||||
run: |
|
||||
bun run prettier
|
||||
- name: Clang Format
|
||||
run: |
|
||||
bun run clang-format
|
||||
- uses: autofix-ci/action@635ffb0c9798bd160680f18fd73371e355b85f27
|
||||
|
||||
41
.github/workflows/glob-sources.yml
vendored
Normal file
41
.github/workflows/glob-sources.yml
vendored
Normal file
@@ -0,0 +1,41 @@
|
||||
name: Glob Sources
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
workflow_dispatch:
|
||||
pull_request:
|
||||
|
||||
env:
|
||||
BUN_VERSION: "1.2.11"
|
||||
|
||||
jobs:
|
||||
glob-sources:
|
||||
name: Glob Sources
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Configure Git
|
||||
run: |
|
||||
git config --global core.autocrlf true
|
||||
git config --global core.ignorecase true
|
||||
git config --global core.precomposeUnicode true
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: ${{ env.BUN_VERSION }}
|
||||
- name: Setup Dependencies
|
||||
run: |
|
||||
bun install
|
||||
- name: Glob sources
|
||||
run: bun scripts/glob-sources.mjs
|
||||
- name: Commit
|
||||
uses: stefanzweifel/git-auto-commit-action@v5
|
||||
with:
|
||||
commit_message: "`bun scripts/glob-sources.mjs`"
|
||||
|
||||
@@ -5,8 +5,6 @@ env:
|
||||
on:
|
||||
issues:
|
||||
types: [labeled]
|
||||
pull_request_target:
|
||||
types: [labeled, opened, reopened, synchronize, unlabeled]
|
||||
|
||||
jobs:
|
||||
# on-bug:
|
||||
@@ -45,46 +43,9 @@ jobs:
|
||||
# token: ${{ secrets.GITHUB_TOKEN }}
|
||||
# issue-number: ${{ github.event.issue.number }}
|
||||
# labels: ${{ steps.add-labels.outputs.labels }}
|
||||
on-slop:
|
||||
runs-on: ubuntu-latest
|
||||
if: github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'slop')
|
||||
permissions:
|
||||
issues: write
|
||||
pull-requests: write
|
||||
contents: write
|
||||
steps:
|
||||
- name: Update PR title and body for slop and close
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
const pr = await github.rest.pulls.get({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
pull_number: context.issue.number
|
||||
});
|
||||
|
||||
await github.rest.pulls.update({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
pull_number: context.issue.number,
|
||||
title: 'ai slop',
|
||||
body: 'This PR has been marked as AI slop and the description has been updated to avoid confusion or misleading reviewers.\n\nMany AI PRs are fine, but sometimes they submit a PR too early, fail to test if the problem is real, fail to reproduce the problem, or fail to test that the problem is fixed. If you think this PR is not AI slop, please leave a comment.',
|
||||
state: 'closed'
|
||||
});
|
||||
|
||||
// Delete the branch if it's from a fork or if it's not a protected branch
|
||||
try {
|
||||
await github.rest.git.deleteRef({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
ref: `heads/${pr.data.head.ref}`
|
||||
});
|
||||
} catch (error) {
|
||||
console.log('Could not delete branch:', error.message);
|
||||
}
|
||||
on-labeled:
|
||||
runs-on: ubuntu-latest
|
||||
if: github.event_name == 'issues' && (github.event.label.name == 'crash' || github.event.label.name == 'needs repro')
|
||||
if: github.event.label.name == 'crash' || github.event.label.name == 'needs repro'
|
||||
permissions:
|
||||
issues: write
|
||||
steps:
|
||||
@@ -105,16 +66,11 @@ jobs:
|
||||
env:
|
||||
GITHUB_ISSUE_BODY: ${{ github.event.issue.body }}
|
||||
GITHUB_ISSUE_TITLE: ${{ github.event.issue.title }}
|
||||
GITHUB_ISSUE_NUMBER: ${{ github.event.issue.number }}
|
||||
shell: bash
|
||||
run: |
|
||||
LABELS=$(bun scripts/read-issue.ts)
|
||||
bun scripts/is-outdated.ts
|
||||
|
||||
# Check for patterns that should close the issue
|
||||
CLOSE_ACTION=$(bun scripts/handle-crash-patterns.ts)
|
||||
echo "close-action=$CLOSE_ACTION" >> $GITHUB_OUTPUT
|
||||
|
||||
if [[ -f "is-outdated.txt" ]]; then
|
||||
echo "is-outdated=true" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
@@ -123,10 +79,6 @@ jobs:
|
||||
echo "outdated=$(cat outdated.txt)" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
if [[ -f "is-standalone.txt" ]]; then
|
||||
echo "is-standalone=true" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
if [[ -f "is-very-outdated.txt" ]]; then
|
||||
echo "is-very-outdated=true" >> $GITHUB_OUTPUT
|
||||
LABELS="$LABELS,old-version"
|
||||
@@ -136,32 +88,9 @@ jobs:
|
||||
|
||||
echo "latest=$(cat LATEST)" >> $GITHUB_OUTPUT
|
||||
echo "labels=$LABELS" >> $GITHUB_OUTPUT
|
||||
rm -rf is-outdated.txt outdated.txt latest.txt is-very-outdated.txt is-standalone.txt
|
||||
- name: Close issue if pattern detected
|
||||
if: github.event.label.name == 'crash' && fromJson(steps.add-labels.outputs.close-action).close == true
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
const closeAction = ${{ fromJson(steps.add-labels.outputs.close-action) }};
|
||||
|
||||
// Comment with the reason
|
||||
await github.rest.issues.createComment({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
issue_number: context.issue.number,
|
||||
body: closeAction.comment
|
||||
});
|
||||
|
||||
// Close the issue
|
||||
await github.rest.issues.update({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
issue_number: context.issue.number,
|
||||
state: 'closed',
|
||||
state_reason: closeAction.reason
|
||||
});
|
||||
rm -rf is-outdated.txt outdated.txt latest.txt is-very-outdated.txt
|
||||
- name: Generate comment text with Sentry Link
|
||||
if: github.event.label.name == 'crash' && fromJson(steps.add-labels.outputs.close-action).close != true
|
||||
if: github.event.label.name == 'crash'
|
||||
# ignore if fail
|
||||
continue-on-error: true
|
||||
id: generate-comment-text
|
||||
@@ -195,17 +124,8 @@ jobs:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.issue.number }}
|
||||
labels: ${{ steps.add-labels.outputs.labels }}
|
||||
- name: Comment outdated (standalone executable)
|
||||
if: steps.add-labels.outputs.is-outdated == 'true' && steps.add-labels.outputs.is-standalone == 'true' && github.event.label.name == 'crash' && steps.generate-comment-text.outputs.sentry-link == ''
|
||||
uses: actions-cool/issues-helper@v3
|
||||
with:
|
||||
actions: "create-comment"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.issue.number }}
|
||||
body: |
|
||||
@${{ github.event.issue.user.login }}, the latest version of Bun is v${{ steps.add-labels.outputs.latest }}, but the standalone executable is running Bun v${{ steps.add-labels.outputs.outdated }}. When the CLI using Bun's single-file executable next updates it might be fixed.
|
||||
- name: Comment outdated
|
||||
if: steps.add-labels.outputs.is-outdated == 'true' && steps.add-labels.outputs.is-standalone != 'true' && github.event.label.name == 'crash' && steps.generate-comment-text.outputs.sentry-link == ''
|
||||
if: steps.add-labels.outputs.is-outdated == 'true' && github.event.label.name == 'crash' && steps.generate-comment-text.outputs.sentry-link == ''
|
||||
uses: actions-cool/issues-helper@v3
|
||||
with:
|
||||
actions: "create-comment"
|
||||
@@ -219,22 +139,8 @@ jobs:
|
||||
```sh
|
||||
bun upgrade
|
||||
```
|
||||
- name: Comment with Sentry Link and outdated version (standalone executable)
|
||||
if: steps.generate-comment-text.outputs.sentry-link != '' && github.event.label.name == 'crash' && steps.add-labels.outputs.is-outdated == 'true' && steps.add-labels.outputs.is-standalone == 'true'
|
||||
uses: actions-cool/issues-helper@v3
|
||||
with:
|
||||
actions: "create-comment"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.issue.number }}
|
||||
body: |
|
||||
@${{ github.event.issue.user.login }}, thank you for reporting this crash. The latest version of Bun is v${{ steps.add-labels.outputs.latest }}, but the standalone executable is running Bun v${{ steps.add-labels.outputs.outdated }}. When the CLI using Bun's single-file executable next updates it might be fixed.
|
||||
|
||||
For Bun's internal tracking, this issue is [${{ steps.generate-comment-text.outputs.sentry-id }}](${{ steps.generate-comment-text.outputs.sentry-link }}).
|
||||
|
||||
<!-- sentry-id: ${{ steps.generate-comment-text.outputs.sentry-id }} -->
|
||||
<!-- sentry-link: ${{ steps.generate-comment-text.outputs.sentry-link }} -->
|
||||
- name: Comment with Sentry Link and outdated version
|
||||
if: steps.generate-comment-text.outputs.sentry-link != '' && github.event.label.name == 'crash' && steps.add-labels.outputs.is-outdated == 'true' && steps.add-labels.outputs.is-standalone != 'true'
|
||||
if: steps.generate-comment-text.outputs.sentry-link != '' && github.event.label.name == 'crash' && steps.add-labels.outputs.is-outdated == 'true'
|
||||
uses: actions-cool/issues-helper@v3
|
||||
with:
|
||||
actions: "create-comment"
|
||||
89
.github/workflows/on-submodule-update.yml
vendored
Normal file
89
.github/workflows/on-submodule-update.yml
vendored
Normal file
@@ -0,0 +1,89 @@
|
||||
name: Comment on updated submodule
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
paths:
|
||||
- "src/generated_versions_list.zig"
|
||||
- ".github/workflows/on-submodule-update.yml"
|
||||
|
||||
jobs:
|
||||
comment:
|
||||
name: Comment
|
||||
runs-on: ubuntu-latest
|
||||
if: ${{ github.repository_owner == 'oven-sh' }}
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
issues: write
|
||||
steps:
|
||||
- name: Checkout current
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
sparse-checkout: |
|
||||
src
|
||||
- name: Hash generated versions list
|
||||
id: hash
|
||||
run: |
|
||||
echo "hash=$(sha256sum src/generated_versions_list.zig | cut -d ' ' -f 1)" >> $GITHUB_OUTPUT
|
||||
- name: Checkout base
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ github.base_ref }}
|
||||
sparse-checkout: |
|
||||
src
|
||||
- name: Hash base
|
||||
id: base
|
||||
run: |
|
||||
echo "base=$(sha256sum src/generated_versions_list.zig | cut -d ' ' -f 1)" >> $GITHUB_OUTPUT
|
||||
- name: Compare
|
||||
id: compare
|
||||
run: |
|
||||
if [ "${{ steps.hash.outputs.hash }}" != "${{ steps.base.outputs.base }}" ]; then
|
||||
echo "changed=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "changed=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
- name: Find Comment
|
||||
id: comment
|
||||
uses: peter-evans/find-comment@v3
|
||||
with:
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
comment-author: github-actions[bot]
|
||||
body-includes: <!-- generated-comment submodule-updated -->
|
||||
- name: Write Warning Comment
|
||||
uses: peter-evans/create-or-update-comment@v4
|
||||
if: steps.compare.outputs.changed == 'true'
|
||||
with:
|
||||
comment-id: ${{ steps.comment.outputs.comment-id }}
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
edit-mode: replace
|
||||
body: |
|
||||
⚠️ **Warning:** @${{ github.actor }}, this PR has changes to submodule versions.
|
||||
|
||||
If this change was intentional, please ignore this message. If not, please undo changes to submodules and rebase your branch.
|
||||
|
||||
<!-- generated-comment submodule-updated -->
|
||||
- name: Add labels
|
||||
uses: actions-cool/issues-helper@v3
|
||||
if: steps.compare.outputs.changed == 'true'
|
||||
with:
|
||||
actions: "add-labels"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
labels: "changed-submodules"
|
||||
- name: Remove labels
|
||||
uses: actions-cool/issues-helper@v3
|
||||
if: steps.compare.outputs.changed == 'false'
|
||||
with:
|
||||
actions: "remove-labels"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
labels: "changed-submodules"
|
||||
- name: Delete outdated comment
|
||||
uses: actions-cool/issues-helper@v3
|
||||
if: steps.compare.outputs.changed == 'false' && steps.comment.outputs.comment-id != ''
|
||||
with:
|
||||
actions: "delete-comment"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
comment-id: ${{ steps.comment.outputs.comment-id }}
|
||||
19
.github/workflows/typos.yml
vendored
Normal file
19
.github/workflows/typos.yml
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
name: Typos
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
pull_request:
|
||||
branches:
|
||||
- main
|
||||
|
||||
jobs:
|
||||
docs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Spellcheck
|
||||
uses: crate-ci/typos@v1.29.4
|
||||
with:
|
||||
files: docs/**/*
|
||||
2
.github/workflows/update-cares.yml
vendored
2
.github/workflows/update-cares.yml
vendored
@@ -80,7 +80,7 @@ jobs:
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
uses: peter-evans/create-pull-request@v4
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
|
||||
102
.github/workflows/update-hdrhistogram.yml
vendored
102
.github/workflows/update-hdrhistogram.yml
vendored
@@ -1,102 +0,0 @@
|
||||
name: Update hdrhistogram
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 4 * * 0"
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
check-update:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Check hdrhistogram version
|
||||
id: check-version
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Extract the commit hash from the line after COMMIT
|
||||
CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildHdrHistogram.cmake)
|
||||
|
||||
if [ -z "$CURRENT_VERSION" ]; then
|
||||
echo "Error: Could not find COMMIT line in BuildHdrHistogram.cmake"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Validate that it looks like a git hash
|
||||
if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
|
||||
echo "Error: Invalid git hash format in BuildHdrHistogram.cmake"
|
||||
echo "Found: $CURRENT_VERSION"
|
||||
echo "Expected: 40 character hexadecimal string"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
|
||||
|
||||
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/HdrHistogram/HdrHistogram_c/releases/latest)
|
||||
if [ -z "$LATEST_RELEASE" ]; then
|
||||
echo "Error: Failed to fetch latest release from GitHub API"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
|
||||
if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
|
||||
echo "Error: Could not extract tag name from GitHub API response"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/HdrHistogram/HdrHistogram_c/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Try to get commit SHA from tag object (for annotated tags)
|
||||
# If it fails, assume it's a lightweight tag pointing directly to commit
|
||||
LATEST_SHA=$(curl -sL "https://api.github.com/repos/HdrHistogram/HdrHistogram_c/git/tags/$LATEST_TAG_SHA" 2>/dev/null | jq -r '.object.sha // empty')
|
||||
if [ -z "$LATEST_SHA" ]; then
|
||||
# Lightweight tag - SHA points directly to commit
|
||||
LATEST_SHA="$LATEST_TAG_SHA"
|
||||
fi
|
||||
|
||||
if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
|
||||
echo "Error: Invalid SHA format received from GitHub"
|
||||
echo "Found: $LATEST_SHA"
|
||||
echo "Expected: 40 character hexadecimal string"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
|
||||
echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Update version if needed
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
run: |
|
||||
set -euo pipefail
|
||||
# Handle multi-line format where COMMIT and its value are on separate lines
|
||||
sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildHdrHistogram.cmake
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
cmake/targets/BuildHdrHistogram.cmake
|
||||
commit-message: "deps: update hdrhistogram to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
|
||||
title: "deps: update hdrhistogram to ${{ steps.check-version.outputs.tag }}"
|
||||
delete-branch: true
|
||||
branch: deps/update-hdrhistogram-${{ github.run_number }}
|
||||
body: |
|
||||
## What does this PR do?
|
||||
|
||||
Updates hdrhistogram to version ${{ steps.check-version.outputs.tag }}
|
||||
|
||||
Compare: https://github.com/HdrHistogram/HdrHistogram_c/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
|
||||
|
||||
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-hdrhistogram.yml)
|
||||
118
.github/workflows/update-highway.yml
vendored
118
.github/workflows/update-highway.yml
vendored
@@ -1,118 +0,0 @@
|
||||
name: Update highway
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 4 * * 0"
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
check-update:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Check highway version
|
||||
id: check-version
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Extract the commit hash from the line after COMMIT
|
||||
CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildHighway.cmake)
|
||||
|
||||
if [ -z "$CURRENT_VERSION" ]; then
|
||||
echo "Error: Could not find COMMIT line in BuildHighway.cmake"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Validate that it looks like a git hash
|
||||
if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
|
||||
echo "Error: Invalid git hash format in BuildHighway.cmake"
|
||||
echo "Found: $CURRENT_VERSION"
|
||||
echo "Expected: 40 character hexadecimal string"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
|
||||
|
||||
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/google/highway/releases/latest)
|
||||
if [ -z "$LATEST_RELEASE" ]; then
|
||||
echo "Error: Failed to fetch latest release from GitHub API"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
|
||||
if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
|
||||
echo "Error: Could not extract tag name from GitHub API response"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
TAG_REF=$(curl -sL "https://api.github.com/repos/google/highway/git/refs/tags/$LATEST_TAG")
|
||||
if [ -z "$TAG_REF" ]; then
|
||||
echo "Error: Could not fetch tag reference for $LATEST_TAG"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
TAG_OBJECT_SHA=$(echo "$TAG_REF" | jq -r '.object.sha')
|
||||
TAG_OBJECT_TYPE=$(echo "$TAG_REF" | jq -r '.object.type')
|
||||
|
||||
if [ -z "$TAG_OBJECT_SHA" ] || [ "$TAG_OBJECT_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Handle both lightweight tags (type: commit) and annotated tags (type: tag)
|
||||
if [ "$TAG_OBJECT_TYPE" = "commit" ]; then
|
||||
# Lightweight tag - object.sha is already the commit SHA
|
||||
LATEST_SHA="$TAG_OBJECT_SHA"
|
||||
elif [ "$TAG_OBJECT_TYPE" = "tag" ]; then
|
||||
# Annotated tag - need to fetch the tag object to get the commit SHA
|
||||
LATEST_SHA=$(curl -sL "https://api.github.com/repos/google/highway/git/tags/$TAG_OBJECT_SHA" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch commit SHA for annotated tag $LATEST_TAG @ $TAG_OBJECT_SHA"
|
||||
exit 1
|
||||
fi
|
||||
else
|
||||
echo "Error: Unexpected tag object type: $TAG_OBJECT_TYPE"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
|
||||
echo "Error: Invalid SHA format received from GitHub"
|
||||
echo "Found: $LATEST_SHA"
|
||||
echo "Expected: 40 character hexadecimal string"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
|
||||
echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Update version if needed
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
run: |
|
||||
set -euo pipefail
|
||||
# Handle multi-line format where COMMIT and its value are on separate lines
|
||||
sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildHighway.cmake
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
cmake/targets/BuildHighway.cmake
|
||||
commit-message: "deps: update highway to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
|
||||
title: "deps: update highway to ${{ steps.check-version.outputs.tag }}"
|
||||
delete-branch: true
|
||||
branch: deps/update-highway-${{ github.run_number }}
|
||||
body: |
|
||||
## What does this PR do?
|
||||
|
||||
Updates highway to version ${{ steps.check-version.outputs.tag }}
|
||||
|
||||
Compare: https://github.com/google/highway/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
|
||||
|
||||
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-highway.yml)
|
||||
2
.github/workflows/update-libarchive.yml
vendored
2
.github/workflows/update-libarchive.yml
vendored
@@ -80,7 +80,7 @@ jobs:
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
uses: peter-evans/create-pull-request@v4
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
|
||||
2
.github/workflows/update-libdeflate.yml
vendored
2
.github/workflows/update-libdeflate.yml
vendored
@@ -80,7 +80,7 @@ jobs:
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
uses: peter-evans/create-pull-request@v4
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
|
||||
24
.github/workflows/update-lolhtml.yml
vendored
24
.github/workflows/update-lolhtml.yml
vendored
@@ -50,27 +50,15 @@ jobs:
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Get the commit SHA that the tag points to
|
||||
# This handles both lightweight tags (direct commit refs) and annotated tags (tag objects)
|
||||
TAG_REF_RESPONSE=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/refs/tags/$LATEST_TAG")
|
||||
LATEST_TAG_SHA=$(echo "$TAG_REF_RESPONSE" | jq -r '.object.sha')
|
||||
TAG_OBJECT_TYPE=$(echo "$TAG_REF_RESPONSE" | jq -r '.object.type')
|
||||
|
||||
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ "$TAG_OBJECT_TYPE" = "tag" ]; then
|
||||
# This is an annotated tag, we need to get the commit it points to
|
||||
LATEST_SHA=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch commit SHA for annotated tag $LATEST_TAG @ $LATEST_TAG_SHA"
|
||||
exit 1
|
||||
fi
|
||||
else
|
||||
# This is a lightweight tag pointing directly to a commit
|
||||
LATEST_SHA="$LATEST_TAG_SHA"
|
||||
LATEST_SHA=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
|
||||
@@ -92,7 +80,7 @@ jobs:
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
uses: peter-evans/create-pull-request@v4
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
|
||||
29
.github/workflows/update-lshpack.yml
vendored
29
.github/workflows/update-lshpack.yml
vendored
@@ -50,32 +50,15 @@ jobs:
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Get the tag reference, which contains both SHA and type
|
||||
TAG_REF=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/refs/tags/$LATEST_TAG")
|
||||
if [ -z "$TAG_REF" ]; then
|
||||
echo "Error: Could not fetch tag reference for $LATEST_TAG"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_TAG_SHA=$(echo "$TAG_REF" | jq -r '.object.sha')
|
||||
TAG_TYPE=$(echo "$TAG_REF" | jq -r '.object.type')
|
||||
|
||||
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# If it's an annotated tag, we need to dereference it to get the commit SHA
|
||||
# If it's a lightweight tag, the SHA already points to the commit
|
||||
if [ "$TAG_TYPE" = "tag" ]; then
|
||||
LATEST_SHA=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch commit SHA for annotated tag $LATEST_TAG"
|
||||
exit 1
|
||||
fi
|
||||
else
|
||||
# For lightweight tags, the SHA is already the commit SHA
|
||||
LATEST_SHA="$LATEST_TAG_SHA"
|
||||
LATEST_SHA=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
|
||||
@@ -97,7 +80,7 @@ jobs:
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
uses: peter-evans/create-pull-request@v4
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
|
||||
21
.github/workflows/update-sqlite3.yml
vendored
21
.github/workflows/update-sqlite3.yml
vendored
@@ -70,11 +70,28 @@ jobs:
|
||||
- name: Update SQLite if needed
|
||||
if: success() && steps.check-version.outputs.current_num < steps.check-version.outputs.latest_num
|
||||
run: |
|
||||
./scripts/update-sqlite-amalgamation.sh ${{ steps.check-version.outputs.latest_num }} ${{ steps.check-version.outputs.latest_year }}
|
||||
set -euo pipefail
|
||||
|
||||
TEMP_DIR=$(mktemp -d)
|
||||
cd $TEMP_DIR
|
||||
|
||||
echo "Downloading from: https://sqlite.org/${{ steps.check-version.outputs.latest_year }}/sqlite-amalgamation-${{ steps.check-version.outputs.latest_num }}.zip"
|
||||
|
||||
# Download and extract latest version
|
||||
wget "https://sqlite.org/${{ steps.check-version.outputs.latest_year }}/sqlite-amalgamation-${{ steps.check-version.outputs.latest_num }}.zip"
|
||||
unzip "sqlite-amalgamation-${{ steps.check-version.outputs.latest_num }}.zip"
|
||||
cd "sqlite-amalgamation-${{ steps.check-version.outputs.latest_num }}"
|
||||
|
||||
# Add header comment and copy files
|
||||
echo "// clang-format off" > $GITHUB_WORKSPACE/src/bun.js/bindings/sqlite/sqlite3.c
|
||||
cat sqlite3.c >> $GITHUB_WORKSPACE/src/bun.js/bindings/sqlite/sqlite3.c
|
||||
|
||||
echo "// clang-format off" > $GITHUB_WORKSPACE/src/bun.js/bindings/sqlite/sqlite3_local.h
|
||||
cat sqlite3.h >> $GITHUB_WORKSPACE/src/bun.js/bindings/sqlite/sqlite3_local.h
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current_num < steps.check-version.outputs.latest_num
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
uses: peter-evans/create-pull-request@v4
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
|
||||
79
.github/workflows/update-vendor.yml
vendored
79
.github/workflows/update-vendor.yml
vendored
@@ -1,79 +0,0 @@
|
||||
name: Update vendor
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 4 * * 0"
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
check-update:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
strategy:
|
||||
matrix:
|
||||
package:
|
||||
- elysia
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: oven-sh/setup-bun@v2
|
||||
|
||||
- name: Check version
|
||||
id: check-version
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Extract the commit hash from the line after COMMIT
|
||||
current=$(bun -p '(await Bun.file("test/vendor.json").json()).filter(v=>v.package===process.argv[1])[0].tag' ${{ matrix.package }})
|
||||
repository=$(bun -p '(await Bun.file("test/vendor.json").json()).filter(v=>v.package===process.argv[1])[0].repository' ${{ matrix.package }} | cut -d'/' -f4,5)
|
||||
|
||||
if [ -z "$current" ]; then
|
||||
echo "Error: Could not find COMMIT line in test/vendor.json"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "current=$current" >> $GITHUB_OUTPUT
|
||||
echo "repository=$repository" >> $GITHUB_OUTPUT
|
||||
|
||||
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/${repository}/releases/latest)
|
||||
if [ -z "$LATEST_RELEASE" ]; then
|
||||
echo "Error: Failed to fetch latest release from GitHub API"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
|
||||
if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
|
||||
echo "Error: Could not extract tag name from GitHub API response"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "latest=$LATEST_TAG" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Update version if needed
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
run: |
|
||||
set -euo pipefail
|
||||
bun -e 'await Bun.write("test/vendor.json", JSON.stringify((await Bun.file("test/vendor.json").json()).map(v=>{if(v.package===process.argv[1])v.tag=process.argv[2];return v;}), null, 2) + "\n")' ${{ matrix.package }} ${{ steps.check-version.outputs.latest }}
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
test/vendor.json
|
||||
commit-message: "deps: update ${{ matrix.package }} to ${{ steps.check-version.outputs.latest }} (${{ steps.check-version.outputs.latest }})"
|
||||
title: "deps: update ${{ matrix.package }} to ${{ steps.check-version.outputs.latest }}"
|
||||
delete-branch: true
|
||||
branch: deps/update-${{ matrix.package }}-${{ github.run_number }}
|
||||
body: |
|
||||
## What does this PR do?
|
||||
|
||||
Updates ${{ matrix.package }} to version ${{ steps.check-version.outputs.latest }}
|
||||
|
||||
Compare: https://github.com/${{ steps.check-version.outputs.repository }}/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
|
||||
|
||||
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-vendor.yml)
|
||||
2
.github/workflows/update-zstd.yml
vendored
2
.github/workflows/update-zstd.yml
vendored
@@ -80,7 +80,7 @@ jobs:
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
uses: peter-evans/create-pull-request@v4
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
|
||||
52
.github/workflows/vscode-release.yml
vendored
52
.github/workflows/vscode-release.yml
vendored
@@ -1,52 +0,0 @@
|
||||
name: VSCode Extension Publish
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
version:
|
||||
description: "Version to publish (e.g. 0.0.25) - Check the marketplace for the latest version"
|
||||
required: true
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
publish:
|
||||
name: "Publish to VS Code Marketplace"
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: "1.2.18"
|
||||
|
||||
- name: Install dependencies (root)
|
||||
run: bun install
|
||||
|
||||
- name: Install dependencies
|
||||
run: bun install
|
||||
working-directory: packages/bun-vscode
|
||||
|
||||
- name: Set Version
|
||||
run: bun pm version ${{ github.event.inputs.version }} --no-git-tag-version --allow-same-version
|
||||
working-directory: packages/bun-vscode
|
||||
|
||||
- name: Build (inspector protocol)
|
||||
run: bun install && bun run build
|
||||
working-directory: packages/bun-inspector-protocol
|
||||
|
||||
- name: Build (vscode extension)
|
||||
run: bun run build
|
||||
working-directory: packages/bun-vscode
|
||||
|
||||
- name: Publish
|
||||
if: success()
|
||||
run: bunx vsce publish
|
||||
env:
|
||||
VSCE_PAT: ${{ secrets.VSCODE_EXTENSION }}
|
||||
working-directory: packages/bun-vscode/extension
|
||||
|
||||
- uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: bun-vscode-${{ github.event.inputs.version }}.vsix
|
||||
path: packages/bun-vscode/extension/bun-vscode-${{ github.event.inputs.version }}.vsix
|
||||
9
.gitignore
vendored
9
.gitignore
vendored
@@ -1,9 +1,7 @@
|
||||
.claude/settings.local.json
|
||||
.DS_Store
|
||||
.env
|
||||
.envrc
|
||||
.eslintcache
|
||||
.gdb_history
|
||||
.idea
|
||||
.next
|
||||
.ninja_deps
|
||||
@@ -186,9 +184,4 @@ codegen-for-zig-team.tar.gz
|
||||
*.sock
|
||||
scratch*.{js,ts,tsx,cjs,mjs}
|
||||
|
||||
*.bun-build
|
||||
|
||||
scripts/lldb-inline
|
||||
|
||||
# We regenerate these in all the build scripts
|
||||
cmake/sources/*.txt
|
||||
*.bun-build
|
||||
@@ -7,8 +7,4 @@ src/react-refresh.js
|
||||
*.min.js
|
||||
test/snippets
|
||||
test/js/node/test
|
||||
test/napi/node-napi-tests
|
||||
bun.lock
|
||||
|
||||
# the output codeblocks need to stay minified
|
||||
docs/bundler/minifier.mdx
|
||||
|
||||
@@ -19,12 +19,6 @@
|
||||
"options": {
|
||||
"printWidth": 80
|
||||
}
|
||||
},
|
||||
{
|
||||
"files": ["src/codegen/bindgenv2/**/*.ts", "*.bindv2.ts"],
|
||||
"options": {
|
||||
"printWidth": 100
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
14
.vscode/launch.json
generated
vendored
14
.vscode/launch.json
generated
vendored
@@ -22,12 +22,6 @@
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_DEBUG_jest": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "1",
|
||||
// "BUN_JSC_validateExceptionChecks": "1",
|
||||
// "BUN_JSC_dumpSimulatedThrows": "1",
|
||||
// "BUN_JSC_unexpectedExceptionStackTraceLimit": "20",
|
||||
// "BUN_DESTRUCT_VM_ON_EXIT": "1",
|
||||
// "ASAN_OPTIONS": "allow_user_segv_handler=1:disable_coredump=0:detect_leaks=1:abort_on_error=1",
|
||||
// "LSAN_OPTIONS": "malloc_context_size=100:print_suppressions=1:suppressions=${workspaceFolder}/test/leaksan.supp",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
"sourceMap": {
|
||||
@@ -60,17 +54,11 @@
|
||||
"name": "bun run [file]",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug",
|
||||
"args": ["${file}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"cwd": "${fileDirname}",
|
||||
"env": {
|
||||
"FORCE_COLOR": "0",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
// "BUN_JSC_validateExceptionChecks": "1",
|
||||
// "BUN_JSC_dumpSimulatedThrows": "1",
|
||||
// "BUN_JSC_unexpectedExceptionStackTraceLimit": "20",
|
||||
// "BUN_DESTRUCT_VM_ON_EXIT": "1",
|
||||
// "ASAN_OPTIONS": "allow_user_segv_handler=1:disable_coredump=0:detect_leaks=1:abort_on_error=1",
|
||||
// "LSAN_OPTIONS": "malloc_context_size=100:print_suppressions=1:suppressions=${workspaceFolder}/test/leaksan.supp",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
"sourceMap": {
|
||||
|
||||
2
.vscode/settings.json
vendored
2
.vscode/settings.json
vendored
@@ -168,5 +168,5 @@
|
||||
"WebKit/WebInspectorUI": true,
|
||||
},
|
||||
"git.detectSubmodules": false,
|
||||
"bun.test.customScript": "./build/debug/bun-debug test",
|
||||
"bun.test.customScript": "bun-debug test"
|
||||
}
|
||||
|
||||
158
CLAUDE.md
158
CLAUDE.md
@@ -4,14 +4,18 @@ This is the Bun repository - an all-in-one JavaScript runtime & toolkit designed
|
||||
|
||||
### Build Commands
|
||||
|
||||
- **Build Bun**: `bun bd`
|
||||
- **Build debug version**: `bun bd` or `bun run build:debug`
|
||||
- Creates a debug build at `./build/debug/bun-debug`
|
||||
- **CRITICAL**: no need for a timeout, the build is really fast!
|
||||
- Compilation takes ~2.5 minutes
|
||||
- **Run tests with your debug build**: `bun bd test <test-file>`
|
||||
- **CRITICAL**: Never use `bun test` directly - it won't include your changes
|
||||
- **Run any command with debug build**: `bun bd <command>`
|
||||
|
||||
Tip: Bun is already installed and in $PATH. The `bd` subcommand is a package.json script.
|
||||
### Other Build Variants
|
||||
|
||||
- `bun run build:release` - Release build
|
||||
|
||||
Address sanitizer is enabled by default in debug builds of Bun.
|
||||
|
||||
## Testing
|
||||
|
||||
@@ -23,83 +27,48 @@ Tip: Bun is already installed and in $PATH. The `bd` subcommand is a package.jso
|
||||
|
||||
### Test Organization
|
||||
|
||||
If a test is for a specific numbered GitHub Issue, it should be placed in `test/regression/issue/${issueNumber}.test.ts`. Ensure the issue number is **REAL** and not a placeholder!
|
||||
|
||||
If no valid issue number is provided, find the best existing file to modify instead, such as;
|
||||
|
||||
- `test/js/bun/` - Bun-specific API tests (http, crypto, ffi, shell, etc.)
|
||||
- `test/js/node/` - Node.js compatibility tests
|
||||
- `test/js/web/` - Web API tests (fetch, WebSocket, streams, etc.)
|
||||
- `test/cli/` - CLI command tests (install, run, test, etc.)
|
||||
- `test/bundler/` - Bundler and transpiler tests. Use `itBundled` helper.
|
||||
- `test/regression/issue/` - Regression tests (create one per bug fix)
|
||||
- `test/bundler/` - Bundler and transpiler tests
|
||||
- `test/integration/` - End-to-end integration tests
|
||||
- `test/napi/` - N-API compatibility tests
|
||||
- `test/v8/` - V8 C++ API compatibility tests
|
||||
|
||||
### Writing Tests
|
||||
|
||||
Tests use Bun's Jest-compatible test runner with proper test fixtures.
|
||||
|
||||
- For **single-file tests**, prefer `-e` over `tempDir`.
|
||||
- For **multi-file tests**, prefer `tempDir` and `Bun.spawn`.
|
||||
Tests use Bun's Jest-compatible test runner with proper test fixtures:
|
||||
|
||||
```typescript
|
||||
import { test, expect } from "bun:test";
|
||||
import { bunEnv, bunExe, normalizeBunSnapshot, tempDir } from "harness";
|
||||
import { bunEnv, bunExe, tempDirWithFiles } from "harness";
|
||||
|
||||
test("(single-file test) my feature", async () => {
|
||||
await using proc = Bun.spawn({
|
||||
cmd: [bunExe(), "-e", "console.log('Hello, world!')"],
|
||||
env: bunEnv,
|
||||
});
|
||||
|
||||
const [stdout, stderr, exitCode] = await Promise.all([
|
||||
proc.stdout.text(),
|
||||
proc.stderr.text(),
|
||||
proc.exited,
|
||||
]);
|
||||
|
||||
expect(normalizeBunSnapshot(stdout)).toMatchInlineSnapshot(`"Hello, world!"`);
|
||||
expect(exitCode).toBe(0);
|
||||
});
|
||||
|
||||
test("(multi-file test) my feature", async () => {
|
||||
test("my feature", async () => {
|
||||
// Create temp directory with test files
|
||||
using dir = tempDir("test-prefix", {
|
||||
"index.js": `import { foo } from "./foo.ts"; foo();`,
|
||||
"foo.ts": `export function foo() { console.log("foo"); }`,
|
||||
const dir = tempDirWithFiles("test-prefix", {
|
||||
"index.js": `console.log("hello");`,
|
||||
});
|
||||
|
||||
// Spawn Bun process
|
||||
await using proc = Bun.spawn({
|
||||
cmd: [bunExe(), "index.js"],
|
||||
env: bunEnv,
|
||||
cwd: String(dir),
|
||||
stderr: "pipe",
|
||||
cwd: dir,
|
||||
});
|
||||
|
||||
const [stdout, stderr, exitCode] = await Promise.all([
|
||||
proc.stdout.text(),
|
||||
proc.stderr.text(),
|
||||
new Response(proc.stdout).text(),
|
||||
new Response(proc.stderr).text(),
|
||||
proc.exited,
|
||||
]);
|
||||
|
||||
// Prefer snapshot tests over expect(stdout).toBe("hello\n");
|
||||
expect(normalizeBunSnapshot(stdout, dir)).toMatchInlineSnapshot(`"hello"`);
|
||||
|
||||
// Assert the exit code last. This gives you a more useful error message on test failure.
|
||||
expect(exitCode).toBe(0);
|
||||
expect(stdout).toBe("hello\n");
|
||||
});
|
||||
```
|
||||
|
||||
- Always use `port: 0`. Do not hardcode ports. Do not use your own random port number function.
|
||||
- Use `normalizeBunSnapshot` to normalize snapshot output of the test.
|
||||
- NEVER write tests that check for no "panic" or "uncaught exception" or similar in the test output. That is NOT a valid test.
|
||||
- Use `tempDir` from `"harness"` to create a temporary directory. **Do not** use `tmpdirSync` or `fs.mkdtempSync` to create temporary directories.
|
||||
- When spawning processes, tests should expect(stdout).toBe(...) BEFORE expect(exitCode).toBe(0). This gives you a more useful error message on test failure.
|
||||
- **CRITICAL**: Do not write flaky tests. Do not use `setTimeout` in tests. Instead, `await` the condition to be met. You are not testing the TIME PASSING, you are testing the CONDITION.
|
||||
- **CRITICAL**: Verify your test fails with `USE_SYSTEM_BUN=1 bun test <file>` and passes with `bun bd test <file>`. Your test is NOT VALID if it passes with `USE_SYSTEM_BUN=1`.
|
||||
|
||||
## Code Architecture
|
||||
|
||||
### Language Structure
|
||||
@@ -107,7 +76,7 @@ test("(multi-file test) my feature", async () => {
|
||||
- **Zig code** (`src/*.zig`): Core runtime, JavaScript bindings, package manager
|
||||
- **C++ code** (`src/bun.js/bindings/*.cpp`): JavaScriptCore bindings, Web APIs
|
||||
- **TypeScript** (`src/js/`): Built-in JavaScript modules with special syntax (see JavaScript Modules section)
|
||||
- **Generated code**: Many files are auto-generated from `.classes.ts` and other sources. Bun will automatically rebuild these files when you make changes to them.
|
||||
- **Generated code**: Many files are auto-generated from `.classes.ts` and other sources
|
||||
|
||||
### Core Source Organization
|
||||
|
||||
@@ -164,6 +133,7 @@ test("(multi-file test) my feature", async () => {
|
||||
When implementing JavaScript classes in C++:
|
||||
|
||||
1. Create three classes if there's a public constructor:
|
||||
|
||||
- `class Foo : public JSC::JSDestructibleObject` (if has C++ fields)
|
||||
- `class FooPrototype : public JSC::JSNonFinalObject`
|
||||
- `class FooConstructor : public JSC::InternalFunction`
|
||||
@@ -172,6 +142,19 @@ When implementing JavaScript classes in C++:
|
||||
3. Add iso subspaces for classes with C++ fields
|
||||
4. Cache structures in ZigGlobalObject
|
||||
|
||||
## Development Workflow
|
||||
|
||||
### Code Formatting
|
||||
|
||||
- `bun run prettier` - Format JS/TS files
|
||||
- `bun run zig-format` - Format Zig files
|
||||
- `bun run clang-format` - Format C++ files
|
||||
|
||||
### Watching for Changes
|
||||
|
||||
- `bun run watch` - Incremental Zig compilation with error checking
|
||||
- `bun run watch-windows` - Windows-specific watch mode
|
||||
|
||||
### Code Generation
|
||||
|
||||
Code generation happens automatically as part of the build process. The main scripts are:
|
||||
@@ -193,19 +176,70 @@ Built-in JavaScript modules use special syntax and are organized as:
|
||||
- `internal/` - Internal modules not exposed to users
|
||||
- `builtins/` - Core JavaScript builtins (streams, console, etc.)
|
||||
|
||||
### Special Syntax in Built-in Modules
|
||||
|
||||
1. **`$` prefix** - Access to private properties and JSC intrinsics:
|
||||
|
||||
```js
|
||||
const arr = $Array.from(...); // Private global
|
||||
map.$set(...); // Private method
|
||||
const arr2 = $newArrayWithSize(5); // JSC intrinsic
|
||||
```
|
||||
|
||||
2. **`require()`** - Must use string literals, resolved at compile time:
|
||||
|
||||
```js
|
||||
const fs = require("fs"); // Directly loads by numeric ID
|
||||
```
|
||||
|
||||
3. **Debug helpers**:
|
||||
|
||||
- `$debug()` - Like console.log but stripped in release builds
|
||||
- `$assert()` - Assertions stripped in release builds
|
||||
- `if($debug) {}` - Check if debug env var is set
|
||||
|
||||
4. **Platform detection**: `process.platform` and `process.arch` are inlined and dead-code eliminated
|
||||
|
||||
5. **Export syntax**: Use `export default` which gets converted to a return statement:
|
||||
```js
|
||||
export default {
|
||||
readFile,
|
||||
writeFile,
|
||||
};
|
||||
```
|
||||
|
||||
Note: These are NOT ES modules. The preprocessor converts `$` to `@` (JSC's actual syntax) and handles the special functions.
|
||||
|
||||
## CI
|
||||
|
||||
Bun uses BuildKite for CI. To get the status of a PR, you can use the following command:
|
||||
|
||||
```bash
|
||||
bun ci
|
||||
```
|
||||
|
||||
## Important Development Notes
|
||||
|
||||
1. **Never use `bun test` or `bun <file>` directly** - always use `bun bd test` or `bun bd <command>`. `bun bd` compiles & runs the debug build.
|
||||
2. **All changes must be tested** - if you're not testing your changes, you're not done.
|
||||
3. **Get your tests to pass**. If you didn't run the tests, your code does not work.
|
||||
4. **Follow existing code style** - check neighboring files for patterns
|
||||
5. **Create tests in the right folder** in `test/` and the test must end in `.test.ts` or `.test.tsx`
|
||||
6. **Use absolute paths** - Always use absolute paths in file operations
|
||||
7. **Avoid shell commands** - Don't use `find` or `grep` in tests; use Bun's Glob and built-in tools
|
||||
8. **Memory management** - In Zig code, be careful with allocators and use defer for cleanup
|
||||
9. **Cross-platform** - Run `bun run zig:check-all` to compile the Zig code on all platforms when making platform-specific changes
|
||||
10. **Debug builds** - Use `BUN_DEBUG_QUIET_LOGS=1` to disable debug logging, or `BUN_DEBUG_<scopeName>=1` to enable specific `Output.scoped(.${scopeName}, .visible)`s
|
||||
11. **Be humble & honest** - NEVER overstate what you got done or what actually works in commits, PRs or in messages to the user.
|
||||
12. **Branch names must start with `claude/`** - This is a requirement for the CI to work.
|
||||
2. **Use `await using`** for proper resource cleanup with Bun APIs (Bun.spawn, Bun.serve, Bun.connect, etc.)
|
||||
3. **Follow existing code style** - check neighboring files for patterns
|
||||
4. **Create regression tests** in `test/regression/issue/` when fixing bugs
|
||||
5. **Use absolute paths** - Always use absolute paths in file operations
|
||||
6. **Avoid shell commands** - Don't use `find` or `grep` in tests; use Bun's Glob and built-in tools
|
||||
7. **Memory management** - In Zig code, be careful with allocators and use defer for cleanup
|
||||
8. **Cross-platform** - Test on macOS, Linux, and Windows when making platform-specific changes
|
||||
9. **Debug builds** - Use `BUN_DEBUG_QUIET_LOGS=1` to disable debug logging, or `BUN_DEBUG_<scope>=1` to enable specific scopes
|
||||
10. **Transpiled source** - Find transpiled files in `/tmp/bun-debug-src/` for debugging
|
||||
|
||||
**ONLY** push up changes after running `bun bd test <file>` and ensuring your tests pass.
|
||||
## Key APIs and Features
|
||||
|
||||
### Bun-Specific APIs
|
||||
|
||||
- **Bun.serve()** - High-performance HTTP server
|
||||
- **Bun.spawn()** - Process spawning with better performance than Node.js
|
||||
- **Bun.file()** - Fast file I/O operations
|
||||
- **Bun.write()** - Unified API for writing to files, stdout, etc.
|
||||
- **Bun.$ (Shell)** - Cross-platform shell scripting
|
||||
- **Bun.SQLite** - Native SQLite integration
|
||||
- **Bun.FFI** - Call native libraries from JavaScript
|
||||
- **Bun.Glob** - Fast file pattern matching
|
||||
|
||||
@@ -24,17 +24,13 @@ if(CMAKE_HOST_APPLE)
|
||||
include(SetupMacSDK)
|
||||
endif()
|
||||
include(SetupLLVM)
|
||||
include(SetupCcache)
|
||||
|
||||
# --- Project ---
|
||||
|
||||
parse_package_json(VERSION_VARIABLE DEFAULT_VERSION)
|
||||
optionx(VERSION STRING "The version of Bun" DEFAULT ${DEFAULT_VERSION})
|
||||
project(Bun VERSION ${VERSION})
|
||||
|
||||
# Bun uses C++23, which is compatible with BoringSSL's C++17 requirement
|
||||
set(CMAKE_CXX_STANDARD 23)
|
||||
set(CMAKE_CXX_STANDARD_REQUIRED ON)
|
||||
|
||||
include(Options)
|
||||
include(CompilerFlags)
|
||||
|
||||
@@ -47,19 +43,6 @@ include(SetupEsbuild)
|
||||
include(SetupZig)
|
||||
include(SetupRust)
|
||||
|
||||
find_program(SCCACHE_PROGRAM sccache)
|
||||
if(SCCACHE_PROGRAM AND NOT DEFINED ENV{NO_SCCACHE})
|
||||
include(SetupSccache)
|
||||
else()
|
||||
find_program(CCACHE_PROGRAM ccache)
|
||||
if(CCACHE_PROGRAM)
|
||||
include(SetupCcache)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
# Generate dependency versions header
|
||||
include(GenerateDependencyVersions)
|
||||
|
||||
# --- Targets ---
|
||||
|
||||
include(BuildBun)
|
||||
|
||||
@@ -2,40 +2,26 @@ Configuring a development environment for Bun can take 10-30 minutes depending o
|
||||
|
||||
If you are using Windows, please refer to [this guide](https://bun.com/docs/project/building-windows)
|
||||
|
||||
## Using Nix (Alternative)
|
||||
|
||||
A Nix flake is provided as an alternative to manual dependency installation:
|
||||
|
||||
```bash
|
||||
nix develop
|
||||
# or explicitly use the pure shell
|
||||
# nix develop .#pure
|
||||
export CMAKE_SYSTEM_PROCESSOR=$(uname -m)
|
||||
bun bd
|
||||
```
|
||||
|
||||
This provides all dependencies in an isolated, reproducible environment without requiring sudo.
|
||||
|
||||
## Install Dependencies (Manual)
|
||||
## Install Dependencies
|
||||
|
||||
Using your system's package manager, install Bun's dependencies:
|
||||
|
||||
{% codetabs group="os" %}
|
||||
|
||||
```bash#macOS (Homebrew)
|
||||
$ brew install automake cmake coreutils gnu-sed go icu4c libiconv libtool ninja pkg-config rust ruby sccache
|
||||
$ brew install automake ccache cmake coreutils gnu-sed go icu4c libiconv libtool ninja pkg-config rust ruby
|
||||
```
|
||||
|
||||
```bash#Ubuntu/Debian
|
||||
$ sudo apt install curl wget lsb-release software-properties-common cargo cmake git golang libtool ninja-build pkg-config rustc ruby-full xz-utils
|
||||
$ sudo apt install curl wget lsb-release software-properties-common cargo ccache cmake git golang libtool ninja-build pkg-config rustc ruby-full xz-utils
|
||||
```
|
||||
|
||||
```bash#Arch
|
||||
$ sudo pacman -S base-devel cmake git go libiconv libtool make ninja pkg-config python rust sed unzip ruby
|
||||
$ sudo pacman -S base-devel ccache cmake git go libiconv libtool make ninja pkg-config python rust sed unzip ruby
|
||||
```
|
||||
|
||||
```bash#Fedora
|
||||
$ sudo dnf install cargo clang19 llvm19 lld19 cmake git golang libtool ninja-build pkg-config rustc ruby libatomic-static libstdc++-static sed unzip which libicu-devel 'perl(Math::BigInt)'
|
||||
$ sudo dnf install cargo ccache cmake git golang libtool ninja-build pkg-config rustc ruby libatomic-static libstdc++-static sed unzip which libicu-devel 'perl(Math::BigInt)'
|
||||
```
|
||||
|
||||
```bash#openSUSE Tumbleweed
|
||||
@@ -65,44 +51,6 @@ $ brew install bun
|
||||
|
||||
{% /codetabs %}
|
||||
|
||||
### Optional: Install `sccache`
|
||||
|
||||
sccache is used to cache compilation artifacts, significantly speeding up builds. It must be installed with S3 support:
|
||||
|
||||
```bash
|
||||
# For macOS
|
||||
$ brew install sccache
|
||||
|
||||
# For Linux. Note that the version in your package manager may not have S3 support.
|
||||
$ cargo install sccache --features=s3
|
||||
```
|
||||
|
||||
This will install `sccache` with S3 support. Our build scripts will automatically detect and use `sccache` with our shared S3 cache. **Note**: Not all versions of `sccache` are compiled with S3 support, hence we recommend installing it via `cargo`.
|
||||
|
||||
#### Registering AWS Credentials for `sccache` (Core Developers Only)
|
||||
|
||||
Core developers have write access to the shared S3 cache. To enable write access, you must log in with AWS credentials. The easiest way to do this is to use the [`aws` CLI](https://aws.amazon.com/cli/) and invoke [`aws configure` to provide your AWS security info](https://docs.aws.amazon.com/cli/latest/reference/configure/).
|
||||
|
||||
The `cmake` scripts should automatically detect your AWS credentials from the environment or the `~/.aws/credentials` file.
|
||||
|
||||
<details>
|
||||
<summary>Logging in to the `aws` CLI</summary>
|
||||
|
||||
1. Install the AWS CLI by following [the official guide](https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html).
|
||||
2. Log in to your AWS account console. A team member should provide you with your credentials.
|
||||
3. Click your name in the top right > Security credentials.
|
||||
4. Scroll to "Access keys" and create a new access key.
|
||||
5. Run `aws configure` in your terminal and provide the access key ID and secret access key when prompted.
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Common Issues You May Encounter</summary>
|
||||
|
||||
- To confirm that the cache is being used, you can use the `sccache --show-stats` command right after a build. This will expose very useful statistics, including cache hits/misses.
|
||||
- If you have multiple AWS profiles configured, ensure that the correct profile is set in the `AWS_PROFILE` environment variable.
|
||||
- `sccache` follows a server-client model. If you run into weird issues where `sccache` refuses to use S3, even though you have AWS credentials configured, try killing any running `sccache` servers with `sccache --stop-server` and then re-running the build.
|
||||
</details>
|
||||
|
||||
## Install LLVM
|
||||
|
||||
Bun requires LLVM 19 (`clang` is part of LLVM). This version requirement is to match WebKit (precompiled), as mismatching versions will cause memory allocation failures at runtime. In most cases, you can install LLVM through your system package manager:
|
||||
@@ -201,7 +149,7 @@ Bun generally takes about 2.5 minutes to compile a debug build when there are Zi
|
||||
- Batch up your changes
|
||||
- Ensure zls is running with incremental watching for LSP errors (if you use VSCode and install Zig and run `bun run build` once to download Zig, this should just work)
|
||||
- Prefer using the debugger ("CodeLLDB" in VSCode) to step through the code.
|
||||
- Use debug logs. `BUN_DEBUG_<scope>=1` will enable debug logging for the corresponding `Output.scoped(.<scope>, .hidden)` logs. You can also set `BUN_DEBUG_QUIET_LOGS=1` to disable all debug logging that isn't explicitly enabled. To dump debug logs into a file, `BUN_DEBUG=<path-to-file>.log`. Debug logs are aggressively removed in release builds.
|
||||
- Use debug logs. `BUN_DEBUG_<scope>=1` will enable debug logging for the corresponding `Output.scoped(.<scope>, false)` logs. You can also set `BUN_DEBUG_QUIET_LOGS=1` to disable all debug logging that isn't explicitly enabled. To dump debug lgos into a file, `BUN_DEBUG=<path-to-file>.log`. Debug logs are aggressively removed in release builds.
|
||||
- src/js/\*\*.ts changes are pretty much instant to rebuild. C++ changes are a bit slower, but still much faster than the Zig code (Zig is one compilation unit, C++ is many).
|
||||
|
||||
## Code generation scripts
|
||||
@@ -212,7 +160,6 @@ In particular, these are:
|
||||
|
||||
- `./src/codegen/generate-jssink.ts` -- Generates `build/debug/codegen/JSSink.cpp`, `build/debug/codegen/JSSink.h` which implement various classes for interfacing with `ReadableStream`. This is internally how `FileSink`, `ArrayBufferSink`, `"type": "direct"` streams and other code related to streams works.
|
||||
- `./src/codegen/generate-classes.ts` -- Generates `build/debug/codegen/ZigGeneratedClasses*`, which generates Zig & C++ bindings for JavaScriptCore classes implemented in Zig. In `**/*.classes.ts` files, we define the interfaces for various classes, methods, prototypes, getters/setters etc which the code generator reads to generate boilerplate code implementing the JavaScript objects in C++ and wiring them up to Zig
|
||||
- `./src/codegen/cppbind.ts` -- Generates automatic Zig bindings for C++ functions marked with `[[ZIG_EXPORT]]` attributes.
|
||||
- `./src/codegen/bundle-modules.ts` -- Bundles built-in modules like `node:fs`, `bun:ffi` into files we can include in the final binary. In development, these can be reloaded without rebuilding Zig (you still need to run `bun run build`, but it re-reads the transpiled files from disk afterwards). In release builds, these are embedded into the binary.
|
||||
- `./src/codegen/bundle-functions.ts` -- Bundles globally-accessible functions implemented in JavaScript/TypeScript like `ReadableStream`, `WritableStream`, and a handful more. These are used similarly to the builtin modules, but the output more closely aligns with what WebKit/Safari does for Safari's built-in functions so that we can copy-paste the implementations from WebKit as a starting point.
|
||||
|
||||
@@ -275,8 +222,8 @@ $ git clone https://github.com/oven-sh/WebKit vendor/WebKit
|
||||
$ git -C vendor/WebKit checkout <commit_hash>
|
||||
|
||||
# Make a debug build of JSC. This will output build artifacts in ./vendor/WebKit/WebKitBuild/Debug
|
||||
# Optionally, you can use `bun run jsc:build` for a release build
|
||||
$ bun run jsc:build:debug && rm vendor/WebKit/WebKitBuild/Debug/JavaScriptCore/DerivedSources/inspector/InspectorProtocolObjects.h
|
||||
# Optionally, you can use `make jsc` for a release build
|
||||
$ make jsc-debug && rm vendor/WebKit/WebKitBuild/Debug/JavaScriptCore/DerivedSources/inspector/InspectorProtocolObjects.h
|
||||
|
||||
# After an initial run of `make jsc-debug`, you can rebuild JSC with:
|
||||
$ cmake --build vendor/WebKit/WebKitBuild/Debug --target jsc && rm vendor/WebKit/WebKitBuild/Debug/JavaScriptCore/DerivedSources/inspector/InspectorProtocolObjects.h
|
||||
@@ -369,6 +316,15 @@ $ bun run build -DUSE_STATIC_LIBATOMIC=OFF
|
||||
|
||||
The built version of Bun may not work on other systems if compiled this way.
|
||||
|
||||
### ccache conflicts with building TinyCC on macOS
|
||||
|
||||
If you run into issues with `ccache` when building TinyCC, try reinstalling ccache
|
||||
|
||||
```bash
|
||||
brew uninstall ccache
|
||||
brew install ccache
|
||||
```
|
||||
|
||||
## Using bun-debug
|
||||
|
||||
- Disable logging: `BUN_DEBUG_QUIET_LOGS=1 bun-debug ...` (to disable all debug logging)
|
||||
|
||||
10
README.md
10
README.md
@@ -54,7 +54,7 @@ Bun supports Linux (x64 & arm64), macOS (x64 & Apple Silicon) and Windows (x64).
|
||||
curl -fsSL https://bun.com/install | bash
|
||||
|
||||
# on windows
|
||||
powershell -c "irm bun.sh/install.ps1 | iex"
|
||||
powershell -c "irm bun.com/install.ps1 | iex"
|
||||
|
||||
# with npm
|
||||
npm install -g bun
|
||||
@@ -104,13 +104,13 @@ bun upgrade --canary
|
||||
- [File types (Loaders)](https://bun.com/docs/runtime/loaders)
|
||||
- [TypeScript](https://bun.com/docs/runtime/typescript)
|
||||
- [JSX](https://bun.com/docs/runtime/jsx)
|
||||
- [Environment variables](https://bun.com/docs/runtime/environment-variables)
|
||||
- [Environment variables](https://bun.com/docs/runtime/env)
|
||||
- [Bun APIs](https://bun.com/docs/runtime/bun-apis)
|
||||
- [Web APIs](https://bun.com/docs/runtime/web-apis)
|
||||
- [Node.js compatibility](https://bun.com/docs/runtime/nodejs-compat)
|
||||
- [Node.js compatibility](https://bun.com/docs/runtime/nodejs-apis)
|
||||
- [Single-file executable](https://bun.com/docs/bundler/executables)
|
||||
- [Plugins](https://bun.com/docs/runtime/plugins)
|
||||
- [Watch mode / Hot Reloading](https://bun.com/docs/runtime/watch-mode)
|
||||
- [Watch mode / Hot Reloading](https://bun.com/docs/runtime/hot)
|
||||
- [Module resolution](https://bun.com/docs/runtime/modules)
|
||||
- [Auto-install](https://bun.com/docs/runtime/autoimport)
|
||||
- [bunfig.toml](https://bun.com/docs/runtime/bunfig)
|
||||
@@ -230,7 +230,7 @@ bun upgrade --canary
|
||||
|
||||
- Ecosystem
|
||||
- [Use React and JSX](https://bun.com/guides/ecosystem/react)
|
||||
- [Use Gel with Bun](https://bun.com/guides/ecosystem/gel)
|
||||
- [Use EdgeDB with Bun](https://bun.com/guides/ecosystem/edgedb)
|
||||
- [Use Prisma with Bun](https://bun.com/guides/ecosystem/prisma)
|
||||
- [Add Sentry to a Bun app](https://bun.com/guides/ecosystem/sentry)
|
||||
- [Create a Discord bot](https://bun.com/guides/ecosystem/discordjs)
|
||||
|
||||
@@ -15,13 +15,11 @@
|
||||
"eventemitter3": "^5.0.0",
|
||||
"execa": "^8.0.1",
|
||||
"fast-glob": "3.3.1",
|
||||
"fastify": "^5.0.0",
|
||||
"fdir": "^6.1.0",
|
||||
"mitata": "^1.0.25",
|
||||
"react": "^18.3.1",
|
||||
"react-dom": "^18.3.1",
|
||||
"string-width": "7.1.0",
|
||||
"strip-ansi": "^7.1.0",
|
||||
"tinycolor2": "^1.6.0",
|
||||
"zx": "^7.2.3",
|
||||
},
|
||||
@@ -95,18 +93,6 @@
|
||||
|
||||
"@esbuild/linux-loong64": ["@esbuild/linux-loong64@0.14.54", "", { "os": "linux", "cpu": "none" }, "sha512-bZBrLAIX1kpWelV0XemxBZllyRmM6vgFQQG2GdNb+r3Fkp0FOh1NJSvekXDs7jq70k4euu1cryLMfU+mTXlEpw=="],
|
||||
|
||||
"@fastify/ajv-compiler": ["@fastify/ajv-compiler@4.0.2", "", { "dependencies": { "ajv": "^8.12.0", "ajv-formats": "^3.0.1", "fast-uri": "^3.0.0" } }, "sha512-Rkiu/8wIjpsf46Rr+Fitd3HRP+VsxUFDDeag0hs9L0ksfnwx2g7SPQQTFL0E8Qv+rfXzQOxBJnjUB9ITUDjfWQ=="],
|
||||
|
||||
"@fastify/error": ["@fastify/error@4.2.0", "", {}, "sha512-RSo3sVDXfHskiBZKBPRgnQTtIqpi/7zhJOEmAxCiBcM7d0uwdGdxLlsCaLzGs8v8NnxIRlfG0N51p5yFaOentQ=="],
|
||||
|
||||
"@fastify/fast-json-stringify-compiler": ["@fastify/fast-json-stringify-compiler@5.0.3", "", { "dependencies": { "fast-json-stringify": "^6.0.0" } }, "sha512-uik7yYHkLr6fxd8hJSZ8c+xF4WafPK+XzneQDPU+D10r5X19GW8lJcom2YijX2+qtFF1ENJlHXKFM9ouXNJYgQ=="],
|
||||
|
||||
"@fastify/forwarded": ["@fastify/forwarded@3.0.0", "", {}, "sha512-kJExsp4JCms7ipzg7SJ3y8DwmePaELHxKYtg+tZow+k0znUTf3cb+npgyqm8+ATZOdmfgfydIebPDWM172wfyA=="],
|
||||
|
||||
"@fastify/merge-json-schemas": ["@fastify/merge-json-schemas@0.2.1", "", { "dependencies": { "dequal": "^2.0.3" } }, "sha512-OA3KGBCy6KtIvLf8DINC5880o5iBlDX4SxzLQS8HorJAbqluzLRn80UXU0bxZn7UOFhFgpRJDasfwn9nG4FG4A=="],
|
||||
|
||||
"@fastify/proxy-addr": ["@fastify/proxy-addr@5.0.0", "", { "dependencies": { "@fastify/forwarded": "^3.0.0", "ipaddr.js": "^2.1.0" } }, "sha512-37qVVA1qZ5sgH7KpHkkC4z9SK6StIsIcOmpjvMPXNb3vx2GQxhZocogVYbr2PbbeLCQxYIPDok307xEvRZOzGA=="],
|
||||
|
||||
"@jridgewell/gen-mapping": ["@jridgewell/gen-mapping@0.1.1", "", { "dependencies": { "@jridgewell/set-array": "^1.0.0", "@jridgewell/sourcemap-codec": "^1.4.10" } }, "sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w=="],
|
||||
|
||||
"@jridgewell/resolve-uri": ["@jridgewell/resolve-uri@3.1.0", "", {}, "sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w=="],
|
||||
@@ -157,20 +143,10 @@
|
||||
|
||||
"@types/which": ["@types/which@3.0.3", "", {}, "sha512-2C1+XoY0huExTbs8MQv1DuS5FS86+SEjdM9F/+GS61gg5Hqbtj8ZiDSx8MfWcyei907fIPbfPGCOrNUTnVHY1g=="],
|
||||
|
||||
"abstract-logging": ["abstract-logging@2.0.1", "", {}, "sha512-2BjRTZxTPvheOvGbBslFSYOUkr+SjPtOnrLP33f+VIWLzezQpZcqVg7ja3L4dBXmzzgwT+a029jRx5PCi3JuiA=="],
|
||||
|
||||
"ajv": ["ajv@8.17.1", "", { "dependencies": { "fast-deep-equal": "^3.1.3", "fast-uri": "^3.0.1", "json-schema-traverse": "^1.0.0", "require-from-string": "^2.0.2" } }, "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g=="],
|
||||
|
||||
"ajv-formats": ["ajv-formats@3.0.1", "", { "dependencies": { "ajv": "^8.0.0" } }, "sha512-8iUql50EUR+uUcdRQ3HDqa6EVyo3docL8g5WJ3FNcWmu62IbkGUue/pEyLBW8VGKKucTPgqeks4fIU1DA4yowQ=="],
|
||||
|
||||
"ansi-regex": ["ansi-regex@6.0.1", "", {}, "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA=="],
|
||||
|
||||
"ansi-styles": ["ansi-styles@3.2.1", "", { "dependencies": { "color-convert": "^1.9.0" } }, "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA=="],
|
||||
|
||||
"atomic-sleep": ["atomic-sleep@1.0.0", "", {}, "sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ=="],
|
||||
|
||||
"avvio": ["avvio@9.1.0", "", { "dependencies": { "@fastify/error": "^4.0.0", "fastq": "^1.17.1" } }, "sha512-fYASnYi600CsH/j9EQov7lECAniYiBFiiAtBNuZYLA2leLe9qOvZzqYHFjtIj6gD2VMoMLP14834LFWvr4IfDw=="],
|
||||
|
||||
"benchmark": ["benchmark@2.1.4", "", { "dependencies": { "lodash": "^4.17.4", "platform": "^1.3.3" } }, "sha512-l9MlfN4M1K/H2fbhfMy3B7vJd6AGKJVQn2h6Sg/Yx+KckoUA7ewS5Vv6TjSq18ooE1kS9hhAlQRH3AkXIh/aOQ=="],
|
||||
|
||||
"braces": ["braces@3.0.2", "", { "dependencies": { "fill-range": "^7.0.1" } }, "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A=="],
|
||||
@@ -191,16 +167,12 @@
|
||||
|
||||
"convert-source-map": ["convert-source-map@1.9.0", "", {}, "sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A=="],
|
||||
|
||||
"cookie": ["cookie@1.0.2", "", {}, "sha512-9Kr/j4O16ISv8zBBhJoi4bXOYNTkFLOqSL3UDB0njXxCXNezjeyVrJyGOWtgfs/q2km1gwBcfH8q1yEGoMYunA=="],
|
||||
|
||||
"cross-spawn": ["cross-spawn@7.0.3", "", { "dependencies": { "path-key": "^3.1.0", "shebang-command": "^2.0.0", "which": "^2.0.1" } }, "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w=="],
|
||||
|
||||
"data-uri-to-buffer": ["data-uri-to-buffer@4.0.1", "", {}, "sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A=="],
|
||||
|
||||
"debug": ["debug@4.3.4", "", { "dependencies": { "ms": "2.1.2" } }, "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ=="],
|
||||
|
||||
"dequal": ["dequal@2.0.3", "", {}, "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA=="],
|
||||
|
||||
"dir-glob": ["dir-glob@3.0.1", "", { "dependencies": { "path-type": "^4.0.0" } }, "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA=="],
|
||||
|
||||
"duplexer": ["duplexer@0.1.2", "", {}, "sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg=="],
|
||||
@@ -261,22 +233,10 @@
|
||||
|
||||
"execa": ["execa@8.0.1", "", { "dependencies": { "cross-spawn": "^7.0.3", "get-stream": "^8.0.1", "human-signals": "^5.0.0", "is-stream": "^3.0.0", "merge-stream": "^2.0.0", "npm-run-path": "^5.1.0", "onetime": "^6.0.0", "signal-exit": "^4.1.0", "strip-final-newline": "^3.0.0" } }, "sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg=="],
|
||||
|
||||
"fast-decode-uri-component": ["fast-decode-uri-component@1.0.1", "", {}, "sha512-WKgKWg5eUxvRZGwW8FvfbaH7AXSh2cL+3j5fMGzUMCxWBJ3dV3a7Wz8y2f/uQ0e3B6WmodD3oS54jTQ9HVTIIg=="],
|
||||
|
||||
"fast-deep-equal": ["fast-deep-equal@3.1.3", "", {}, "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="],
|
||||
|
||||
"fast-glob": ["fast-glob@3.3.1", "", { "dependencies": { "@nodelib/fs.stat": "^2.0.2", "@nodelib/fs.walk": "^1.2.3", "glob-parent": "^5.1.2", "merge2": "^1.3.0", "micromatch": "^4.0.4" } }, "sha512-kNFPyjhh5cKjrUltxs+wFx+ZkbRaxxmZ+X0ZU31SOsxCEtP9VPgtq2teZw1DebupL5GmDaNQ6yKMMVcM41iqDg=="],
|
||||
|
||||
"fast-json-stringify": ["fast-json-stringify@6.0.1", "", { "dependencies": { "@fastify/merge-json-schemas": "^0.2.0", "ajv": "^8.12.0", "ajv-formats": "^3.0.1", "fast-uri": "^3.0.0", "json-schema-ref-resolver": "^2.0.0", "rfdc": "^1.2.0" } }, "sha512-s7SJE83QKBZwg54dIbD5rCtzOBVD43V1ReWXXYqBgwCwHLYAAT0RQc/FmrQglXqWPpz6omtryJQOau5jI4Nrvg=="],
|
||||
|
||||
"fast-querystring": ["fast-querystring@1.1.2", "", { "dependencies": { "fast-decode-uri-component": "^1.0.1" } }, "sha512-g6KuKWmFXc0fID8WWH0jit4g0AGBoJhCkJMb1RmbsSEUNvQ+ZC8D6CUZ+GtF8nMzSPXnhiePyyqqipzNNEnHjg=="],
|
||||
|
||||
"fast-redact": ["fast-redact@3.5.0", "", {}, "sha512-dwsoQlS7h9hMeYUq1W++23NDcBLV4KqONnITDV9DjfS3q1SgDGVrBdvvTLUotWtPSD7asWDV9/CmsZPy8Hf70A=="],
|
||||
|
||||
"fast-uri": ["fast-uri@3.0.6", "", {}, "sha512-Atfo14OibSv5wAp4VWNsFYE1AchQRTv9cBGWET4pZWHzYshFSS9NQI6I57rdKn9croWVMbYFbLhJ+yJvmZIIHw=="],
|
||||
|
||||
"fastify": ["fastify@5.5.0", "", { "dependencies": { "@fastify/ajv-compiler": "^4.0.0", "@fastify/error": "^4.0.0", "@fastify/fast-json-stringify-compiler": "^5.0.0", "@fastify/proxy-addr": "^5.0.0", "abstract-logging": "^2.0.1", "avvio": "^9.0.0", "fast-json-stringify": "^6.0.0", "find-my-way": "^9.0.0", "light-my-request": "^6.0.0", "pino": "^9.0.0", "process-warning": "^5.0.0", "rfdc": "^1.3.1", "secure-json-parse": "^4.0.0", "semver": "^7.6.0", "toad-cache": "^3.7.0" } }, "sha512-ZWSWlzj3K/DcULCnCjEiC2zn2FBPdlZsSA/pnPa/dbUfLvxkD/Nqmb0XXMXLrWkeM4uQPUvjdJpwtXmTfriXqw=="],
|
||||
|
||||
"fastq": ["fastq@1.15.0", "", { "dependencies": { "reusify": "^1.0.4" } }, "sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw=="],
|
||||
|
||||
"fdir": ["fdir@6.1.0", "", { "peerDependencies": { "picomatch": "2.x" } }, "sha512-274qhz5PxNnA/fybOu6apTCUnM0GnO3QazB6VH+oag/7DQskdYq8lm07ZSm90kEQuWYH5GvjAxGruuHrEr0bcg=="],
|
||||
@@ -285,8 +245,6 @@
|
||||
|
||||
"fill-range": ["fill-range@7.0.1", "", { "dependencies": { "to-regex-range": "^5.0.1" } }, "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ=="],
|
||||
|
||||
"find-my-way": ["find-my-way@9.3.0", "", { "dependencies": { "fast-deep-equal": "^3.1.3", "fast-querystring": "^1.0.0", "safe-regex2": "^5.0.0" } }, "sha512-eRoFWQw+Yv2tuYlK2pjFS2jGXSxSppAs3hSQjfxVKxM5amECzIgYYc1FEI8ZmhSh/Ig+FrKEz43NLRKJjYCZVg=="],
|
||||
|
||||
"formdata-polyfill": ["formdata-polyfill@4.0.10", "", { "dependencies": { "fetch-blob": "^3.1.2" } }, "sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g=="],
|
||||
|
||||
"from": ["from@0.1.7", "", {}, "sha512-twe20eF1OxVxp/ML/kq2p1uc6KvFK/+vs8WjEbeKmV2He22MKm7YF2ANIt+EOqhJ5L3K/SuuPhk0hWQDjOM23g=="],
|
||||
@@ -315,8 +273,6 @@
|
||||
|
||||
"ignore": ["ignore@5.3.0", "", {}, "sha512-g7dmpshy+gD7mh88OC9NwSGTKoc3kyLAZQRU1mt53Aw/vnvfXnbC+F/7F7QoYVKbV+KNvJx8wArewKy1vXMtlg=="],
|
||||
|
||||
"ipaddr.js": ["ipaddr.js@2.2.0", "", {}, "sha512-Ag3wB2o37wslZS19hZqorUnrnzSkpOVy+IiiDEiTqNubEYpYuHWIf6K4psgN2ZWKExS4xhVCrRVfb/wfW8fWJA=="],
|
||||
|
||||
"is-arrayish": ["is-arrayish@0.3.2", "", {}, "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ=="],
|
||||
|
||||
"is-extglob": ["is-extglob@2.1.1", "", {}, "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ=="],
|
||||
@@ -333,16 +289,10 @@
|
||||
|
||||
"jsesc": ["jsesc@2.5.2", "", { "bin": { "jsesc": "bin/jsesc" } }, "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA=="],
|
||||
|
||||
"json-schema-ref-resolver": ["json-schema-ref-resolver@2.0.1", "", { "dependencies": { "dequal": "^2.0.3" } }, "sha512-HG0SIB9X4J8bwbxCbnd5FfPEbcXAJYTi1pBJeP/QPON+w8ovSME8iRG+ElHNxZNX2Qh6eYn1GdzJFS4cDFfx0Q=="],
|
||||
|
||||
"json-schema-traverse": ["json-schema-traverse@1.0.0", "", {}, "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug=="],
|
||||
|
||||
"json5": ["json5@2.2.3", "", { "bin": { "json5": "lib/cli.js" } }, "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg=="],
|
||||
|
||||
"jsonfile": ["jsonfile@6.1.0", "", { "dependencies": { "universalify": "^2.0.0" }, "optionalDependencies": { "graceful-fs": "^4.1.6" } }, "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ=="],
|
||||
|
||||
"light-my-request": ["light-my-request@6.6.0", "", { "dependencies": { "cookie": "^1.0.1", "process-warning": "^4.0.0", "set-cookie-parser": "^2.6.0" } }, "sha512-CHYbu8RtboSIoVsHZ6Ye4cj4Aw/yg2oAFimlF7mNvfDV192LR7nDiKtSIfCuLT7KokPSTn/9kfVLm5OGN0A28A=="],
|
||||
|
||||
"lodash": ["lodash@4.17.21", "", {}, "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="],
|
||||
|
||||
"loose-envify": ["loose-envify@1.4.0", "", { "dependencies": { "js-tokens": "^3.0.0 || ^4.0.0" }, "bin": { "loose-envify": "cli.js" } }, "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q=="],
|
||||
@@ -373,8 +323,6 @@
|
||||
|
||||
"npm-run-path": ["npm-run-path@5.2.0", "", { "dependencies": { "path-key": "^4.0.0" } }, "sha512-W4/tgAXFqFA0iL7fk0+uQ3g7wkL8xJmx3XdK0VGb4cHW//eZTtKGvFBBoRKVTpY7n6ze4NL9ly7rgXcHufqXKg=="],
|
||||
|
||||
"on-exit-leak-free": ["on-exit-leak-free@2.1.2", "", {}, "sha512-0eJJY6hXLGf1udHwfNftBqH+g73EU4B504nZeKpz1sYRKafAghwxEJunB2O7rDZkL4PGfsMVnTXZ2EjibbqcsA=="],
|
||||
|
||||
"onetime": ["onetime@6.0.0", "", { "dependencies": { "mimic-fn": "^4.0.0" } }, "sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ=="],
|
||||
|
||||
"path-key": ["path-key@3.1.1", "", {}, "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q=="],
|
||||
@@ -387,50 +335,24 @@
|
||||
|
||||
"picomatch": ["picomatch@2.3.1", "", {}, "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA=="],
|
||||
|
||||
"pino": ["pino@9.9.0", "", { "dependencies": { "atomic-sleep": "^1.0.0", "fast-redact": "^3.1.1", "on-exit-leak-free": "^2.1.0", "pino-abstract-transport": "^2.0.0", "pino-std-serializers": "^7.0.0", "process-warning": "^5.0.0", "quick-format-unescaped": "^4.0.3", "real-require": "^0.2.0", "safe-stable-stringify": "^2.3.1", "sonic-boom": "^4.0.1", "thread-stream": "^3.0.0" }, "bin": { "pino": "bin.js" } }, "sha512-zxsRIQG9HzG+jEljmvmZupOMDUQ0Jpj0yAgE28jQvvrdYTlEaiGwelJpdndMl/MBuRr70heIj83QyqJUWaU8mQ=="],
|
||||
|
||||
"pino-abstract-transport": ["pino-abstract-transport@2.0.0", "", { "dependencies": { "split2": "^4.0.0" } }, "sha512-F63x5tizV6WCh4R6RHyi2Ml+M70DNRXt/+HANowMflpgGFMAym/VKm6G7ZOQRjqN7XbGxK1Lg9t6ZrtzOaivMw=="],
|
||||
|
||||
"pino-std-serializers": ["pino-std-serializers@7.0.0", "", {}, "sha512-e906FRY0+tV27iq4juKzSYPbUj2do2X2JX4EzSca1631EB2QJQUqGbDuERal7LCtOpxl6x3+nvo9NPZcmjkiFA=="],
|
||||
|
||||
"platform": ["platform@1.3.6", "", {}, "sha512-fnWVljUchTro6RiCFvCXBbNhJc2NijN7oIQxbwsyL0buWJPG85v81ehlHI9fXrJsMNgTofEoWIQeClKpgxFLrg=="],
|
||||
|
||||
"process-warning": ["process-warning@5.0.0", "", {}, "sha512-a39t9ApHNx2L4+HBnQKqxxHNs1r7KF+Intd8Q/g1bUh6q0WIp9voPXJ/x0j+ZL45KF1pJd9+q2jLIRMfvEshkA=="],
|
||||
|
||||
"ps-tree": ["ps-tree@1.2.0", "", { "dependencies": { "event-stream": "=3.3.4" }, "bin": { "ps-tree": "./bin/ps-tree.js" } }, "sha512-0VnamPPYHl4uaU/nSFeZZpR21QAWRz+sRv4iW9+v/GS/J5U5iZB5BNN6J0RMoOvdx2gWM2+ZFMIm58q24e4UYA=="],
|
||||
|
||||
"queue-microtask": ["queue-microtask@1.2.3", "", {}, "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A=="],
|
||||
|
||||
"quick-format-unescaped": ["quick-format-unescaped@4.0.4", "", {}, "sha512-tYC1Q1hgyRuHgloV/YXs2w15unPVh8qfu/qCTfhTYamaw7fyhumKa2yGpdSo87vY32rIclj+4fWYQXUMs9EHvg=="],
|
||||
|
||||
"react": ["react@18.3.1", "", { "dependencies": { "loose-envify": "^1.1.0" } }, "sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ=="],
|
||||
|
||||
"react-dom": ["react-dom@18.3.1", "", { "dependencies": { "loose-envify": "^1.1.0", "scheduler": "^0.23.2" }, "peerDependencies": { "react": "^18.3.1" } }, "sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw=="],
|
||||
|
||||
"real-require": ["real-require@0.2.0", "", {}, "sha512-57frrGM/OCTLqLOAh0mhVA9VBMHd+9U7Zb2THMGdBUoZVOtGbJzjxsYGDJ3A9AYYCP4hn6y1TVbaOfzWtm5GFg=="],
|
||||
|
||||
"require-from-string": ["require-from-string@2.0.2", "", {}, "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw=="],
|
||||
|
||||
"ret": ["ret@0.5.0", "", {}, "sha512-I1XxrZSQ+oErkRR4jYbAyEEu2I0avBvvMM5JN+6EBprOGRCs63ENqZ3vjavq8fBw2+62G5LF5XelKwuJpcvcxw=="],
|
||||
|
||||
"reusify": ["reusify@1.0.4", "", {}, "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw=="],
|
||||
|
||||
"rfdc": ["rfdc@1.4.1", "", {}, "sha512-q1b3N5QkRUWUl7iyylaaj3kOpIT0N2i9MqIEQXP73GVsN9cw3fdx8X63cEmWhJGi2PPCF23Ijp7ktmd39rawIA=="],
|
||||
|
||||
"run-parallel": ["run-parallel@1.2.0", "", { "dependencies": { "queue-microtask": "^1.2.2" } }, "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA=="],
|
||||
|
||||
"safe-regex2": ["safe-regex2@5.0.0", "", { "dependencies": { "ret": "~0.5.0" } }, "sha512-YwJwe5a51WlK7KbOJREPdjNrpViQBI3p4T50lfwPuDhZnE3XGVTlGvi+aolc5+RvxDD6bnUmjVsU9n1eboLUYw=="],
|
||||
|
||||
"safe-stable-stringify": ["safe-stable-stringify@2.5.0", "", {}, "sha512-b3rppTKm9T+PsVCBEOUR46GWI7fdOs00VKZ1+9c1EWDaDMvjQc6tUwuFyIprgGgTcWoVHSKrU8H31ZHA2e0RHA=="],
|
||||
|
||||
"scheduler": ["scheduler@0.23.2", "", { "dependencies": { "loose-envify": "^1.1.0" } }, "sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ=="],
|
||||
|
||||
"secure-json-parse": ["secure-json-parse@4.0.0", "", {}, "sha512-dxtLJO6sc35jWidmLxo7ij+Eg48PM/kleBsxpC8QJE0qJICe+KawkDQmvCMZUr9u7WKVHgMW6vy3fQ7zMiFZMA=="],
|
||||
|
||||
"semver": ["semver@6.3.0", "", { "bin": { "semver": "./bin/semver.js" } }, "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw=="],
|
||||
|
||||
"set-cookie-parser": ["set-cookie-parser@2.7.1", "", {}, "sha512-IOc8uWeOZgnb3ptbCURJWNjWUPcO3ZnTTdzsurqERrP6nPyv+paC55vJM0LpOlT2ne+Ix+9+CRG1MNLlyZ4GjQ=="],
|
||||
|
||||
"shebang-command": ["shebang-command@2.0.0", "", { "dependencies": { "shebang-regex": "^3.0.0" } }, "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA=="],
|
||||
|
||||
"shebang-regex": ["shebang-regex@3.0.0", "", {}, "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A=="],
|
||||
@@ -441,12 +363,8 @@
|
||||
|
||||
"slash": ["slash@4.0.0", "", {}, "sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew=="],
|
||||
|
||||
"sonic-boom": ["sonic-boom@4.2.0", "", { "dependencies": { "atomic-sleep": "^1.0.0" } }, "sha512-INb7TM37/mAcsGmc9hyyI6+QR3rR1zVRu36B0NeGXKnOOLiZOfER5SA+N7X7k3yUYRzLWafduTDvJAfDswwEww=="],
|
||||
|
||||
"split": ["split@0.3.3", "", { "dependencies": { "through": "2" } }, "sha512-wD2AeVmxXRBoX44wAycgjVpMhvbwdI2aZjCkvfNcH1YqHQvJVa1duWc73OyVGJUc05fhFaTZeQ/PYsrmyH0JVA=="],
|
||||
|
||||
"split2": ["split2@4.2.0", "", {}, "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg=="],
|
||||
|
||||
"stream-combiner": ["stream-combiner@0.0.4", "", { "dependencies": { "duplexer": "~0.1.1" } }, "sha512-rT00SPnTVyRsaSz5zgSPma/aHSOic5U1prhYdRy5HS2kTZviFpmDgzilbtsJsxiroqACmayynDN/9VzIbX5DOw=="],
|
||||
|
||||
"string-width": ["string-width@7.1.0", "", { "dependencies": { "emoji-regex": "^10.3.0", "get-east-asian-width": "^1.0.0", "strip-ansi": "^7.1.0" } }, "sha512-SEIJCWiX7Kg4c129n48aDRwLbFb2LJmXXFrWBG4NGaRtMQ3myKPKbwrD1BKqQn74oCoNMBVrfDEr5M9YxCsrkw=="],
|
||||
@@ -457,8 +375,6 @@
|
||||
|
||||
"supports-color": ["supports-color@5.5.0", "", { "dependencies": { "has-flag": "^3.0.0" } }, "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow=="],
|
||||
|
||||
"thread-stream": ["thread-stream@3.1.0", "", { "dependencies": { "real-require": "^0.2.0" } }, "sha512-OqyPZ9u96VohAyMfJykzmivOrY2wfMSf3C5TtFJVgN+Hm6aj+voFhlK+kZEIv2FBh1X6Xp3DlnCOfEQ3B2J86A=="],
|
||||
|
||||
"through": ["through@2.3.8", "", {}, "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg=="],
|
||||
|
||||
"tinycolor2": ["tinycolor2@1.6.0", "", {}, "sha512-XPaBkWQJdsf3pLKJV9p4qN/S+fm2Oj8AIPo1BTUhg5oxkvm9+SVEGFdhyOz7tTdUTfvxMiAs4sp6/eZO2Ew+pw=="],
|
||||
@@ -467,8 +383,6 @@
|
||||
|
||||
"to-regex-range": ["to-regex-range@5.0.1", "", { "dependencies": { "is-number": "^7.0.0" } }, "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ=="],
|
||||
|
||||
"toad-cache": ["toad-cache@3.7.0", "", {}, "sha512-/m8M+2BJUpoJdgAHoG+baCwBT+tf2VraSfkBgl0Y00qIWt41DJ8R5B8nsEw0I58YwF5IZH6z24/2TobDKnqSWw=="],
|
||||
|
||||
"undici-types": ["undici-types@5.26.5", "", {}, "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA=="],
|
||||
|
||||
"universalify": ["universalify@2.0.1", "", {}, "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw=="],
|
||||
@@ -493,14 +407,8 @@
|
||||
|
||||
"ansi-styles/color-convert": ["color-convert@1.9.3", "", { "dependencies": { "color-name": "1.1.3" } }, "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg=="],
|
||||
|
||||
"avvio/fastq": ["fastq@1.19.1", "", { "dependencies": { "reusify": "^1.0.4" } }, "sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ=="],
|
||||
|
||||
"cross-spawn/which": ["which@2.0.2", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "./bin/node-which" } }, "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA=="],
|
||||
|
||||
"fastify/semver": ["semver@7.7.2", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA=="],
|
||||
|
||||
"light-my-request/process-warning": ["process-warning@4.0.1", "", {}, "sha512-3c2LzQ3rY9d0hc1emcsHhfT9Jwz0cChib/QN89oME2R451w5fy3f0afAhERFZAwrbDU43wk12d0ORBpDVME50Q=="],
|
||||
|
||||
"npm-run-path/path-key": ["path-key@4.0.0", "", {}, "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ=="],
|
||||
|
||||
"ansi-styles/color-convert/color-name": ["color-name@1.1.3", "", {}, "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw=="],
|
||||
|
||||
@@ -1,29 +1,40 @@
|
||||
# Create T3 App
|
||||
# `install` benchmark
|
||||
|
||||
This is a [T3 Stack](https://create.t3.gg/) project bootstrapped with `create-t3-app`.
|
||||
Requires [`hyperfine`](https://github.com/sharkdp/hyperfine). The goal of this benchmark is to compare installation performance of Bun with other package managers _when caches are hot_.
|
||||
|
||||
## What's next? How do I make an app with this?
|
||||
### With lockfile, online mode
|
||||
|
||||
We try to keep this project as simple as possible, so you can start with just the scaffolding we set up for you, and add additional things later when they become necessary.
|
||||
To run the benchmark with the standard "install" command for each package manager:
|
||||
|
||||
If you are not familiar with the different technologies used in this project, please refer to the respective docs. If you still are in the wind, please join our [Discord](https://t3.gg/discord) and ask for help.
|
||||
```sh
|
||||
$ hyperfine --prepare 'rm -rf node_modules' --warmup 1 --runs 3 'bun install' 'pnpm install' 'yarn' 'npm install'
|
||||
```
|
||||
|
||||
- [Next.js](https://nextjs.org)
|
||||
- [NextAuth.js](https://next-auth.js.org)
|
||||
- [Prisma](https://prisma.io)
|
||||
- [Drizzle](https://orm.drizzle.team)
|
||||
- [Tailwind CSS](https://tailwindcss.com)
|
||||
- [tRPC](https://trpc.io)
|
||||
### With lockfile, offline mode
|
||||
|
||||
## Learn More
|
||||
Even though all packages are cached, some tools may hit the npm API during the version resolution step. (This is not the same as re-downloading a package.) To entirely avoid network calls, the other package managers require `--prefer-offline/--offline` flag. To run the benchmark using "offline" mode:
|
||||
|
||||
To learn more about the [T3 Stack](https://create.t3.gg/), take a look at the following resources:
|
||||
```sh
|
||||
$ hyperfine --prepare 'rm -rf node_modules' --runs 1 'bun install' 'pnpm install --prefer-offline' 'yarn --offline' 'npm install --prefer-offline'
|
||||
```
|
||||
|
||||
- [Documentation](https://create.t3.gg/)
|
||||
- [Learn the T3 Stack](https://create.t3.gg/en/faq#what-learning-resources-are-currently-available) — Check out these awesome tutorials
|
||||
### Without lockfile, offline mode
|
||||
|
||||
You can check out the [create-t3-app GitHub repository](https://github.com/t3-oss/create-t3-app) — your feedback and contributions are welcome!
|
||||
To run the benchmark with offline mode but without lockfiles:
|
||||
|
||||
## How do I deploy this?
|
||||
```sh
|
||||
$ hyperfine --prepare 'rm -rf node_modules' --warmup 1 'rm bun.lock && bun install' 'rm pnpm-lock.yaml && pnpm install --prefer-offline' 'rm yarn.lock && yarn --offline' 'rm package-lock.json && npm install --prefer-offline'
|
||||
```
|
||||
|
||||
Follow our deployment guides for [Vercel](https://create.t3.gg/en/deployment/vercel), [Netlify](https://create.t3.gg/en/deployment/netlify) and [Docker](https://create.t3.gg/en/deployment/docker) for more information.
|
||||
##
|
||||
|
||||
To check that the app is working as expected:
|
||||
|
||||
```
|
||||
$ bun run dev
|
||||
$ npm run dev
|
||||
$ yarn dev
|
||||
$ pnpm dev
|
||||
```
|
||||
|
||||
Then visit [http://localhost:3000](http://localhost:3000).
|
||||
|
||||
18
bench/install/app/entry.client.tsx
Normal file
18
bench/install/app/entry.client.tsx
Normal file
@@ -0,0 +1,18 @@
|
||||
/**
|
||||
* By default, Remix will handle hydrating your app on the client for you.
|
||||
* You are free to delete this file if you'd like to, but if you ever want it revealed again, you can run `npx remix reveal` ✨
|
||||
* For more information, see https://remix.run/docs/en/main/file-conventions/entry.client
|
||||
*/
|
||||
|
||||
import { RemixBrowser } from "@remix-run/react";
|
||||
import { startTransition, StrictMode } from "react";
|
||||
import { hydrateRoot } from "react-dom/client";
|
||||
|
||||
startTransition(() => {
|
||||
hydrateRoot(
|
||||
document,
|
||||
<StrictMode>
|
||||
<RemixBrowser />
|
||||
</StrictMode>,
|
||||
);
|
||||
});
|
||||
101
bench/install/app/entry.server.tsx
Normal file
101
bench/install/app/entry.server.tsx
Normal file
@@ -0,0 +1,101 @@
|
||||
/**
|
||||
* By default, Remix will handle generating the HTTP Response for you.
|
||||
* You are free to delete this file if you'd like to, but if you ever want it revealed again, you can run `npx remix reveal` ✨
|
||||
* For more information, see https://remix.run/docs/en/main/file-conventions/entry.server
|
||||
*/
|
||||
|
||||
import type { EntryContext } from "@remix-run/node";
|
||||
import { Response } from "@remix-run/node";
|
||||
import { RemixServer } from "@remix-run/react";
|
||||
import isbot from "isbot";
|
||||
import { PassThrough } from "node:stream";
|
||||
import { renderToPipeableStream } from "react-dom/server";
|
||||
|
||||
const ABORT_DELAY = 5_000;
|
||||
|
||||
export default function handleRequest(
|
||||
request: Request,
|
||||
responseStatusCode: number,
|
||||
responseHeaders: Headers,
|
||||
remixContext: EntryContext,
|
||||
) {
|
||||
return isbot(request.headers.get("user-agent"))
|
||||
? handleBotRequest(request, responseStatusCode, responseHeaders, remixContext)
|
||||
: handleBrowserRequest(request, responseStatusCode, responseHeaders, remixContext);
|
||||
}
|
||||
|
||||
function handleBotRequest(
|
||||
request: Request,
|
||||
responseStatusCode: number,
|
||||
responseHeaders: Headers,
|
||||
remixContext: EntryContext,
|
||||
) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const { pipe, abort } = renderToPipeableStream(
|
||||
<RemixServer context={remixContext} url={request.url} abortDelay={ABORT_DELAY} />,
|
||||
{
|
||||
onAllReady() {
|
||||
const body = new PassThrough();
|
||||
|
||||
responseHeaders.set("Content-Type", "text/html");
|
||||
|
||||
resolve(
|
||||
new Response(body, {
|
||||
headers: responseHeaders,
|
||||
status: responseStatusCode,
|
||||
}),
|
||||
);
|
||||
|
||||
pipe(body);
|
||||
},
|
||||
onShellError(error: unknown) {
|
||||
reject(error);
|
||||
},
|
||||
onError(error: unknown) {
|
||||
responseStatusCode = 500;
|
||||
console.error(error);
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
setTimeout(abort, ABORT_DELAY);
|
||||
});
|
||||
}
|
||||
|
||||
function handleBrowserRequest(
|
||||
request: Request,
|
||||
responseStatusCode: number,
|
||||
responseHeaders: Headers,
|
||||
remixContext: EntryContext,
|
||||
) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const { pipe, abort } = renderToPipeableStream(
|
||||
<RemixServer context={remixContext} url={request.url} abortDelay={ABORT_DELAY} />,
|
||||
{
|
||||
onShellReady() {
|
||||
const body = new PassThrough();
|
||||
|
||||
responseHeaders.set("Content-Type", "text/html");
|
||||
|
||||
resolve(
|
||||
new Response(body, {
|
||||
headers: responseHeaders,
|
||||
status: responseStatusCode,
|
||||
}),
|
||||
);
|
||||
|
||||
pipe(body);
|
||||
},
|
||||
onShellError(error: unknown) {
|
||||
reject(error);
|
||||
},
|
||||
onError(error: unknown) {
|
||||
console.error(error);
|
||||
responseStatusCode = 500;
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
setTimeout(abort, ABORT_DELAY);
|
||||
});
|
||||
}
|
||||
20
bench/install/app/root.tsx
Normal file
20
bench/install/app/root.tsx
Normal file
@@ -0,0 +1,20 @@
|
||||
import { Links, LiveReload, Meta, Outlet, Scripts, ScrollRestoration } from "@remix-run/react";
|
||||
|
||||
export default function App() {
|
||||
return (
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charSet="utf-8" />
|
||||
<meta name="viewport" content="width=device-width,initial-scale=1" />
|
||||
<Meta />
|
||||
<Links />
|
||||
</head>
|
||||
<body>
|
||||
<Outlet />
|
||||
<ScrollRestoration />
|
||||
<Scripts />
|
||||
<LiveReload />
|
||||
</body>
|
||||
</html>
|
||||
);
|
||||
}
|
||||
30
bench/install/app/routes/_index.tsx
Normal file
30
bench/install/app/routes/_index.tsx
Normal file
@@ -0,0 +1,30 @@
|
||||
import type { V2_MetaFunction } from "@remix-run/node";
|
||||
|
||||
export const meta: V2_MetaFunction = () => {
|
||||
return [{ title: "New Remix App" }];
|
||||
};
|
||||
|
||||
export default function Index() {
|
||||
return (
|
||||
<div style={{ fontFamily: "system-ui, sans-serif", lineHeight: "1.4" }}>
|
||||
<h1>Welcome to Remix</h1>
|
||||
<ul>
|
||||
<li>
|
||||
<a target="_blank" href="https://remix.run/tutorials/blog" rel="noreferrer">
|
||||
15m Quickstart Blog Tutorial
|
||||
</a>
|
||||
</li>
|
||||
<li>
|
||||
<a target="_blank" href="https://remix.run/tutorials/jokes" rel="noreferrer">
|
||||
Deep Dive Jokes App Tutorial
|
||||
</a>
|
||||
</li>
|
||||
<li>
|
||||
<a target="_blank" href="https://remix.run/docs" rel="noreferrer">
|
||||
Remix Docs
|
||||
</a>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -1,488 +0,0 @@
|
||||
{
|
||||
"lockfileVersion": 1,
|
||||
"workspaces": {
|
||||
"": {
|
||||
"name": "installbench",
|
||||
"dependencies": {
|
||||
"@auth/drizzle-adapter": "^1.7.2",
|
||||
"@t3-oss/env-nextjs": "^0.12.0",
|
||||
"@tanstack/react-query": "^5.69.0",
|
||||
"@trpc/client": "^11.0.0",
|
||||
"@trpc/react-query": "^11.0.0",
|
||||
"@trpc/server": "^11.0.0",
|
||||
"drizzle-orm": "^0.41.0",
|
||||
"esbuild": "^0.25.11",
|
||||
"next": "^15.2.3",
|
||||
"next-auth": "5.0.0-beta.25",
|
||||
"postgres": "^3.4.4",
|
||||
"react": "^19.0.0",
|
||||
"react-dom": "^19.0.0",
|
||||
"server-only": "^0.0.1",
|
||||
"superjson": "^2.2.1",
|
||||
"zod": "^3.24.2",
|
||||
},
|
||||
"devDependencies": {
|
||||
"@biomejs/biome": "1.9.4",
|
||||
"@tailwindcss/postcss": "^4.0.15",
|
||||
"@types/node": "^20.14.10",
|
||||
"@types/react": "^19.0.0",
|
||||
"@types/react-dom": "^19.0.0",
|
||||
"drizzle-kit": "^0.30.5",
|
||||
"postcss": "^8.5.3",
|
||||
"tailwindcss": "^4.0.15",
|
||||
"typescript": "^5.8.2",
|
||||
},
|
||||
},
|
||||
},
|
||||
"packages": {
|
||||
"@alloc/quick-lru": ["@alloc/quick-lru@5.2.0", "", {}, "sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw=="],
|
||||
|
||||
"@auth/core": ["@auth/core@0.41.1", "", { "dependencies": { "@panva/hkdf": "1.2.1", "jose": "6.1.0", "oauth4webapi": "3.8.2", "preact": "10.24.3", "preact-render-to-string": "6.5.11" } }, "sha512-t9cJ2zNYAdWMacGRMT6+r4xr1uybIdmYa49calBPeTqwgAFPV/88ac9TEvCR85pvATiSPt8VaNf+Gt24JIT/uw=="],
|
||||
|
||||
"@auth/drizzle-adapter": ["@auth/drizzle-adapter@1.11.1", "", { "dependencies": { "@auth/core": "0.41.1" } }, "sha512-cQTvDZqsyF7RPhDm/B6SvqdVP9EzQhy3oM4Muu7fjjmSYFLbSR203E6dH631ZHSKDn2b4WZkfMnjPDzRsPSAeA=="],
|
||||
|
||||
"@biomejs/biome": ["@biomejs/biome@1.9.4", "", { "optionalDependencies": { "@biomejs/cli-darwin-arm64": "1.9.4", "@biomejs/cli-darwin-x64": "1.9.4", "@biomejs/cli-linux-arm64": "1.9.4", "@biomejs/cli-linux-arm64-musl": "1.9.4", "@biomejs/cli-linux-x64": "1.9.4", "@biomejs/cli-linux-x64-musl": "1.9.4", "@biomejs/cli-win32-arm64": "1.9.4", "@biomejs/cli-win32-x64": "1.9.4" }, "bin": { "biome": "bin/biome" } }, "sha512-1rkd7G70+o9KkTn5KLmDYXihGoTaIGO9PIIN2ZB7UJxFrWw04CZHPYiMRjYsaDvVV7hP1dYNRLxSANLaBFGpog=="],
|
||||
|
||||
"@biomejs/cli-darwin-arm64": ["@biomejs/cli-darwin-arm64@1.9.4", "", { "os": "darwin", "cpu": "arm64" }, "sha512-bFBsPWrNvkdKrNCYeAp+xo2HecOGPAy9WyNyB/jKnnedgzl4W4Hb9ZMzYNbf8dMCGmUdSavlYHiR01QaYR58cw=="],
|
||||
|
||||
"@biomejs/cli-darwin-x64": ["@biomejs/cli-darwin-x64@1.9.4", "", { "os": "darwin", "cpu": "x64" }, "sha512-ngYBh/+bEedqkSevPVhLP4QfVPCpb+4BBe2p7Xs32dBgs7rh9nY2AIYUL6BgLw1JVXV8GlpKmb/hNiuIxfPfZg=="],
|
||||
|
||||
"@biomejs/cli-linux-arm64": ["@biomejs/cli-linux-arm64@1.9.4", "", { "os": "linux", "cpu": "arm64" }, "sha512-fJIW0+LYujdjUgJJuwesP4EjIBl/N/TcOX3IvIHJQNsAqvV2CHIogsmA94BPG6jZATS4Hi+xv4SkBBQSt1N4/g=="],
|
||||
|
||||
"@biomejs/cli-linux-arm64-musl": ["@biomejs/cli-linux-arm64-musl@1.9.4", "", { "os": "linux", "cpu": "arm64" }, "sha512-v665Ct9WCRjGa8+kTr0CzApU0+XXtRgwmzIf1SeKSGAv+2scAlW6JR5PMFo6FzqqZ64Po79cKODKf3/AAmECqA=="],
|
||||
|
||||
"@biomejs/cli-linux-x64": ["@biomejs/cli-linux-x64@1.9.4", "", { "os": "linux", "cpu": "x64" }, "sha512-lRCJv/Vi3Vlwmbd6K+oQ0KhLHMAysN8lXoCI7XeHlxaajk06u7G+UsFSO01NAs5iYuWKmVZjmiOzJ0OJmGsMwg=="],
|
||||
|
||||
"@biomejs/cli-linux-x64-musl": ["@biomejs/cli-linux-x64-musl@1.9.4", "", { "os": "linux", "cpu": "x64" }, "sha512-gEhi/jSBhZ2m6wjV530Yy8+fNqG8PAinM3oV7CyO+6c3CEh16Eizm21uHVsyVBEB6RIM8JHIl6AGYCv6Q6Q9Tg=="],
|
||||
|
||||
"@biomejs/cli-win32-arm64": ["@biomejs/cli-win32-arm64@1.9.4", "", { "os": "win32", "cpu": "arm64" }, "sha512-tlbhLk+WXZmgwoIKwHIHEBZUwxml7bRJgk0X2sPyNR3S93cdRq6XulAZRQJ17FYGGzWne0fgrXBKpl7l4M87Hg=="],
|
||||
|
||||
"@biomejs/cli-win32-x64": ["@biomejs/cli-win32-x64@1.9.4", "", { "os": "win32", "cpu": "x64" }, "sha512-8Y5wMhVIPaWe6jw2H+KlEm4wP/f7EW3810ZLmDlrEEy5KvBsb9ECEfu/kMWD484ijfQ8+nIi0giMgu9g1UAuuA=="],
|
||||
|
||||
"@drizzle-team/brocli": ["@drizzle-team/brocli@0.10.2", "", {}, "sha512-z33Il7l5dKjUgGULTqBsQBQwckHh5AbIuxhdsIxDDiZAzBOrZO6q9ogcWC65kU382AfynTfgNumVcNIjuIua6w=="],
|
||||
|
||||
"@emnapi/runtime": ["@emnapi/runtime@1.6.0", "", { "dependencies": { "tslib": "2.8.1" } }, "sha512-obtUmAHTMjll499P+D9A3axeJFlhdjOWdKUNs/U6QIGT7V5RjcUW1xToAzjvmgTSQhDbYn/NwfTRoJcQ2rNBxA=="],
|
||||
|
||||
"@esbuild-kit/core-utils": ["@esbuild-kit/core-utils@3.3.2", "", { "dependencies": { "esbuild": "0.18.20", "source-map-support": "0.5.21" } }, "sha512-sPRAnw9CdSsRmEtnsl2WXWdyquogVpB3yZ3dgwJfe8zrOzTsV7cJvmwrKVa+0ma5BoiGJ+BoqkMvawbayKUsqQ=="],
|
||||
|
||||
"@esbuild-kit/esm-loader": ["@esbuild-kit/esm-loader@2.6.5", "", { "dependencies": { "@esbuild-kit/core-utils": "3.3.2", "get-tsconfig": "4.13.0" } }, "sha512-FxEMIkJKnodyA1OaCUoEvbYRkoZlLZ4d/eXFu9Fh8CbBBgP5EmZxrfTRyN0qpXZ4vOvqnE5YdRdcrmUUXuU+dA=="],
|
||||
|
||||
"@esbuild/aix-ppc64": ["@esbuild/aix-ppc64@0.25.11", "", { "os": "aix", "cpu": "ppc64" }, "sha512-Xt1dOL13m8u0WE8iplx9Ibbm+hFAO0GsU2P34UNoDGvZYkY8ifSiy6Zuc1lYxfG7svWE2fzqCUmFp5HCn51gJg=="],
|
||||
|
||||
"@esbuild/android-arm": ["@esbuild/android-arm@0.25.11", "", { "os": "android", "cpu": "arm" }, "sha512-uoa7dU+Dt3HYsethkJ1k6Z9YdcHjTrSb5NUy66ZfZaSV8hEYGD5ZHbEMXnqLFlbBflLsl89Zke7CAdDJ4JI+Gg=="],
|
||||
|
||||
"@esbuild/android-arm64": ["@esbuild/android-arm64@0.25.11", "", { "os": "android", "cpu": "arm64" }, "sha512-9slpyFBc4FPPz48+f6jyiXOx/Y4v34TUeDDXJpZqAWQn/08lKGeD8aDp9TMn9jDz2CiEuHwfhRmGBvpnd/PWIQ=="],
|
||||
|
||||
"@esbuild/android-x64": ["@esbuild/android-x64@0.25.11", "", { "os": "android", "cpu": "x64" }, "sha512-Sgiab4xBjPU1QoPEIqS3Xx+R2lezu0LKIEcYe6pftr56PqPygbB7+szVnzoShbx64MUupqoE0KyRlN7gezbl8g=="],
|
||||
|
||||
"@esbuild/darwin-arm64": ["@esbuild/darwin-arm64@0.25.11", "", { "os": "darwin", "cpu": "arm64" }, "sha512-VekY0PBCukppoQrycFxUqkCojnTQhdec0vevUL/EDOCnXd9LKWqD/bHwMPzigIJXPhC59Vd1WFIL57SKs2mg4w=="],
|
||||
|
||||
"@esbuild/darwin-x64": ["@esbuild/darwin-x64@0.25.11", "", { "os": "darwin", "cpu": "x64" }, "sha512-+hfp3yfBalNEpTGp9loYgbknjR695HkqtY3d3/JjSRUyPg/xd6q+mQqIb5qdywnDxRZykIHs3axEqU6l1+oWEQ=="],
|
||||
|
||||
"@esbuild/freebsd-arm64": ["@esbuild/freebsd-arm64@0.25.11", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-CmKjrnayyTJF2eVuO//uSjl/K3KsMIeYeyN7FyDBjsR3lnSJHaXlVoAK8DZa7lXWChbuOk7NjAc7ygAwrnPBhA=="],
|
||||
|
||||
"@esbuild/freebsd-x64": ["@esbuild/freebsd-x64@0.25.11", "", { "os": "freebsd", "cpu": "x64" }, "sha512-Dyq+5oscTJvMaYPvW3x3FLpi2+gSZTCE/1ffdwuM6G1ARang/mb3jvjxs0mw6n3Lsw84ocfo9CrNMqc5lTfGOw=="],
|
||||
|
||||
"@esbuild/linux-arm": ["@esbuild/linux-arm@0.25.11", "", { "os": "linux", "cpu": "arm" }, "sha512-TBMv6B4kCfrGJ8cUPo7vd6NECZH/8hPpBHHlYI3qzoYFvWu2AdTvZNuU/7hsbKWqu/COU7NIK12dHAAqBLLXgw=="],
|
||||
|
||||
"@esbuild/linux-arm64": ["@esbuild/linux-arm64@0.25.11", "", { "os": "linux", "cpu": "arm64" }, "sha512-Qr8AzcplUhGvdyUF08A1kHU3Vr2O88xxP0Tm8GcdVOUm25XYcMPp2YqSVHbLuXzYQMf9Bh/iKx7YPqECs6ffLA=="],
|
||||
|
||||
"@esbuild/linux-ia32": ["@esbuild/linux-ia32@0.25.11", "", { "os": "linux", "cpu": "ia32" }, "sha512-TmnJg8BMGPehs5JKrCLqyWTVAvielc615jbkOirATQvWWB1NMXY77oLMzsUjRLa0+ngecEmDGqt5jiDC6bfvOw=="],
|
||||
|
||||
"@esbuild/linux-loong64": ["@esbuild/linux-loong64@0.25.11", "", { "os": "linux", "cpu": "none" }, "sha512-DIGXL2+gvDaXlaq8xruNXUJdT5tF+SBbJQKbWy/0J7OhU8gOHOzKmGIlfTTl6nHaCOoipxQbuJi7O++ldrxgMw=="],
|
||||
|
||||
"@esbuild/linux-mips64el": ["@esbuild/linux-mips64el@0.25.11", "", { "os": "linux", "cpu": "none" }, "sha512-Osx1nALUJu4pU43o9OyjSCXokFkFbyzjXb6VhGIJZQ5JZi8ylCQ9/LFagolPsHtgw6himDSyb5ETSfmp4rpiKQ=="],
|
||||
|
||||
"@esbuild/linux-ppc64": ["@esbuild/linux-ppc64@0.25.11", "", { "os": "linux", "cpu": "ppc64" }, "sha512-nbLFgsQQEsBa8XSgSTSlrnBSrpoWh7ioFDUmwo158gIm5NNP+17IYmNWzaIzWmgCxq56vfr34xGkOcZ7jX6CPw=="],
|
||||
|
||||
"@esbuild/linux-riscv64": ["@esbuild/linux-riscv64@0.25.11", "", { "os": "linux", "cpu": "none" }, "sha512-HfyAmqZi9uBAbgKYP1yGuI7tSREXwIb438q0nqvlpxAOs3XnZ8RsisRfmVsgV486NdjD7Mw2UrFSw51lzUk1ww=="],
|
||||
|
||||
"@esbuild/linux-s390x": ["@esbuild/linux-s390x@0.25.11", "", { "os": "linux", "cpu": "s390x" }, "sha512-HjLqVgSSYnVXRisyfmzsH6mXqyvj0SA7pG5g+9W7ESgwA70AXYNpfKBqh1KbTxmQVaYxpzA/SvlB9oclGPbApw=="],
|
||||
|
||||
"@esbuild/linux-x64": ["@esbuild/linux-x64@0.25.11", "", { "os": "linux", "cpu": "x64" }, "sha512-HSFAT4+WYjIhrHxKBwGmOOSpphjYkcswF449j6EjsjbinTZbp8PJtjsVK1XFJStdzXdy/jaddAep2FGY+wyFAQ=="],
|
||||
|
||||
"@esbuild/netbsd-arm64": ["@esbuild/netbsd-arm64@0.25.11", "", { "os": "none", "cpu": "arm64" }, "sha512-hr9Oxj1Fa4r04dNpWr3P8QKVVsjQhqrMSUzZzf+LZcYjZNqhA3IAfPQdEh1FLVUJSiu6sgAwp3OmwBfbFgG2Xg=="],
|
||||
|
||||
"@esbuild/netbsd-x64": ["@esbuild/netbsd-x64@0.25.11", "", { "os": "none", "cpu": "x64" }, "sha512-u7tKA+qbzBydyj0vgpu+5h5AeudxOAGncb8N6C9Kh1N4n7wU1Xw1JDApsRjpShRpXRQlJLb9wY28ELpwdPcZ7A=="],
|
||||
|
||||
"@esbuild/openbsd-arm64": ["@esbuild/openbsd-arm64@0.25.11", "", { "os": "openbsd", "cpu": "arm64" }, "sha512-Qq6YHhayieor3DxFOoYM1q0q1uMFYb7cSpLD2qzDSvK1NAvqFi8Xgivv0cFC6J+hWVw2teCYltyy9/m/14ryHg=="],
|
||||
|
||||
"@esbuild/openbsd-x64": ["@esbuild/openbsd-x64@0.25.11", "", { "os": "openbsd", "cpu": "x64" }, "sha512-CN+7c++kkbrckTOz5hrehxWN7uIhFFlmS/hqziSFVWpAzpWrQoAG4chH+nN3Be+Kzv/uuo7zhX716x3Sn2Jduw=="],
|
||||
|
||||
"@esbuild/openharmony-arm64": ["@esbuild/openharmony-arm64@0.25.11", "", { "os": "none", "cpu": "arm64" }, "sha512-rOREuNIQgaiR+9QuNkbkxubbp8MSO9rONmwP5nKncnWJ9v5jQ4JxFnLu4zDSRPf3x4u+2VN4pM4RdyIzDty/wQ=="],
|
||||
|
||||
"@esbuild/sunos-x64": ["@esbuild/sunos-x64@0.25.11", "", { "os": "sunos", "cpu": "x64" }, "sha512-nq2xdYaWxyg9DcIyXkZhcYulC6pQ2FuCgem3LI92IwMgIZ69KHeY8T4Y88pcwoLIjbed8n36CyKoYRDygNSGhA=="],
|
||||
|
||||
"@esbuild/win32-arm64": ["@esbuild/win32-arm64@0.25.11", "", { "os": "win32", "cpu": "arm64" }, "sha512-3XxECOWJq1qMZ3MN8srCJ/QfoLpL+VaxD/WfNRm1O3B4+AZ/BnLVgFbUV3eiRYDMXetciH16dwPbbHqwe1uU0Q=="],
|
||||
|
||||
"@esbuild/win32-ia32": ["@esbuild/win32-ia32@0.25.11", "", { "os": "win32", "cpu": "ia32" }, "sha512-3ukss6gb9XZ8TlRyJlgLn17ecsK4NSQTmdIXRASVsiS2sQ6zPPZklNJT5GR5tE/MUarymmy8kCEf5xPCNCqVOA=="],
|
||||
|
||||
"@esbuild/win32-x64": ["@esbuild/win32-x64@0.25.11", "", { "os": "win32", "cpu": "x64" }, "sha512-D7Hpz6A2L4hzsRpPaCYkQnGOotdUpDzSGRIv9I+1ITdHROSFUWW95ZPZWQmGka1Fg7W3zFJowyn9WGwMJ0+KPA=="],
|
||||
|
||||
"@img/colour": ["@img/colour@1.0.0", "", {}, "sha512-A5P/LfWGFSl6nsckYtjw9da+19jB8hkJ6ACTGcDfEJ0aE+l2n2El7dsVM7UVHZQ9s2lmYMWlrS21YLy2IR1LUw=="],
|
||||
|
||||
"@img/sharp-darwin-arm64": ["@img/sharp-darwin-arm64@0.34.4", "", { "optionalDependencies": { "@img/sharp-libvips-darwin-arm64": "1.2.3" }, "os": "darwin", "cpu": "arm64" }, "sha512-sitdlPzDVyvmINUdJle3TNHl+AG9QcwiAMsXmccqsCOMZNIdW2/7S26w0LyU8euiLVzFBL3dXPwVCq/ODnf2vA=="],
|
||||
|
||||
"@img/sharp-darwin-x64": ["@img/sharp-darwin-x64@0.34.4", "", { "optionalDependencies": { "@img/sharp-libvips-darwin-x64": "1.2.3" }, "os": "darwin", "cpu": "x64" }, "sha512-rZheupWIoa3+SOdF/IcUe1ah4ZDpKBGWcsPX6MT0lYniH9micvIU7HQkYTfrx5Xi8u+YqwLtxC/3vl8TQN6rMg=="],
|
||||
|
||||
"@img/sharp-libvips-darwin-arm64": ["@img/sharp-libvips-darwin-arm64@1.2.3", "", { "os": "darwin", "cpu": "arm64" }, "sha512-QzWAKo7kpHxbuHqUC28DZ9pIKpSi2ts2OJnoIGI26+HMgq92ZZ4vk8iJd4XsxN+tYfNJxzH6W62X5eTcsBymHw=="],
|
||||
|
||||
"@img/sharp-libvips-darwin-x64": ["@img/sharp-libvips-darwin-x64@1.2.3", "", { "os": "darwin", "cpu": "x64" }, "sha512-Ju+g2xn1E2AKO6YBhxjj+ACcsPQRHT0bhpglxcEf+3uyPY+/gL8veniKoo96335ZaPo03bdDXMv0t+BBFAbmRA=="],
|
||||
|
||||
"@img/sharp-libvips-linux-arm": ["@img/sharp-libvips-linux-arm@1.2.3", "", { "os": "linux", "cpu": "arm" }, "sha512-x1uE93lyP6wEwGvgAIV0gP6zmaL/a0tGzJs/BIDDG0zeBhMnuUPm7ptxGhUbcGs4okDJrk4nxgrmxpib9g6HpA=="],
|
||||
|
||||
"@img/sharp-libvips-linux-arm64": ["@img/sharp-libvips-linux-arm64@1.2.3", "", { "os": "linux", "cpu": "arm64" }, "sha512-I4RxkXU90cpufazhGPyVujYwfIm9Nk1QDEmiIsaPwdnm013F7RIceaCc87kAH+oUB1ezqEvC6ga4m7MSlqsJvQ=="],
|
||||
|
||||
"@img/sharp-libvips-linux-ppc64": ["@img/sharp-libvips-linux-ppc64@1.2.3", "", { "os": "linux", "cpu": "ppc64" }, "sha512-Y2T7IsQvJLMCBM+pmPbM3bKT/yYJvVtLJGfCs4Sp95SjvnFIjynbjzsa7dY1fRJX45FTSfDksbTp6AGWudiyCg=="],
|
||||
|
||||
"@img/sharp-libvips-linux-s390x": ["@img/sharp-libvips-linux-s390x@1.2.3", "", { "os": "linux", "cpu": "s390x" }, "sha512-RgWrs/gVU7f+K7P+KeHFaBAJlNkD1nIZuVXdQv6S+fNA6syCcoboNjsV2Pou7zNlVdNQoQUpQTk8SWDHUA3y/w=="],
|
||||
|
||||
"@img/sharp-libvips-linux-x64": ["@img/sharp-libvips-linux-x64@1.2.3", "", { "os": "linux", "cpu": "x64" }, "sha512-3JU7LmR85K6bBiRzSUc/Ff9JBVIFVvq6bomKE0e63UXGeRw2HPVEjoJke1Yx+iU4rL7/7kUjES4dZ/81Qjhyxg=="],
|
||||
|
||||
"@img/sharp-libvips-linuxmusl-arm64": ["@img/sharp-libvips-linuxmusl-arm64@1.2.3", "", { "os": "linux", "cpu": "arm64" }, "sha512-F9q83RZ8yaCwENw1GieztSfj5msz7GGykG/BA+MOUefvER69K/ubgFHNeSyUu64amHIYKGDs4sRCMzXVj8sEyw=="],
|
||||
|
||||
"@img/sharp-libvips-linuxmusl-x64": ["@img/sharp-libvips-linuxmusl-x64@1.2.3", "", { "os": "linux", "cpu": "x64" }, "sha512-U5PUY5jbc45ANM6tSJpsgqmBF/VsL6LnxJmIf11kB7J5DctHgqm0SkuXzVWtIY90GnJxKnC/JT251TDnk1fu/g=="],
|
||||
|
||||
"@img/sharp-linux-arm": ["@img/sharp-linux-arm@0.34.4", "", { "optionalDependencies": { "@img/sharp-libvips-linux-arm": "1.2.3" }, "os": "linux", "cpu": "arm" }, "sha512-Xyam4mlqM0KkTHYVSuc6wXRmM7LGN0P12li03jAnZ3EJWZqj83+hi8Y9UxZUbxsgsK1qOEwg7O0Bc0LjqQVtxA=="],
|
||||
|
||||
"@img/sharp-linux-arm64": ["@img/sharp-linux-arm64@0.34.4", "", { "optionalDependencies": { "@img/sharp-libvips-linux-arm64": "1.2.3" }, "os": "linux", "cpu": "arm64" }, "sha512-YXU1F/mN/Wu786tl72CyJjP/Ngl8mGHN1hST4BGl+hiW5jhCnV2uRVTNOcaYPs73NeT/H8Upm3y9582JVuZHrQ=="],
|
||||
|
||||
"@img/sharp-linux-ppc64": ["@img/sharp-linux-ppc64@0.34.4", "", { "optionalDependencies": { "@img/sharp-libvips-linux-ppc64": "1.2.3" }, "os": "linux", "cpu": "ppc64" }, "sha512-F4PDtF4Cy8L8hXA2p3TO6s4aDt93v+LKmpcYFLAVdkkD3hSxZzee0rh6/+94FpAynsuMpLX5h+LRsSG3rIciUQ=="],
|
||||
|
||||
"@img/sharp-linux-s390x": ["@img/sharp-linux-s390x@0.34.4", "", { "optionalDependencies": { "@img/sharp-libvips-linux-s390x": "1.2.3" }, "os": "linux", "cpu": "s390x" }, "sha512-qVrZKE9Bsnzy+myf7lFKvng6bQzhNUAYcVORq2P7bDlvmF6u2sCmK2KyEQEBdYk+u3T01pVsPrkj943T1aJAsw=="],
|
||||
|
||||
"@img/sharp-linux-x64": ["@img/sharp-linux-x64@0.34.4", "", { "optionalDependencies": { "@img/sharp-libvips-linux-x64": "1.2.3" }, "os": "linux", "cpu": "x64" }, "sha512-ZfGtcp2xS51iG79c6Vhw9CWqQC8l2Ot8dygxoDoIQPTat/Ov3qAa8qpxSrtAEAJW+UjTXc4yxCjNfxm4h6Xm2A=="],
|
||||
|
||||
"@img/sharp-linuxmusl-arm64": ["@img/sharp-linuxmusl-arm64@0.34.4", "", { "optionalDependencies": { "@img/sharp-libvips-linuxmusl-arm64": "1.2.3" }, "os": "linux", "cpu": "arm64" }, "sha512-8hDVvW9eu4yHWnjaOOR8kHVrew1iIX+MUgwxSuH2XyYeNRtLUe4VNioSqbNkB7ZYQJj9rUTT4PyRscyk2PXFKA=="],
|
||||
|
||||
"@img/sharp-linuxmusl-x64": ["@img/sharp-linuxmusl-x64@0.34.4", "", { "optionalDependencies": { "@img/sharp-libvips-linuxmusl-x64": "1.2.3" }, "os": "linux", "cpu": "x64" }, "sha512-lU0aA5L8QTlfKjpDCEFOZsTYGn3AEiO6db8W5aQDxj0nQkVrZWmN3ZP9sYKWJdtq3PWPhUNlqehWyXpYDcI9Sg=="],
|
||||
|
||||
"@img/sharp-wasm32": ["@img/sharp-wasm32@0.34.4", "", { "dependencies": { "@emnapi/runtime": "1.6.0" }, "cpu": "none" }, "sha512-33QL6ZO/qpRyG7woB/HUALz28WnTMI2W1jgX3Nu2bypqLIKx/QKMILLJzJjI+SIbvXdG9fUnmrxR7vbi1sTBeA=="],
|
||||
|
||||
"@img/sharp-win32-arm64": ["@img/sharp-win32-arm64@0.34.4", "", { "os": "win32", "cpu": "arm64" }, "sha512-2Q250do/5WXTwxW3zjsEuMSv5sUU4Tq9VThWKlU2EYLm4MB7ZeMwF+SFJutldYODXF6jzc6YEOC+VfX0SZQPqA=="],
|
||||
|
||||
"@img/sharp-win32-ia32": ["@img/sharp-win32-ia32@0.34.4", "", { "os": "win32", "cpu": "ia32" }, "sha512-3ZeLue5V82dT92CNL6rsal6I2weKw1cYu+rGKm8fOCCtJTR2gYeUfY3FqUnIJsMUPIH68oS5jmZ0NiJ508YpEw=="],
|
||||
|
||||
"@img/sharp-win32-x64": ["@img/sharp-win32-x64@0.34.4", "", { "os": "win32", "cpu": "x64" }, "sha512-xIyj4wpYs8J18sVN3mSQjwrw7fKUqRw+Z5rnHNCy5fYTxigBz81u5mOMPmFumwjcn8+ld1ppptMBCLic1nz6ig=="],
|
||||
|
||||
"@jridgewell/gen-mapping": ["@jridgewell/gen-mapping@0.3.13", "", { "dependencies": { "@jridgewell/sourcemap-codec": "1.5.5", "@jridgewell/trace-mapping": "0.3.31" } }, "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA=="],
|
||||
|
||||
"@jridgewell/remapping": ["@jridgewell/remapping@2.3.5", "", { "dependencies": { "@jridgewell/gen-mapping": "0.3.13", "@jridgewell/trace-mapping": "0.3.31" } }, "sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ=="],
|
||||
|
||||
"@jridgewell/resolve-uri": ["@jridgewell/resolve-uri@3.1.2", "", {}, "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw=="],
|
||||
|
||||
"@jridgewell/sourcemap-codec": ["@jridgewell/sourcemap-codec@1.5.5", "", {}, "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og=="],
|
||||
|
||||
"@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.31", "", { "dependencies": { "@jridgewell/resolve-uri": "3.1.2", "@jridgewell/sourcemap-codec": "1.5.5" } }, "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw=="],
|
||||
|
||||
"@next/env": ["@next/env@15.5.6", "", {}, "sha512-3qBGRW+sCGzgbpc5TS1a0p7eNxnOarGVQhZxfvTdnV0gFI61lX7QNtQ4V1TSREctXzYn5NetbUsLvyqwLFJM6Q=="],
|
||||
|
||||
"@next/swc-darwin-arm64": ["@next/swc-darwin-arm64@15.5.6", "", { "os": "darwin", "cpu": "arm64" }, "sha512-ES3nRz7N+L5Umz4KoGfZ4XX6gwHplwPhioVRc25+QNsDa7RtUF/z8wJcbuQ2Tffm5RZwuN2A063eapoJ1u4nPg=="],
|
||||
|
||||
"@next/swc-darwin-x64": ["@next/swc-darwin-x64@15.5.6", "", { "os": "darwin", "cpu": "x64" }, "sha512-JIGcytAyk9LQp2/nuVZPAtj8uaJ/zZhsKOASTjxDug0SPU9LAM3wy6nPU735M1OqacR4U20LHVF5v5Wnl9ptTA=="],
|
||||
|
||||
"@next/swc-linux-arm64-gnu": ["@next/swc-linux-arm64-gnu@15.5.6", "", { "os": "linux", "cpu": "arm64" }, "sha512-qvz4SVKQ0P3/Im9zcS2RmfFL/UCQnsJKJwQSkissbngnB/12c6bZTCB0gHTexz1s6d/mD0+egPKXAIRFVS7hQg=="],
|
||||
|
||||
"@next/swc-linux-arm64-musl": ["@next/swc-linux-arm64-musl@15.5.6", "", { "os": "linux", "cpu": "arm64" }, "sha512-FsbGVw3SJz1hZlvnWD+T6GFgV9/NYDeLTNQB2MXoPN5u9VA9OEDy6fJEfePfsUKAhJufFbZLgp0cPxMuV6SV0w=="],
|
||||
|
||||
"@next/swc-linux-x64-gnu": ["@next/swc-linux-x64-gnu@15.5.6", "", { "os": "linux", "cpu": "x64" }, "sha512-3QnHGFWlnvAgyxFxt2Ny8PTpXtQD7kVEeaFat5oPAHHI192WKYB+VIKZijtHLGdBBvc16tiAkPTDmQNOQ0dyrA=="],
|
||||
|
||||
"@next/swc-linux-x64-musl": ["@next/swc-linux-x64-musl@15.5.6", "", { "os": "linux", "cpu": "x64" }, "sha512-OsGX148sL+TqMK9YFaPFPoIaJKbFJJxFzkXZljIgA9hjMjdruKht6xDCEv1HLtlLNfkx3c5w2GLKhj7veBQizQ=="],
|
||||
|
||||
"@next/swc-win32-arm64-msvc": ["@next/swc-win32-arm64-msvc@15.5.6", "", { "os": "win32", "cpu": "arm64" }, "sha512-ONOMrqWxdzXDJNh2n60H6gGyKed42Ieu6UTVPZteXpuKbLZTH4G4eBMsr5qWgOBA+s7F+uB4OJbZnrkEDnZ5Fg=="],
|
||||
|
||||
"@next/swc-win32-x64-msvc": ["@next/swc-win32-x64-msvc@15.5.6", "", { "os": "win32", "cpu": "x64" }, "sha512-pxK4VIjFRx1MY92UycLOOw7dTdvccWsNETQ0kDHkBlcFH1GrTLUjSiHU1ohrznnux6TqRHgv5oflhfIWZwVROQ=="],
|
||||
|
||||
"@panva/hkdf": ["@panva/hkdf@1.2.1", "", {}, "sha512-6oclG6Y3PiDFcoyk8srjLfVKyMfVCKJ27JwNPViuXziFpmdz+MZnZN/aKY0JGXgYuO/VghU0jcOAZgWXZ1Dmrw=="],
|
||||
|
||||
"@petamoriken/float16": ["@petamoriken/float16@3.9.3", "", {}, "sha512-8awtpHXCx/bNpFt4mt2xdkgtgVvKqty8VbjHI/WWWQuEw+KLzFot3f4+LkQY9YmOtq7A5GdOnqoIC8Pdygjk2g=="],
|
||||
|
||||
"@swc/helpers": ["@swc/helpers@0.5.15", "", { "dependencies": { "tslib": "2.8.1" } }, "sha512-JQ5TuMi45Owi4/BIMAJBoSQoOJu12oOk/gADqlcUL9JEdHB8vyjUSsxqeNXnmXHjYKMi2WcYtezGEEhqUI/E2g=="],
|
||||
|
||||
"@t3-oss/env-core": ["@t3-oss/env-core@0.12.0", "", { "optionalDependencies": { "typescript": "5.9.3", "zod": "3.25.76" } }, "sha512-lOPj8d9nJJTt81mMuN9GMk8x5veOt7q9m11OSnCBJhwp1QrL/qR+M8Y467ULBSm9SunosryWNbmQQbgoiMgcdw=="],
|
||||
|
||||
"@t3-oss/env-nextjs": ["@t3-oss/env-nextjs@0.12.0", "", { "dependencies": { "@t3-oss/env-core": "0.12.0" }, "optionalDependencies": { "typescript": "5.9.3", "zod": "3.25.76" } }, "sha512-rFnvYk1049RnNVUPvY8iQ55AuQh1Rr+qZzQBh3t++RttCGK4COpXGNxS4+45afuQq02lu+QAOy/5955aU8hRKw=="],
|
||||
|
||||
"@tailwindcss/node": ["@tailwindcss/node@4.1.16", "", { "dependencies": { "@jridgewell/remapping": "2.3.5", "enhanced-resolve": "5.18.3", "jiti": "2.6.1", "lightningcss": "1.30.2", "magic-string": "0.30.21", "source-map-js": "1.2.1", "tailwindcss": "4.1.16" } }, "sha512-BX5iaSsloNuvKNHRN3k2RcCuTEgASTo77mofW0vmeHkfrDWaoFAFvNHpEgtu0eqyypcyiBkDWzSMxJhp3AUVcw=="],
|
||||
|
||||
"@tailwindcss/oxide": ["@tailwindcss/oxide@4.1.16", "", { "optionalDependencies": { "@tailwindcss/oxide-android-arm64": "4.1.16", "@tailwindcss/oxide-darwin-arm64": "4.1.16", "@tailwindcss/oxide-darwin-x64": "4.1.16", "@tailwindcss/oxide-freebsd-x64": "4.1.16", "@tailwindcss/oxide-linux-arm-gnueabihf": "4.1.16", "@tailwindcss/oxide-linux-arm64-gnu": "4.1.16", "@tailwindcss/oxide-linux-arm64-musl": "4.1.16", "@tailwindcss/oxide-linux-x64-gnu": "4.1.16", "@tailwindcss/oxide-linux-x64-musl": "4.1.16", "@tailwindcss/oxide-wasm32-wasi": "4.1.16", "@tailwindcss/oxide-win32-arm64-msvc": "4.1.16", "@tailwindcss/oxide-win32-x64-msvc": "4.1.16" } }, "sha512-2OSv52FRuhdlgyOQqgtQHuCgXnS8nFSYRp2tJ+4WZXKgTxqPy7SMSls8c3mPT5pkZ17SBToGM5LHEJBO7miEdg=="],
|
||||
|
||||
"@tailwindcss/oxide-android-arm64": ["@tailwindcss/oxide-android-arm64@4.1.16", "", { "os": "android", "cpu": "arm64" }, "sha512-8+ctzkjHgwDJ5caq9IqRSgsP70xhdhJvm+oueS/yhD5ixLhqTw9fSL1OurzMUhBwE5zK26FXLCz2f/RtkISqHA=="],
|
||||
|
||||
"@tailwindcss/oxide-darwin-arm64": ["@tailwindcss/oxide-darwin-arm64@4.1.16", "", { "os": "darwin", "cpu": "arm64" }, "sha512-C3oZy5042v2FOALBZtY0JTDnGNdS6w7DxL/odvSny17ORUnaRKhyTse8xYi3yKGyfnTUOdavRCdmc8QqJYwFKA=="],
|
||||
|
||||
"@tailwindcss/oxide-darwin-x64": ["@tailwindcss/oxide-darwin-x64@4.1.16", "", { "os": "darwin", "cpu": "x64" }, "sha512-vjrl/1Ub9+JwU6BP0emgipGjowzYZMjbWCDqwA2Z4vCa+HBSpP4v6U2ddejcHsolsYxwL5r4bPNoamlV0xDdLg=="],
|
||||
|
||||
"@tailwindcss/oxide-freebsd-x64": ["@tailwindcss/oxide-freebsd-x64@4.1.16", "", { "os": "freebsd", "cpu": "x64" }, "sha512-TSMpPYpQLm+aR1wW5rKuUuEruc/oOX3C7H0BTnPDn7W/eMw8W+MRMpiypKMkXZfwH8wqPIRKppuZoedTtNj2tg=="],
|
||||
|
||||
"@tailwindcss/oxide-linux-arm-gnueabihf": ["@tailwindcss/oxide-linux-arm-gnueabihf@4.1.16", "", { "os": "linux", "cpu": "arm" }, "sha512-p0GGfRg/w0sdsFKBjMYvvKIiKy/LNWLWgV/plR4lUgrsxFAoQBFrXkZ4C0w8IOXfslB9vHK/JGASWD2IefIpvw=="],
|
||||
|
||||
"@tailwindcss/oxide-linux-arm64-gnu": ["@tailwindcss/oxide-linux-arm64-gnu@4.1.16", "", { "os": "linux", "cpu": "arm64" }, "sha512-DoixyMmTNO19rwRPdqviTrG1rYzpxgyYJl8RgQvdAQUzxC1ToLRqtNJpU/ATURSKgIg6uerPw2feW0aS8SNr/w=="],
|
||||
|
||||
"@tailwindcss/oxide-linux-arm64-musl": ["@tailwindcss/oxide-linux-arm64-musl@4.1.16", "", { "os": "linux", "cpu": "arm64" }, "sha512-H81UXMa9hJhWhaAUca6bU2wm5RRFpuHImrwXBUvPbYb+3jo32I9VIwpOX6hms0fPmA6f2pGVlybO6qU8pF4fzQ=="],
|
||||
|
||||
"@tailwindcss/oxide-linux-x64-gnu": ["@tailwindcss/oxide-linux-x64-gnu@4.1.16", "", { "os": "linux", "cpu": "x64" }, "sha512-ZGHQxDtFC2/ruo7t99Qo2TTIvOERULPl5l0K1g0oK6b5PGqjYMga+FcY1wIUnrUxY56h28FxybtDEla+ICOyew=="],
|
||||
|
||||
"@tailwindcss/oxide-linux-x64-musl": ["@tailwindcss/oxide-linux-x64-musl@4.1.16", "", { "os": "linux", "cpu": "x64" }, "sha512-Oi1tAaa0rcKf1Og9MzKeINZzMLPbhxvm7rno5/zuP1WYmpiG0bEHq4AcRUiG2165/WUzvxkW4XDYCscZWbTLZw=="],
|
||||
|
||||
"@tailwindcss/oxide-wasm32-wasi": ["@tailwindcss/oxide-wasm32-wasi@4.1.16", "", { "cpu": "none" }, "sha512-B01u/b8LteGRwucIBmCQ07FVXLzImWESAIMcUU6nvFt/tYsQ6IHz8DmZ5KtvmwxD+iTYBtM1xwoGXswnlu9v0Q=="],
|
||||
|
||||
"@tailwindcss/oxide-win32-arm64-msvc": ["@tailwindcss/oxide-win32-arm64-msvc@4.1.16", "", { "os": "win32", "cpu": "arm64" }, "sha512-zX+Q8sSkGj6HKRTMJXuPvOcP8XfYON24zJBRPlszcH1Np7xuHXhWn8qfFjIujVzvH3BHU+16jBXwgpl20i+v9A=="],
|
||||
|
||||
"@tailwindcss/oxide-win32-x64-msvc": ["@tailwindcss/oxide-win32-x64-msvc@4.1.16", "", { "os": "win32", "cpu": "x64" }, "sha512-m5dDFJUEejbFqP+UXVstd4W/wnxA4F61q8SoL+mqTypId2T2ZpuxosNSgowiCnLp2+Z+rivdU0AqpfgiD7yCBg=="],
|
||||
|
||||
"@tailwindcss/postcss": ["@tailwindcss/postcss@4.1.16", "", { "dependencies": { "@alloc/quick-lru": "5.2.0", "@tailwindcss/node": "4.1.16", "@tailwindcss/oxide": "4.1.16", "postcss": "8.5.6", "tailwindcss": "4.1.16" } }, "sha512-Qn3SFGPXYQMKR/UtqS+dqvPrzEeBZHrFA92maT4zijCVggdsXnDBMsPFJo1eArX3J+O+Gi+8pV4PkqjLCNBk3A=="],
|
||||
|
||||
"@tanstack/query-core": ["@tanstack/query-core@5.90.5", "", {}, "sha512-wLamYp7FaDq6ZnNehypKI5fNvxHPfTYylE0m/ZpuuzJfJqhR5Pxg9gvGBHZx4n7J+V5Rg5mZxHHTlv25Zt5u+w=="],
|
||||
|
||||
"@tanstack/react-query": ["@tanstack/react-query@5.90.5", "", { "dependencies": { "@tanstack/query-core": "5.90.5" }, "peerDependencies": { "react": "19.2.0" } }, "sha512-pN+8UWpxZkEJ/Rnnj2v2Sxpx1WFlaa9L6a4UO89p6tTQbeo+m0MS8oYDjbggrR8QcTyjKoYWKS3xJQGr3ExT8Q=="],
|
||||
|
||||
"@trpc/client": ["@trpc/client@11.7.1", "", { "peerDependencies": { "@trpc/server": "11.7.1", "typescript": "5.9.3" } }, "sha512-uOnAjElKI892/U6aQMcBHYs3x7mme3Cvv1F87ytBL56rBvs7+DyK7r43zgaXKf13+GtPEI6ex5xjVUfyDW8XcQ=="],
|
||||
|
||||
"@trpc/react-query": ["@trpc/react-query@11.7.1", "", { "peerDependencies": { "@tanstack/react-query": "5.90.5", "@trpc/client": "11.7.1", "@trpc/server": "11.7.1", "react": "19.2.0", "react-dom": "19.2.0", "typescript": "5.9.3" } }, "sha512-dEHDjIqSTzO8nLlCbtiFBMBwhbSkK1QP7aYVo3nP3sYBna0b+iCtrPXdxVPCSopr9/aIqDTEh+dMRZa7yBgjfQ=="],
|
||||
|
||||
"@trpc/server": ["@trpc/server@11.7.1", "", { "peerDependencies": { "typescript": "5.9.3" } }, "sha512-N3U8LNLIP4g9C7LJ/sLkjuPHwqlvE3bnspzC4DEFVdvx2+usbn70P80E3wj5cjOTLhmhRiwJCSXhlB+MHfGeCw=="],
|
||||
|
||||
"@types/cookie": ["@types/cookie@0.6.0", "", {}, "sha512-4Kh9a6B2bQciAhf7FSuMRRkUWecJgJu9nPnx3yzpsfXX/c50REIqpHY4C82bXP90qrLtXtkDxTZosYO3UpOwlA=="],
|
||||
|
||||
"@types/node": ["@types/node@20.19.24", "", { "dependencies": { "undici-types": "6.21.0" } }, "sha512-FE5u0ezmi6y9OZEzlJfg37mqqf6ZDSF2V/NLjUyGrR9uTZ7Sb9F7bLNZ03S4XVUNRWGA7Ck4c1kK+YnuWjl+DA=="],
|
||||
|
||||
"@types/react": ["@types/react@19.2.2", "", { "dependencies": { "csstype": "3.1.3" } }, "sha512-6mDvHUFSjyT2B2yeNx2nUgMxh9LtOWvkhIU3uePn2I2oyNymUAX1NIsdgviM4CH+JSrp2D2hsMvJOkxY+0wNRA=="],
|
||||
|
||||
"@types/react-dom": ["@types/react-dom@19.2.2", "", { "peerDependencies": { "@types/react": "19.2.2" } }, "sha512-9KQPoO6mZCi7jcIStSnlOWn2nEF3mNmyr3rIAsGnAbQKYbRLyqmeSc39EVgtxXVia+LMT8j3knZLAZAh+xLmrw=="],
|
||||
|
||||
"buffer-from": ["buffer-from@1.1.2", "", {}, "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ=="],
|
||||
|
||||
"caniuse-lite": ["caniuse-lite@1.0.30001752", "", {}, "sha512-vKUk7beoukxE47P5gcVNKkDRzXdVofotshHwfR9vmpeFKxmI5PBpgOMC18LUJUA/DvJ70Y7RveasIBraqsyO/g=="],
|
||||
|
||||
"client-only": ["client-only@0.0.1", "", {}, "sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA=="],
|
||||
|
||||
"cookie": ["cookie@0.7.1", "", {}, "sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w=="],
|
||||
|
||||
"copy-anything": ["copy-anything@4.0.5", "", { "dependencies": { "is-what": "5.5.0" } }, "sha512-7Vv6asjS4gMOuILabD3l739tsaxFQmC+a7pLZm02zyvs8p977bL3zEgq3yDk5rn9B0PbYgIv++jmHcuUab4RhA=="],
|
||||
|
||||
"csstype": ["csstype@3.1.3", "", {}, "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw=="],
|
||||
|
||||
"debug": ["debug@4.4.3", "", { "dependencies": { "ms": "2.1.3" } }, "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA=="],
|
||||
|
||||
"detect-libc": ["detect-libc@2.1.2", "", {}, "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ=="],
|
||||
|
||||
"drizzle-kit": ["drizzle-kit@0.30.6", "", { "dependencies": { "@drizzle-team/brocli": "0.10.2", "@esbuild-kit/esm-loader": "2.6.5", "esbuild": "0.19.12", "esbuild-register": "3.6.0", "gel": "2.1.1" }, "bin": { "drizzle-kit": "bin.cjs" } }, "sha512-U4wWit0fyZuGuP7iNmRleQyK2V8wCuv57vf5l3MnG4z4fzNTjY/U13M8owyQ5RavqvqxBifWORaR3wIUzlN64g=="],
|
||||
|
||||
"drizzle-orm": ["drizzle-orm@0.41.0", "", { "optionalDependencies": { "gel": "2.1.1", "postgres": "3.4.7" } }, "sha512-7A4ZxhHk9gdlXmTdPj/lREtP+3u8KvZ4yEN6MYVxBzZGex5Wtdc+CWSbu7btgF6TB0N+MNPrvW7RKBbxJchs/Q=="],
|
||||
|
||||
"enhanced-resolve": ["enhanced-resolve@5.18.3", "", { "dependencies": { "graceful-fs": "4.2.11", "tapable": "2.3.0" } }, "sha512-d4lC8xfavMeBjzGr2vECC3fsGXziXZQyJxD868h2M/mBI3PwAuODxAkLkq5HYuvrPYcUtiLzsTo8U3PgX3Ocww=="],
|
||||
|
||||
"env-paths": ["env-paths@3.0.0", "", {}, "sha512-dtJUTepzMW3Lm/NPxRf3wP4642UWhjL2sQxc+ym2YMj1m/H2zDNQOlezafzkHwn6sMstjHTwG6iQQsctDW/b1A=="],
|
||||
|
||||
"esbuild": ["esbuild@0.25.11", "", { "optionalDependencies": { "@esbuild/aix-ppc64": "0.25.11", "@esbuild/android-arm": "0.25.11", "@esbuild/android-arm64": "0.25.11", "@esbuild/android-x64": "0.25.11", "@esbuild/darwin-arm64": "0.25.11", "@esbuild/darwin-x64": "0.25.11", "@esbuild/freebsd-arm64": "0.25.11", "@esbuild/freebsd-x64": "0.25.11", "@esbuild/linux-arm": "0.25.11", "@esbuild/linux-arm64": "0.25.11", "@esbuild/linux-ia32": "0.25.11", "@esbuild/linux-loong64": "0.25.11", "@esbuild/linux-mips64el": "0.25.11", "@esbuild/linux-ppc64": "0.25.11", "@esbuild/linux-riscv64": "0.25.11", "@esbuild/linux-s390x": "0.25.11", "@esbuild/linux-x64": "0.25.11", "@esbuild/netbsd-arm64": "0.25.11", "@esbuild/netbsd-x64": "0.25.11", "@esbuild/openbsd-arm64": "0.25.11", "@esbuild/openbsd-x64": "0.25.11", "@esbuild/openharmony-arm64": "0.25.11", "@esbuild/sunos-x64": "0.25.11", "@esbuild/win32-arm64": "0.25.11", "@esbuild/win32-ia32": "0.25.11", "@esbuild/win32-x64": "0.25.11" }, "bin": { "esbuild": "bin/esbuild" } }, "sha512-KohQwyzrKTQmhXDW1PjCv3Tyspn9n5GcY2RTDqeORIdIJY8yKIF7sTSopFmn/wpMPW4rdPXI0UE5LJLuq3bx0Q=="],
|
||||
|
||||
"esbuild-register": ["esbuild-register@3.6.0", "", { "dependencies": { "debug": "4.4.3" }, "peerDependencies": { "esbuild": "0.19.12" } }, "sha512-H2/S7Pm8a9CL1uhp9OvjwrBh5Pvx0H8qVOxNu8Wed9Y7qv56MPtq+GGM8RJpq6glYJn9Wspr8uw7l55uyinNeg=="],
|
||||
|
||||
"gel": ["gel@2.1.1", "", { "dependencies": { "@petamoriken/float16": "3.9.3", "debug": "4.4.3", "env-paths": "3.0.0", "semver": "7.7.3", "shell-quote": "1.8.3", "which": "4.0.0" }, "bin": { "gel": "dist/cli.mjs" } }, "sha512-Newg9X7mRYskoBjSw70l1YnJ/ZGbq64VPyR821H5WVkTGpHG2O0mQILxCeUhxdYERLFY9B4tUyKLyf3uMTjtKw=="],
|
||||
|
||||
"get-tsconfig": ["get-tsconfig@4.13.0", "", { "dependencies": { "resolve-pkg-maps": "1.0.0" } }, "sha512-1VKTZJCwBrvbd+Wn3AOgQP/2Av+TfTCOlE4AcRJE72W1ksZXbAx8PPBR9RzgTeSPzlPMHrbANMH3LbltH73wxQ=="],
|
||||
|
||||
"graceful-fs": ["graceful-fs@4.2.11", "", {}, "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ=="],
|
||||
|
||||
"is-what": ["is-what@5.5.0", "", {}, "sha512-oG7cgbmg5kLYae2N5IVd3jm2s+vldjxJzK1pcu9LfpGuQ93MQSzo0okvRna+7y5ifrD+20FE8FvjusyGaz14fw=="],
|
||||
|
||||
"isexe": ["isexe@3.1.1", "", {}, "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ=="],
|
||||
|
||||
"jiti": ["jiti@2.6.1", "", { "bin": { "jiti": "lib/jiti-cli.mjs" } }, "sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ=="],
|
||||
|
||||
"jose": ["jose@6.1.0", "", {}, "sha512-TTQJyoEoKcC1lscpVDCSsVgYzUDg/0Bt3WE//WiTPK6uOCQC2KZS4MpugbMWt/zyjkopgZoXhZuCi00gLudfUA=="],
|
||||
|
||||
"lightningcss": ["lightningcss@1.30.2", "", { "dependencies": { "detect-libc": "2.1.2" }, "optionalDependencies": { "lightningcss-android-arm64": "1.30.2", "lightningcss-darwin-arm64": "1.30.2", "lightningcss-darwin-x64": "1.30.2", "lightningcss-freebsd-x64": "1.30.2", "lightningcss-linux-arm-gnueabihf": "1.30.2", "lightningcss-linux-arm64-gnu": "1.30.2", "lightningcss-linux-arm64-musl": "1.30.2", "lightningcss-linux-x64-gnu": "1.30.2", "lightningcss-linux-x64-musl": "1.30.2", "lightningcss-win32-arm64-msvc": "1.30.2", "lightningcss-win32-x64-msvc": "1.30.2" } }, "sha512-utfs7Pr5uJyyvDETitgsaqSyjCb2qNRAtuqUeWIAKztsOYdcACf2KtARYXg2pSvhkt+9NfoaNY7fxjl6nuMjIQ=="],
|
||||
|
||||
"lightningcss-android-arm64": ["lightningcss-android-arm64@1.30.2", "", { "os": "android", "cpu": "arm64" }, "sha512-BH9sEdOCahSgmkVhBLeU7Hc9DWeZ1Eb6wNS6Da8igvUwAe0sqROHddIlvU06q3WyXVEOYDZ6ykBZQnjTbmo4+A=="],
|
||||
|
||||
"lightningcss-darwin-arm64": ["lightningcss-darwin-arm64@1.30.2", "", { "os": "darwin", "cpu": "arm64" }, "sha512-ylTcDJBN3Hp21TdhRT5zBOIi73P6/W0qwvlFEk22fkdXchtNTOU4Qc37SkzV+EKYxLouZ6M4LG9NfZ1qkhhBWA=="],
|
||||
|
||||
"lightningcss-darwin-x64": ["lightningcss-darwin-x64@1.30.2", "", { "os": "darwin", "cpu": "x64" }, "sha512-oBZgKchomuDYxr7ilwLcyms6BCyLn0z8J0+ZZmfpjwg9fRVZIR5/GMXd7r9RH94iDhld3UmSjBM6nXWM2TfZTQ=="],
|
||||
|
||||
"lightningcss-freebsd-x64": ["lightningcss-freebsd-x64@1.30.2", "", { "os": "freebsd", "cpu": "x64" }, "sha512-c2bH6xTrf4BDpK8MoGG4Bd6zAMZDAXS569UxCAGcA7IKbHNMlhGQ89eRmvpIUGfKWNVdbhSbkQaWhEoMGmGslA=="],
|
||||
|
||||
"lightningcss-linux-arm-gnueabihf": ["lightningcss-linux-arm-gnueabihf@1.30.2", "", { "os": "linux", "cpu": "arm" }, "sha512-eVdpxh4wYcm0PofJIZVuYuLiqBIakQ9uFZmipf6LF/HRj5Bgm0eb3qL/mr1smyXIS1twwOxNWndd8z0E374hiA=="],
|
||||
|
||||
"lightningcss-linux-arm64-gnu": ["lightningcss-linux-arm64-gnu@1.30.2", "", { "os": "linux", "cpu": "arm64" }, "sha512-UK65WJAbwIJbiBFXpxrbTNArtfuznvxAJw4Q2ZGlU8kPeDIWEX1dg3rn2veBVUylA2Ezg89ktszWbaQnxD/e3A=="],
|
||||
|
||||
"lightningcss-linux-arm64-musl": ["lightningcss-linux-arm64-musl@1.30.2", "", { "os": "linux", "cpu": "arm64" }, "sha512-5Vh9dGeblpTxWHpOx8iauV02popZDsCYMPIgiuw97OJ5uaDsL86cnqSFs5LZkG3ghHoX5isLgWzMs+eD1YzrnA=="],
|
||||
|
||||
"lightningcss-linux-x64-gnu": ["lightningcss-linux-x64-gnu@1.30.2", "", { "os": "linux", "cpu": "x64" }, "sha512-Cfd46gdmj1vQ+lR6VRTTadNHu6ALuw2pKR9lYq4FnhvgBc4zWY1EtZcAc6EffShbb1MFrIPfLDXD6Xprbnni4w=="],
|
||||
|
||||
"lightningcss-linux-x64-musl": ["lightningcss-linux-x64-musl@1.30.2", "", { "os": "linux", "cpu": "x64" }, "sha512-XJaLUUFXb6/QG2lGIW6aIk6jKdtjtcffUT0NKvIqhSBY3hh9Ch+1LCeH80dR9q9LBjG3ewbDjnumefsLsP6aiA=="],
|
||||
|
||||
"lightningcss-win32-arm64-msvc": ["lightningcss-win32-arm64-msvc@1.30.2", "", { "os": "win32", "cpu": "arm64" }, "sha512-FZn+vaj7zLv//D/192WFFVA0RgHawIcHqLX9xuWiQt7P0PtdFEVaxgF9rjM/IRYHQXNnk61/H/gb2Ei+kUQ4xQ=="],
|
||||
|
||||
"lightningcss-win32-x64-msvc": ["lightningcss-win32-x64-msvc@1.30.2", "", { "os": "win32", "cpu": "x64" }, "sha512-5g1yc73p+iAkid5phb4oVFMB45417DkRevRbt/El/gKXJk4jid+vPFF/AXbxn05Aky8PapwzZrdJShv5C0avjw=="],
|
||||
|
||||
"magic-string": ["magic-string@0.30.21", "", { "dependencies": { "@jridgewell/sourcemap-codec": "1.5.5" } }, "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ=="],
|
||||
|
||||
"ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="],
|
||||
|
||||
"nanoid": ["nanoid@3.3.11", "", { "bin": { "nanoid": "bin/nanoid.cjs" } }, "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w=="],
|
||||
|
||||
"next": ["next@15.5.6", "", { "dependencies": { "@next/env": "15.5.6", "@swc/helpers": "0.5.15", "caniuse-lite": "1.0.30001752", "postcss": "8.4.31", "styled-jsx": "5.1.6" }, "optionalDependencies": { "@next/swc-darwin-arm64": "15.5.6", "@next/swc-darwin-x64": "15.5.6", "@next/swc-linux-arm64-gnu": "15.5.6", "@next/swc-linux-arm64-musl": "15.5.6", "@next/swc-linux-x64-gnu": "15.5.6", "@next/swc-linux-x64-musl": "15.5.6", "@next/swc-win32-arm64-msvc": "15.5.6", "@next/swc-win32-x64-msvc": "15.5.6", "sharp": "0.34.4" }, "peerDependencies": { "react": "19.2.0", "react-dom": "19.2.0" }, "bin": { "next": "dist/bin/next" } }, "sha512-zTxsnI3LQo3c9HSdSf91O1jMNsEzIXDShXd4wVdg9y5shwLqBXi4ZtUUJyB86KGVSJLZx0PFONvO54aheGX8QQ=="],
|
||||
|
||||
"next-auth": ["next-auth@5.0.0-beta.25", "", { "dependencies": { "@auth/core": "0.37.2" }, "peerDependencies": { "next": "15.5.6", "react": "19.2.0" } }, "sha512-2dJJw1sHQl2qxCrRk+KTQbeH+izFbGFPuJj5eGgBZFYyiYYtvlrBeUw1E/OJJxTRjuxbSYGnCTkUIRsIIW0bog=="],
|
||||
|
||||
"oauth4webapi": ["oauth4webapi@3.8.2", "", {}, "sha512-FzZZ+bht5X0FKe7Mwz3DAVAmlH1BV5blSak/lHMBKz0/EBMhX6B10GlQYI51+oRp8ObJaX0g6pXrAxZh5s8rjw=="],
|
||||
|
||||
"picocolors": ["picocolors@1.1.1", "", {}, "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA=="],
|
||||
|
||||
"postcss": ["postcss@8.5.6", "", { "dependencies": { "nanoid": "3.3.11", "picocolors": "1.1.1", "source-map-js": "1.2.1" } }, "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg=="],
|
||||
|
||||
"postgres": ["postgres@3.4.7", "", {}, "sha512-Jtc2612XINuBjIl/QTWsV5UvE8UHuNblcO3vVADSrKsrc6RqGX6lOW1cEo3CM2v0XG4Nat8nI+YM7/f26VxXLw=="],
|
||||
|
||||
"preact": ["preact@10.24.3", "", {}, "sha512-Z2dPnBnMUfyQfSQ+GBdsGa16hz35YmLmtTLhM169uW944hYL6xzTYkJjC07j+Wosz733pMWx0fgON3JNw1jJQA=="],
|
||||
|
||||
"preact-render-to-string": ["preact-render-to-string@6.5.11", "", { "peerDependencies": { "preact": "10.24.3" } }, "sha512-ubnauqoGczeGISiOh6RjX0/cdaF8v/oDXIjO85XALCQjwQP+SB4RDXXtvZ6yTYSjG+PC1QRP2AhPgCEsM2EvUw=="],
|
||||
|
||||
"pretty-format": ["pretty-format@3.8.0", "", {}, "sha512-WuxUnVtlWL1OfZFQFuqvnvs6MiAGk9UNsBostyBOB0Is9wb5uRESevA6rnl/rkksXaGX3GzZhPup5d6Vp1nFew=="],
|
||||
|
||||
"react": ["react@19.2.0", "", {}, "sha512-tmbWg6W31tQLeB5cdIBOicJDJRR2KzXsV7uSK9iNfLWQ5bIZfxuPEHp7M8wiHyHnn0DD1i7w3Zmin0FtkrwoCQ=="],
|
||||
|
||||
"react-dom": ["react-dom@19.2.0", "", { "dependencies": { "scheduler": "0.27.0" }, "peerDependencies": { "react": "19.2.0" } }, "sha512-UlbRu4cAiGaIewkPyiRGJk0imDN2T3JjieT6spoL2UeSf5od4n5LB/mQ4ejmxhCFT1tYe8IvaFulzynWovsEFQ=="],
|
||||
|
||||
"resolve-pkg-maps": ["resolve-pkg-maps@1.0.0", "", {}, "sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw=="],
|
||||
|
||||
"scheduler": ["scheduler@0.27.0", "", {}, "sha512-eNv+WrVbKu1f3vbYJT/xtiF5syA5HPIMtf9IgY/nKg0sWqzAUEvqY/xm7OcZc/qafLx/iO9FgOmeSAp4v5ti/Q=="],
|
||||
|
||||
"semver": ["semver@7.7.3", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q=="],
|
||||
|
||||
"server-only": ["server-only@0.0.1", "", {}, "sha512-qepMx2JxAa5jjfzxG79yPPq+8BuFToHd1hm7kI+Z4zAq1ftQiP7HcxMhDDItrbtwVeLg/cY2JnKnrcFkmiswNA=="],
|
||||
|
||||
"sharp": ["sharp@0.34.4", "", { "dependencies": { "@img/colour": "1.0.0", "detect-libc": "2.1.2", "semver": "7.7.3" }, "optionalDependencies": { "@img/sharp-darwin-arm64": "0.34.4", "@img/sharp-darwin-x64": "0.34.4", "@img/sharp-libvips-darwin-arm64": "1.2.3", "@img/sharp-libvips-darwin-x64": "1.2.3", "@img/sharp-libvips-linux-arm": "1.2.3", "@img/sharp-libvips-linux-arm64": "1.2.3", "@img/sharp-libvips-linux-ppc64": "1.2.3", "@img/sharp-libvips-linux-s390x": "1.2.3", "@img/sharp-libvips-linux-x64": "1.2.3", "@img/sharp-libvips-linuxmusl-arm64": "1.2.3", "@img/sharp-libvips-linuxmusl-x64": "1.2.3", "@img/sharp-linux-arm": "0.34.4", "@img/sharp-linux-arm64": "0.34.4", "@img/sharp-linux-ppc64": "0.34.4", "@img/sharp-linux-s390x": "0.34.4", "@img/sharp-linux-x64": "0.34.4", "@img/sharp-linuxmusl-arm64": "0.34.4", "@img/sharp-linuxmusl-x64": "0.34.4", "@img/sharp-wasm32": "0.34.4", "@img/sharp-win32-arm64": "0.34.4", "@img/sharp-win32-ia32": "0.34.4", "@img/sharp-win32-x64": "0.34.4" } }, "sha512-FUH39xp3SBPnxWvd5iib1X8XY7J0K0X7d93sie9CJg2PO8/7gmg89Nve6OjItK53/MlAushNNxteBYfM6DEuoA=="],
|
||||
|
||||
"shell-quote": ["shell-quote@1.8.3", "", {}, "sha512-ObmnIF4hXNg1BqhnHmgbDETF8dLPCggZWBjkQfhZpbszZnYur5DUljTcCHii5LC3J5E0yeO/1LIMyH+UvHQgyw=="],
|
||||
|
||||
"source-map": ["source-map@0.6.1", "", {}, "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g=="],
|
||||
|
||||
"source-map-js": ["source-map-js@1.2.1", "", {}, "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA=="],
|
||||
|
||||
"source-map-support": ["source-map-support@0.5.21", "", { "dependencies": { "buffer-from": "1.1.2", "source-map": "0.6.1" } }, "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w=="],
|
||||
|
||||
"styled-jsx": ["styled-jsx@5.1.6", "", { "dependencies": { "client-only": "0.0.1" }, "peerDependencies": { "react": "19.2.0" } }, "sha512-qSVyDTeMotdvQYoHWLNGwRFJHC+i+ZvdBRYosOFgC+Wg1vx4frN2/RG/NA7SYqqvKNLf39P2LSRA2pu6n0XYZA=="],
|
||||
|
||||
"superjson": ["superjson@2.2.5", "", { "dependencies": { "copy-anything": "4.0.5" } }, "sha512-zWPTX96LVsA/eVYnqOM2+ofcdPqdS1dAF1LN4TS2/MWuUpfitd9ctTa87wt4xrYnZnkLtS69xpBdSxVBP5Rm6w=="],
|
||||
|
||||
"tailwindcss": ["tailwindcss@4.1.16", "", {}, "sha512-pONL5awpaQX4LN5eiv7moSiSPd/DLDzKVRJz8Q9PgzmAdd1R4307GQS2ZpfiN7ZmekdQrfhZZiSE5jkLR4WNaA=="],
|
||||
|
||||
"tapable": ["tapable@2.3.0", "", {}, "sha512-g9ljZiwki/LfxmQADO3dEY1CbpmXT5Hm2fJ+QaGKwSXUylMybePR7/67YW7jOrrvjEgL1Fmz5kzyAjWVWLlucg=="],
|
||||
|
||||
"tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="],
|
||||
|
||||
"typescript": ["typescript@5.9.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw=="],
|
||||
|
||||
"undici-types": ["undici-types@6.21.0", "", {}, "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ=="],
|
||||
|
||||
"which": ["which@4.0.0", "", { "dependencies": { "isexe": "3.1.1" }, "bin": { "node-which": "bin/which.js" } }, "sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg=="],
|
||||
|
||||
"zod": ["zod@3.25.76", "", {}, "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ=="],
|
||||
|
||||
"@esbuild-kit/core-utils/esbuild": ["esbuild@0.18.20", "", { "optionalDependencies": { "@esbuild/android-arm": "0.18.20", "@esbuild/android-arm64": "0.18.20", "@esbuild/android-x64": "0.18.20", "@esbuild/darwin-arm64": "0.18.20", "@esbuild/darwin-x64": "0.18.20", "@esbuild/freebsd-arm64": "0.18.20", "@esbuild/freebsd-x64": "0.18.20", "@esbuild/linux-arm": "0.18.20", "@esbuild/linux-arm64": "0.18.20", "@esbuild/linux-ia32": "0.18.20", "@esbuild/linux-loong64": "0.18.20", "@esbuild/linux-mips64el": "0.18.20", "@esbuild/linux-ppc64": "0.18.20", "@esbuild/linux-riscv64": "0.18.20", "@esbuild/linux-s390x": "0.18.20", "@esbuild/linux-x64": "0.18.20", "@esbuild/netbsd-x64": "0.18.20", "@esbuild/openbsd-x64": "0.18.20", "@esbuild/sunos-x64": "0.18.20", "@esbuild/win32-arm64": "0.18.20", "@esbuild/win32-ia32": "0.18.20", "@esbuild/win32-x64": "0.18.20" }, "bin": { "esbuild": "bin/esbuild" } }, "sha512-ceqxoedUrcayh7Y7ZX6NdbbDzGROiyVBgC4PriJThBKSVPWnnFHZAkfI1lJT8QFkOwH4qOS2SJkS4wvpGl8BpA=="],
|
||||
|
||||
"drizzle-kit/esbuild": ["esbuild@0.19.12", "", { "optionalDependencies": { "@esbuild/aix-ppc64": "0.19.12", "@esbuild/android-arm": "0.19.12", "@esbuild/android-arm64": "0.19.12", "@esbuild/android-x64": "0.19.12", "@esbuild/darwin-arm64": "0.19.12", "@esbuild/darwin-x64": "0.19.12", "@esbuild/freebsd-arm64": "0.19.12", "@esbuild/freebsd-x64": "0.19.12", "@esbuild/linux-arm": "0.19.12", "@esbuild/linux-arm64": "0.19.12", "@esbuild/linux-ia32": "0.19.12", "@esbuild/linux-loong64": "0.19.12", "@esbuild/linux-mips64el": "0.19.12", "@esbuild/linux-ppc64": "0.19.12", "@esbuild/linux-riscv64": "0.19.12", "@esbuild/linux-s390x": "0.19.12", "@esbuild/linux-x64": "0.19.12", "@esbuild/netbsd-x64": "0.19.12", "@esbuild/openbsd-x64": "0.19.12", "@esbuild/sunos-x64": "0.19.12", "@esbuild/win32-arm64": "0.19.12", "@esbuild/win32-ia32": "0.19.12", "@esbuild/win32-x64": "0.19.12" }, "bin": { "esbuild": "bin/esbuild" } }, "sha512-aARqgq8roFBj054KvQr5f1sFu0D65G+miZRCuJyJ0G13Zwx7vRar5Zhn2tkQNzIXcBrNVsv/8stehpj+GAjgbg=="],
|
||||
|
||||
"next/postcss": ["postcss@8.4.31", "", { "dependencies": { "nanoid": "3.3.11", "picocolors": "1.1.1", "source-map-js": "1.2.1" } }, "sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ=="],
|
||||
|
||||
"next-auth/@auth/core": ["@auth/core@0.37.2", "", { "dependencies": { "@panva/hkdf": "1.2.1", "@types/cookie": "0.6.0", "cookie": "0.7.1", "jose": "5.10.0", "oauth4webapi": "3.8.2", "preact": "10.11.3", "preact-render-to-string": "5.2.3" } }, "sha512-kUvzyvkcd6h1vpeMAojK2y7+PAV5H+0Cc9+ZlKYDFhDY31AlvsB+GW5vNO4qE3Y07KeQgvNO9U0QUx/fN62kBw=="],
|
||||
|
||||
"@esbuild-kit/core-utils/esbuild/@esbuild/android-arm": ["@esbuild/android-arm@0.18.20", "", { "os": "android", "cpu": "arm" }, "sha512-fyi7TDI/ijKKNZTUJAQqiG5T7YjJXgnzkURqmGj13C6dCqckZBLdl4h7bkhHt/t0WP+zO9/zwroDvANaOqO5Sw=="],
|
||||
|
||||
"@esbuild-kit/core-utils/esbuild/@esbuild/android-arm64": ["@esbuild/android-arm64@0.18.20", "", { "os": "android", "cpu": "arm64" }, "sha512-Nz4rJcchGDtENV0eMKUNa6L12zz2zBDXuhj/Vjh18zGqB44Bi7MBMSXjgunJgjRhCmKOjnPuZp4Mb6OKqtMHLQ=="],
|
||||
|
||||
"@esbuild-kit/core-utils/esbuild/@esbuild/android-x64": ["@esbuild/android-x64@0.18.20", "", { "os": "android", "cpu": "x64" }, "sha512-8GDdlePJA8D6zlZYJV/jnrRAi6rOiNaCC/JclcXpB+KIuvfBN4owLtgzY2bsxnx666XjJx2kDPUmnTtR8qKQUg=="],
|
||||
|
||||
"@esbuild-kit/core-utils/esbuild/@esbuild/darwin-arm64": ["@esbuild/darwin-arm64@0.18.20", "", { "os": "darwin", "cpu": "arm64" }, "sha512-bxRHW5kHU38zS2lPTPOyuyTm+S+eobPUnTNkdJEfAddYgEcll4xkT8DB9d2008DtTbl7uJag2HuE5NZAZgnNEA=="],
|
||||
|
||||
"@esbuild-kit/core-utils/esbuild/@esbuild/darwin-x64": ["@esbuild/darwin-x64@0.18.20", "", { "os": "darwin", "cpu": "x64" }, "sha512-pc5gxlMDxzm513qPGbCbDukOdsGtKhfxD1zJKXjCCcU7ju50O7MeAZ8c4krSJcOIJGFR+qx21yMMVYwiQvyTyQ=="],
|
||||
|
||||
"@esbuild-kit/core-utils/esbuild/@esbuild/freebsd-arm64": ["@esbuild/freebsd-arm64@0.18.20", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-yqDQHy4QHevpMAaxhhIwYPMv1NECwOvIpGCZkECn8w2WFHXjEwrBn3CeNIYsibZ/iZEUemj++M26W3cNR5h+Tw=="],
|
||||
|
||||
"@esbuild-kit/core-utils/esbuild/@esbuild/freebsd-x64": ["@esbuild/freebsd-x64@0.18.20", "", { "os": "freebsd", "cpu": "x64" }, "sha512-tgWRPPuQsd3RmBZwarGVHZQvtzfEBOreNuxEMKFcd5DaDn2PbBxfwLcj4+aenoh7ctXcbXmOQIn8HI6mCSw5MQ=="],
|
||||
|
||||
"@esbuild-kit/core-utils/esbuild/@esbuild/linux-arm": ["@esbuild/linux-arm@0.18.20", "", { "os": "linux", "cpu": "arm" }, "sha512-/5bHkMWnq1EgKr1V+Ybz3s1hWXok7mDFUMQ4cG10AfW3wL02PSZi5kFpYKrptDsgb2WAJIvRcDm+qIvXf/apvg=="],
|
||||
|
||||
"@esbuild-kit/core-utils/esbuild/@esbuild/linux-arm64": ["@esbuild/linux-arm64@0.18.20", "", { "os": "linux", "cpu": "arm64" }, "sha512-2YbscF+UL7SQAVIpnWvYwM+3LskyDmPhe31pE7/aoTMFKKzIc9lLbyGUpmmb8a8AixOL61sQ/mFh3jEjHYFvdA=="],
|
||||
|
||||
"@esbuild-kit/core-utils/esbuild/@esbuild/linux-ia32": ["@esbuild/linux-ia32@0.18.20", "", { "os": "linux", "cpu": "ia32" }, "sha512-P4etWwq6IsReT0E1KHU40bOnzMHoH73aXp96Fs8TIT6z9Hu8G6+0SHSw9i2isWrD2nbx2qo5yUqACgdfVGx7TA=="],
|
||||
|
||||
"@esbuild-kit/core-utils/esbuild/@esbuild/linux-loong64": ["@esbuild/linux-loong64@0.18.20", "", { "os": "linux", "cpu": "none" }, "sha512-nXW8nqBTrOpDLPgPY9uV+/1DjxoQ7DoB2N8eocyq8I9XuqJ7BiAMDMf9n1xZM9TgW0J8zrquIb/A7s3BJv7rjg=="],
|
||||
|
||||
"@esbuild-kit/core-utils/esbuild/@esbuild/linux-mips64el": ["@esbuild/linux-mips64el@0.18.20", "", { "os": "linux", "cpu": "none" }, "sha512-d5NeaXZcHp8PzYy5VnXV3VSd2D328Zb+9dEq5HE6bw6+N86JVPExrA6O68OPwobntbNJ0pzCpUFZTo3w0GyetQ=="],
|
||||
|
||||
"@esbuild-kit/core-utils/esbuild/@esbuild/linux-ppc64": ["@esbuild/linux-ppc64@0.18.20", "", { "os": "linux", "cpu": "ppc64" }, "sha512-WHPyeScRNcmANnLQkq6AfyXRFr5D6N2sKgkFo2FqguP44Nw2eyDlbTdZwd9GYk98DZG9QItIiTlFLHJHjxP3FA=="],
|
||||
|
||||
"@esbuild-kit/core-utils/esbuild/@esbuild/linux-riscv64": ["@esbuild/linux-riscv64@0.18.20", "", { "os": "linux", "cpu": "none" }, "sha512-WSxo6h5ecI5XH34KC7w5veNnKkju3zBRLEQNY7mv5mtBmrP/MjNBCAlsM2u5hDBlS3NGcTQpoBvRzqBcRtpq1A=="],
|
||||
|
||||
"@esbuild-kit/core-utils/esbuild/@esbuild/linux-s390x": ["@esbuild/linux-s390x@0.18.20", "", { "os": "linux", "cpu": "s390x" }, "sha512-+8231GMs3mAEth6Ja1iK0a1sQ3ohfcpzpRLH8uuc5/KVDFneH6jtAJLFGafpzpMRO6DzJ6AvXKze9LfFMrIHVQ=="],
|
||||
|
||||
"@esbuild-kit/core-utils/esbuild/@esbuild/linux-x64": ["@esbuild/linux-x64@0.18.20", "", { "os": "linux", "cpu": "x64" }, "sha512-UYqiqemphJcNsFEskc73jQ7B9jgwjWrSayxawS6UVFZGWrAAtkzjxSqnoclCXxWtfwLdzU+vTpcNYhpn43uP1w=="],
|
||||
|
||||
"@esbuild-kit/core-utils/esbuild/@esbuild/netbsd-x64": ["@esbuild/netbsd-x64@0.18.20", "", { "os": "none", "cpu": "x64" }, "sha512-iO1c++VP6xUBUmltHZoMtCUdPlnPGdBom6IrO4gyKPFFVBKioIImVooR5I83nTew5UOYrk3gIJhbZh8X44y06A=="],
|
||||
|
||||
"@esbuild-kit/core-utils/esbuild/@esbuild/openbsd-x64": ["@esbuild/openbsd-x64@0.18.20", "", { "os": "openbsd", "cpu": "x64" }, "sha512-e5e4YSsuQfX4cxcygw/UCPIEP6wbIL+se3sxPdCiMbFLBWu0eiZOJ7WoD+ptCLrmjZBK1Wk7I6D/I3NglUGOxg=="],
|
||||
|
||||
"@esbuild-kit/core-utils/esbuild/@esbuild/sunos-x64": ["@esbuild/sunos-x64@0.18.20", "", { "os": "sunos", "cpu": "x64" }, "sha512-kDbFRFp0YpTQVVrqUd5FTYmWo45zGaXe0X8E1G/LKFC0v8x0vWrhOWSLITcCn63lmZIxfOMXtCfti/RxN/0wnQ=="],
|
||||
|
||||
"@esbuild-kit/core-utils/esbuild/@esbuild/win32-arm64": ["@esbuild/win32-arm64@0.18.20", "", { "os": "win32", "cpu": "arm64" }, "sha512-ddYFR6ItYgoaq4v4JmQQaAI5s7npztfV4Ag6NrhiaW0RrnOXqBkgwZLofVTlq1daVTQNhtI5oieTvkRPfZrePg=="],
|
||||
|
||||
"@esbuild-kit/core-utils/esbuild/@esbuild/win32-ia32": ["@esbuild/win32-ia32@0.18.20", "", { "os": "win32", "cpu": "ia32" }, "sha512-Wv7QBi3ID/rROT08SABTS7eV4hX26sVduqDOTe1MvGMjNd3EjOz4b7zeexIR62GTIEKrfJXKL9LFxTYgkyeu7g=="],
|
||||
|
||||
"@esbuild-kit/core-utils/esbuild/@esbuild/win32-x64": ["@esbuild/win32-x64@0.18.20", "", { "os": "win32", "cpu": "x64" }, "sha512-kTdfRcSiDfQca/y9QIkng02avJ+NCaQvrMejlsB3RRv5sE9rRoeBPISaZpKxHELzRxZyLvNts1P27W3wV+8geQ=="],
|
||||
|
||||
"drizzle-kit/esbuild/@esbuild/aix-ppc64": ["@esbuild/aix-ppc64@0.19.12", "", { "os": "aix", "cpu": "ppc64" }, "sha512-bmoCYyWdEL3wDQIVbcyzRyeKLgk2WtWLTWz1ZIAZF/EGbNOwSA6ew3PftJ1PqMiOOGu0OyFMzG53L0zqIpPeNA=="],
|
||||
|
||||
"drizzle-kit/esbuild/@esbuild/android-arm": ["@esbuild/android-arm@0.19.12", "", { "os": "android", "cpu": "arm" }, "sha512-qg/Lj1mu3CdQlDEEiWrlC4eaPZ1KztwGJ9B6J+/6G+/4ewxJg7gqj8eVYWvao1bXrqGiW2rsBZFSX3q2lcW05w=="],
|
||||
|
||||
"drizzle-kit/esbuild/@esbuild/android-arm64": ["@esbuild/android-arm64@0.19.12", "", { "os": "android", "cpu": "arm64" }, "sha512-P0UVNGIienjZv3f5zq0DP3Nt2IE/3plFzuaS96vihvD0Hd6H/q4WXUGpCxD/E8YrSXfNyRPbpTq+T8ZQioSuPA=="],
|
||||
|
||||
"drizzle-kit/esbuild/@esbuild/android-x64": ["@esbuild/android-x64@0.19.12", "", { "os": "android", "cpu": "x64" }, "sha512-3k7ZoUW6Q6YqhdhIaq/WZ7HwBpnFBlW905Fa4s4qWJyiNOgT1dOqDiVAQFwBH7gBRZr17gLrlFCRzF6jFh7Kew=="],
|
||||
|
||||
"drizzle-kit/esbuild/@esbuild/darwin-arm64": ["@esbuild/darwin-arm64@0.19.12", "", { "os": "darwin", "cpu": "arm64" }, "sha512-B6IeSgZgtEzGC42jsI+YYu9Z3HKRxp8ZT3cqhvliEHovq8HSX2YX8lNocDn79gCKJXOSaEot9MVYky7AKjCs8g=="],
|
||||
|
||||
"drizzle-kit/esbuild/@esbuild/darwin-x64": ["@esbuild/darwin-x64@0.19.12", "", { "os": "darwin", "cpu": "x64" }, "sha512-hKoVkKzFiToTgn+41qGhsUJXFlIjxI/jSYeZf3ugemDYZldIXIxhvwN6erJGlX4t5h417iFuheZ7l+YVn05N3A=="],
|
||||
|
||||
"drizzle-kit/esbuild/@esbuild/freebsd-arm64": ["@esbuild/freebsd-arm64@0.19.12", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-4aRvFIXmwAcDBw9AueDQ2YnGmz5L6obe5kmPT8Vd+/+x/JMVKCgdcRwH6APrbpNXsPz+K653Qg8HB/oXvXVukA=="],
|
||||
|
||||
"drizzle-kit/esbuild/@esbuild/freebsd-x64": ["@esbuild/freebsd-x64@0.19.12", "", { "os": "freebsd", "cpu": "x64" }, "sha512-EYoXZ4d8xtBoVN7CEwWY2IN4ho76xjYXqSXMNccFSx2lgqOG/1TBPW0yPx1bJZk94qu3tX0fycJeeQsKovA8gg=="],
|
||||
|
||||
"drizzle-kit/esbuild/@esbuild/linux-arm": ["@esbuild/linux-arm@0.19.12", "", { "os": "linux", "cpu": "arm" }, "sha512-J5jPms//KhSNv+LO1S1TX1UWp1ucM6N6XuL6ITdKWElCu8wXP72l9MM0zDTzzeikVyqFE6U8YAV9/tFyj0ti+w=="],
|
||||
|
||||
"drizzle-kit/esbuild/@esbuild/linux-arm64": ["@esbuild/linux-arm64@0.19.12", "", { "os": "linux", "cpu": "arm64" }, "sha512-EoTjyYyLuVPfdPLsGVVVC8a0p1BFFvtpQDB/YLEhaXyf/5bczaGeN15QkR+O4S5LeJ92Tqotve7i1jn35qwvdA=="],
|
||||
|
||||
"drizzle-kit/esbuild/@esbuild/linux-ia32": ["@esbuild/linux-ia32@0.19.12", "", { "os": "linux", "cpu": "ia32" }, "sha512-Thsa42rrP1+UIGaWz47uydHSBOgTUnwBwNq59khgIwktK6x60Hivfbux9iNR0eHCHzOLjLMLfUMLCypBkZXMHA=="],
|
||||
|
||||
"drizzle-kit/esbuild/@esbuild/linux-loong64": ["@esbuild/linux-loong64@0.19.12", "", { "os": "linux", "cpu": "none" }, "sha512-LiXdXA0s3IqRRjm6rV6XaWATScKAXjI4R4LoDlvO7+yQqFdlr1Bax62sRwkVvRIrwXxvtYEHHI4dm50jAXkuAA=="],
|
||||
|
||||
"drizzle-kit/esbuild/@esbuild/linux-mips64el": ["@esbuild/linux-mips64el@0.19.12", "", { "os": "linux", "cpu": "none" }, "sha512-fEnAuj5VGTanfJ07ff0gOA6IPsvrVHLVb6Lyd1g2/ed67oU1eFzL0r9WL7ZzscD+/N6i3dWumGE1Un4f7Amf+w=="],
|
||||
|
||||
"drizzle-kit/esbuild/@esbuild/linux-ppc64": ["@esbuild/linux-ppc64@0.19.12", "", { "os": "linux", "cpu": "ppc64" }, "sha512-nYJA2/QPimDQOh1rKWedNOe3Gfc8PabU7HT3iXWtNUbRzXS9+vgB0Fjaqr//XNbd82mCxHzik2qotuI89cfixg=="],
|
||||
|
||||
"drizzle-kit/esbuild/@esbuild/linux-riscv64": ["@esbuild/linux-riscv64@0.19.12", "", { "os": "linux", "cpu": "none" }, "sha512-2MueBrlPQCw5dVJJpQdUYgeqIzDQgw3QtiAHUC4RBz9FXPrskyyU3VI1hw7C0BSKB9OduwSJ79FTCqtGMWqJHg=="],
|
||||
|
||||
"drizzle-kit/esbuild/@esbuild/linux-s390x": ["@esbuild/linux-s390x@0.19.12", "", { "os": "linux", "cpu": "s390x" }, "sha512-+Pil1Nv3Umes4m3AZKqA2anfhJiVmNCYkPchwFJNEJN5QxmTs1uzyy4TvmDrCRNT2ApwSari7ZIgrPeUx4UZDg=="],
|
||||
|
||||
"drizzle-kit/esbuild/@esbuild/linux-x64": ["@esbuild/linux-x64@0.19.12", "", { "os": "linux", "cpu": "x64" }, "sha512-B71g1QpxfwBvNrfyJdVDexenDIt1CiDN1TIXLbhOw0KhJzE78KIFGX6OJ9MrtC0oOqMWf+0xop4qEU8JrJTwCg=="],
|
||||
|
||||
"drizzle-kit/esbuild/@esbuild/netbsd-x64": ["@esbuild/netbsd-x64@0.19.12", "", { "os": "none", "cpu": "x64" }, "sha512-3ltjQ7n1owJgFbuC61Oj++XhtzmymoCihNFgT84UAmJnxJfm4sYCiSLTXZtE00VWYpPMYc+ZQmB6xbSdVh0JWA=="],
|
||||
|
||||
"drizzle-kit/esbuild/@esbuild/openbsd-x64": ["@esbuild/openbsd-x64@0.19.12", "", { "os": "openbsd", "cpu": "x64" }, "sha512-RbrfTB9SWsr0kWmb9srfF+L933uMDdu9BIzdA7os2t0TXhCRjrQyCeOt6wVxr79CKD4c+p+YhCj31HBkYcXebw=="],
|
||||
|
||||
"drizzle-kit/esbuild/@esbuild/sunos-x64": ["@esbuild/sunos-x64@0.19.12", "", { "os": "sunos", "cpu": "x64" }, "sha512-HKjJwRrW8uWtCQnQOz9qcU3mUZhTUQvi56Q8DPTLLB+DawoiQdjsYq+j+D3s9I8VFtDr+F9CjgXKKC4ss89IeA=="],
|
||||
|
||||
"drizzle-kit/esbuild/@esbuild/win32-arm64": ["@esbuild/win32-arm64@0.19.12", "", { "os": "win32", "cpu": "arm64" }, "sha512-URgtR1dJnmGvX864pn1B2YUYNzjmXkuJOIqG2HdU62MVS4EHpU2946OZoTMnRUHklGtJdJZ33QfzdjGACXhn1A=="],
|
||||
|
||||
"drizzle-kit/esbuild/@esbuild/win32-ia32": ["@esbuild/win32-ia32@0.19.12", "", { "os": "win32", "cpu": "ia32" }, "sha512-+ZOE6pUkMOJfmxmBZElNOx72NKpIa/HFOMGzu8fqzQJ5kgf6aTGrcJaFsNiVMH4JKpMipyK+7k0n2UXN7a8YKQ=="],
|
||||
|
||||
"drizzle-kit/esbuild/@esbuild/win32-x64": ["@esbuild/win32-x64@0.19.12", "", { "os": "win32", "cpu": "x64" }, "sha512-T1QyPSDCyMXaO3pzBkF96E8xMkiRYbUEZADd29SyPGabqxMViNoii+NcK7eWJAEoU6RZyEm5lVSIjTmcdoB9HA=="],
|
||||
|
||||
"next-auth/@auth/core/jose": ["jose@5.10.0", "", {}, "sha512-s+3Al/p9g32Iq+oqXxkW//7jk2Vig6FF1CFqzVXoTUXt2qz89YWbL+OwS17NFYEvxC35n0FKeGO2LGYSxeM2Gg=="],
|
||||
|
||||
"next-auth/@auth/core/preact": ["preact@10.11.3", "", {}, "sha512-eY93IVpod/zG3uMF22Unl8h9KkrcKIRs2EGar8hwLZZDU1lkjph303V9HZBwufh2s736U6VXuhD109LYqPoffg=="],
|
||||
|
||||
"next-auth/@auth/core/preact-render-to-string": ["preact-render-to-string@5.2.3", "", { "dependencies": { "pretty-format": "3.8.0" }, "peerDependencies": { "preact": "10.11.3" } }, "sha512-aPDxUn5o3GhWdtJtW0svRC2SS/l8D9MAgo2+AWml+BhDImb27ALf04Q2d+AHqUUOc6RdSXFIBVa2gxzgMKgtZA=="],
|
||||
}
|
||||
}
|
||||
5
bench/install/next-env.d.ts
vendored
5
bench/install/next-env.d.ts
vendored
@@ -1,5 +0,0 @@
|
||||
/// <reference types="next" />
|
||||
/// <reference types="next/image-types/global" />
|
||||
|
||||
// NOTE: This file should not be edited
|
||||
// see https://nextjs.org/docs/basic-features/typescript for more information.
|
||||
@@ -1,10 +0,0 @@
|
||||
/**
|
||||
* Run `build` or `dev` with `SKIP_ENV_VALIDATION` to skip env validation. This is especially useful
|
||||
* for Docker builds.
|
||||
*/
|
||||
import "./src/env.js";
|
||||
|
||||
/** @type {import("next").NextConfig} */
|
||||
const config = {};
|
||||
|
||||
export default config;
|
||||
@@ -1,52 +1,31 @@
|
||||
{
|
||||
"name": "installbench",
|
||||
"version": "0.1.0",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"sideEffects": false,
|
||||
"scripts": {
|
||||
"build": "next build",
|
||||
"check": "biome check .",
|
||||
"check:unsafe": "biome check --write --unsafe .",
|
||||
"check:write": "biome check --write .",
|
||||
"db:generate": "drizzle-kit generate",
|
||||
"db:migrate": "drizzle-kit migrate",
|
||||
"db:push": "drizzle-kit push",
|
||||
"db:studio": "drizzle-kit studio",
|
||||
"dev": "next dev --turbo",
|
||||
"preview": "next build && next start",
|
||||
"start": "next start",
|
||||
"typecheck": "tsc --noEmit"
|
||||
"build": "remix build",
|
||||
"dev": "remix dev",
|
||||
"start": "remix-serve build",
|
||||
"typecheck": "tsc",
|
||||
"clean": "rm -rf node_modules",
|
||||
"bench": "hyperfine --prepare 'rm -rf node_modules' --warmup 1 --runs 3 'bun install' 'pnpm install' 'yarn' 'npm install'"
|
||||
},
|
||||
"dependencies": {
|
||||
"@auth/drizzle-adapter": "^1.7.2",
|
||||
"@t3-oss/env-nextjs": "^0.12.0",
|
||||
"@tanstack/react-query": "^5.69.0",
|
||||
"@trpc/client": "^11.0.0",
|
||||
"@trpc/react-query": "^11.0.0",
|
||||
"@trpc/server": "^11.0.0",
|
||||
"drizzle-orm": "^0.41.0",
|
||||
"esbuild": "^0.25.11",
|
||||
"next": "^15.2.3",
|
||||
"next-auth": "5.0.0-beta.25",
|
||||
"postgres": "^3.4.4",
|
||||
"react": "^19.0.0",
|
||||
"react-dom": "^19.0.0",
|
||||
"server-only": "^0.0.1",
|
||||
"superjson": "^2.2.1",
|
||||
"zod": "^3.24.2"
|
||||
"@remix-run/node": "^1.15.0",
|
||||
"@remix-run/react": "^1.15.0",
|
||||
"@remix-run/serve": "^1.15.0",
|
||||
"isbot": "^3.6.5",
|
||||
"react": "^18.2.0",
|
||||
"react-dom": "^18.2.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@biomejs/biome": "1.9.4",
|
||||
"@tailwindcss/postcss": "^4.0.15",
|
||||
"@types/node": "^20.14.10",
|
||||
"@types/react": "^19.0.0",
|
||||
"@types/react-dom": "^19.0.0",
|
||||
"drizzle-kit": "^0.30.5",
|
||||
"postcss": "^8.5.3",
|
||||
"tailwindcss": "^4.0.15",
|
||||
"typescript": "^5.8.2"
|
||||
"@remix-run/dev": "^1.15.0",
|
||||
"@remix-run/eslint-config": "^1.15.0",
|
||||
"@types/react": "^18.0.25",
|
||||
"@types/react-dom": "^18.0.8",
|
||||
"eslint": "^8.27.0",
|
||||
"typescript": "^4.8.4"
|
||||
},
|
||||
"ct3aMetadata": {
|
||||
"initVersion": "7.39.3"
|
||||
"engines": {
|
||||
"node": ">=14"
|
||||
}
|
||||
}
|
||||
|
||||
BIN
bench/install/public/favicon.ico
Normal file
BIN
bench/install/public/favicon.ico
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 17 KiB |
14
bench/install/remix.config.js
Normal file
14
bench/install/remix.config.js
Normal file
@@ -0,0 +1,14 @@
|
||||
/** @type {import('@remix-run/dev').AppConfig} */
|
||||
module.exports = {
|
||||
ignoredRouteFiles: ["**/.*"],
|
||||
// appDirectory: "app",
|
||||
// assetsBuildDirectory: "public/build",
|
||||
// serverBuildPath: "build/index.js",
|
||||
// publicPath: "/build/",
|
||||
future: {
|
||||
v2_errorBoundary: true,
|
||||
v2_meta: true,
|
||||
v2_normalizeFormMethod: true,
|
||||
v2_routeConvention: true,
|
||||
},
|
||||
};
|
||||
2
bench/install/remix.env.d.ts
vendored
Normal file
2
bench/install/remix.env.d.ts
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
/// <reference types="@remix-run/dev" />
|
||||
/// <reference types="@remix-run/node" />
|
||||
22
bench/install/tsconfig.json
Normal file
22
bench/install/tsconfig.json
Normal file
@@ -0,0 +1,22 @@
|
||||
{
|
||||
"include": ["remix.env.d.ts", "**/*.ts", "**/*.tsx"],
|
||||
"compilerOptions": {
|
||||
"lib": ["DOM", "DOM.Iterable", "ES2019"],
|
||||
"isolatedModules": true,
|
||||
"esModuleInterop": true,
|
||||
"jsx": "react-jsx",
|
||||
"moduleResolution": "node",
|
||||
"resolveJsonModule": true,
|
||||
"target": "ES2019",
|
||||
"strict": true,
|
||||
"allowJs": true,
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"baseUrl": ".",
|
||||
"paths": {
|
||||
"~/*": ["./app/*"]
|
||||
},
|
||||
|
||||
// Remix takes care of building everything in `remix build`.
|
||||
"noEmit": true
|
||||
}
|
||||
}
|
||||
@@ -18,7 +18,6 @@
|
||||
"react": "^18.3.1",
|
||||
"react-dom": "^18.3.1",
|
||||
"string-width": "7.1.0",
|
||||
"strip-ansi": "^7.1.0",
|
||||
"tinycolor2": "^1.6.0",
|
||||
"zx": "^7.2.3"
|
||||
},
|
||||
|
||||
@@ -1,116 +0,0 @@
|
||||
// Benchmark for object fast path optimization in postMessage with Workers
|
||||
|
||||
import { bench, run } from "mitata";
|
||||
import { Worker } from "node:worker_threads";
|
||||
|
||||
const extraProperties = {
|
||||
a: "a!",
|
||||
b: "b!",
|
||||
"second": "c!",
|
||||
bool: true,
|
||||
nully: null,
|
||||
undef: undefined,
|
||||
int: 0,
|
||||
double: 1.234,
|
||||
falsy: false,
|
||||
};
|
||||
|
||||
const objects = {
|
||||
small: { property: "Hello world", ...extraProperties },
|
||||
medium: {
|
||||
property: Buffer.alloc("Hello World!!!".length * 1024, "Hello World!!!").toString(),
|
||||
...extraProperties,
|
||||
},
|
||||
large: {
|
||||
property: Buffer.alloc("Hello World!!!".length * 1024 * 256, "Hello World!!!").toString(),
|
||||
...extraProperties,
|
||||
},
|
||||
};
|
||||
|
||||
let worker;
|
||||
let receivedCount = new Int32Array(new SharedArrayBuffer(4));
|
||||
let sentCount = 0;
|
||||
|
||||
function createWorker() {
|
||||
const workerCode = `
|
||||
import { parentPort, workerData } from "node:worker_threads";
|
||||
|
||||
let int = workerData;
|
||||
|
||||
parentPort?.on("message", data => {
|
||||
switch (data.property.length) {
|
||||
case ${objects.small.property.length}:
|
||||
case ${objects.medium.property.length}:
|
||||
case ${objects.large.property.length}: {
|
||||
if (
|
||||
data.a === "a!" &&
|
||||
data.b === "b!" &&
|
||||
data.second === "c!" &&
|
||||
data.bool === true &&
|
||||
data.nully === null &&
|
||||
data.undef === undefined &&
|
||||
data.int === 0 &&
|
||||
data.double === 1.234 &&
|
||||
data.falsy === false) {
|
||||
Atomics.add(int, 0, 1);
|
||||
break;
|
||||
}
|
||||
}
|
||||
default: {
|
||||
throw new Error("Invalid data object: " + JSON.stringify(data));
|
||||
}
|
||||
}
|
||||
|
||||
});
|
||||
`;
|
||||
|
||||
worker = new Worker(workerCode, { eval: true, workerData: receivedCount });
|
||||
|
||||
worker.on("message", confirmationId => {});
|
||||
|
||||
worker.on("error", error => {
|
||||
console.error("Worker error:", error);
|
||||
});
|
||||
}
|
||||
|
||||
// Initialize worker before running benchmarks
|
||||
createWorker();
|
||||
|
||||
function fmt(int) {
|
||||
if (int < 1000) {
|
||||
return `${int} chars`;
|
||||
}
|
||||
|
||||
if (int < 100000) {
|
||||
return `${(int / 1024) | 0} KB`;
|
||||
}
|
||||
|
||||
return `${(int / 1024 / 1024) | 0} MB`;
|
||||
}
|
||||
|
||||
// Benchmark postMessage with pure strings (uses fast path)
|
||||
bench("postMessage({ prop: " + fmt(objects.small.property.length) + " string, ...9 more props })", async () => {
|
||||
sentCount++;
|
||||
worker.postMessage(objects.small);
|
||||
});
|
||||
|
||||
bench("postMessage({ prop: " + fmt(objects.medium.property.length) + " string, ...9 more props })", async () => {
|
||||
sentCount++;
|
||||
worker.postMessage(objects.medium);
|
||||
});
|
||||
|
||||
bench("postMessage({ prop: " + fmt(objects.large.property.length) + " string, ...9 more props })", async () => {
|
||||
sentCount++;
|
||||
worker.postMessage(objects.large);
|
||||
});
|
||||
|
||||
await run();
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 5000));
|
||||
|
||||
if (receivedCount[0] !== sentCount) {
|
||||
throw new Error("Expected " + receivedCount[0] + " to equal " + sentCount);
|
||||
}
|
||||
|
||||
// Cleanup worker
|
||||
worker?.terminate();
|
||||
@@ -1,77 +0,0 @@
|
||||
// Benchmark for string fast path optimization in postMessage with Workers
|
||||
|
||||
import { bench, run } from "mitata";
|
||||
import { Worker, isMainThread, parentPort } from "node:worker_threads";
|
||||
|
||||
// Test strings of different sizes
|
||||
const strings = {
|
||||
small: "Hello world",
|
||||
medium: Buffer.alloc("Hello World!!!".length * 1024, "Hello World!!!").toString(),
|
||||
large: Buffer.alloc("Hello World!!!".length * 1024 * 256, "Hello World!!!").toString(),
|
||||
};
|
||||
|
||||
let worker;
|
||||
let receivedCount = new Int32Array(new SharedArrayBuffer(4));
|
||||
let sentCount = 0;
|
||||
|
||||
function createWorker() {
|
||||
const workerCode = `
|
||||
import { parentPort, workerData } from "node:worker_threads";
|
||||
|
||||
let int = workerData;
|
||||
|
||||
parentPort?.on("message", data => {
|
||||
Atomics.add(int, 0, 1);
|
||||
});
|
||||
`;
|
||||
|
||||
worker = new Worker(workerCode, { eval: true, workerData: receivedCount });
|
||||
|
||||
worker.on("message", confirmationId => {});
|
||||
|
||||
worker.on("error", error => {
|
||||
console.error("Worker error:", error);
|
||||
});
|
||||
}
|
||||
|
||||
// Initialize worker before running benchmarks
|
||||
createWorker();
|
||||
|
||||
function fmt(int) {
|
||||
if (int < 1000) {
|
||||
return `${int} chars`;
|
||||
}
|
||||
|
||||
if (int < 100000) {
|
||||
return `${(int / 1024) | 0} KB`;
|
||||
}
|
||||
|
||||
return `${(int / 1024 / 1024) | 0} MB`;
|
||||
}
|
||||
|
||||
// Benchmark postMessage with pure strings (uses fast path)
|
||||
bench("postMessage(" + fmt(strings.small.length) + " string)", async () => {
|
||||
sentCount++;
|
||||
worker.postMessage(strings.small);
|
||||
});
|
||||
|
||||
bench("postMessage(" + fmt(strings.medium.length) + " string)", async () => {
|
||||
sentCount++;
|
||||
worker.postMessage(strings.medium);
|
||||
});
|
||||
|
||||
bench("postMessage(" + fmt(strings.large.length) + " string)", async () => {
|
||||
sentCount++;
|
||||
worker.postMessage(strings.large);
|
||||
});
|
||||
|
||||
await run();
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 5000));
|
||||
|
||||
if (receivedCount[0] !== sentCount) {
|
||||
throw new Error("Expected " + receivedCount[0] + " to equal " + sentCount);
|
||||
}
|
||||
|
||||
// Cleanup worker
|
||||
worker?.terminate();
|
||||
@@ -1,56 +0,0 @@
|
||||
// Benchmark for string fast path optimization in postMessage and structuredClone
|
||||
|
||||
import { bench, run } from "mitata";
|
||||
|
||||
// Test strings of different sizes
|
||||
const strings = {
|
||||
small: "Hello world",
|
||||
medium: "Hello World!!!".repeat(1024).split("").join(""),
|
||||
large: "Hello World!!!".repeat(1024).repeat(1024).split("").join(""),
|
||||
};
|
||||
|
||||
console.log("String fast path benchmark");
|
||||
console.log("Comparing pure strings (fast path) vs objects containing strings (traditional)");
|
||||
console.log("For structuredClone, pure strings should have constant time regardless of size.");
|
||||
console.log("");
|
||||
|
||||
// Benchmark structuredClone with pure strings (uses fast path)
|
||||
bench("structuredClone small string (fast path)", () => {
|
||||
structuredClone(strings.small);
|
||||
});
|
||||
|
||||
bench("structuredClone medium string (fast path)", () => {
|
||||
structuredClone(strings.medium);
|
||||
});
|
||||
|
||||
bench("structuredClone large string (fast path)", () => {
|
||||
structuredClone(strings.large);
|
||||
});
|
||||
|
||||
// Benchmark structuredClone with objects containing strings (traditional path)
|
||||
bench("structuredClone object with small string", () => {
|
||||
structuredClone({ str: strings.small });
|
||||
});
|
||||
|
||||
bench("structuredClone object with medium string", () => {
|
||||
structuredClone({ str: strings.medium });
|
||||
});
|
||||
|
||||
bench("structuredClone object with large string", () => {
|
||||
structuredClone({ str: strings.large });
|
||||
});
|
||||
|
||||
// Multiple string cloning benchmark
|
||||
bench("structuredClone 100 small strings", () => {
|
||||
for (let i = 0; i < 100; i++) {
|
||||
structuredClone(strings.small);
|
||||
}
|
||||
});
|
||||
|
||||
bench("structuredClone 100 small objects", () => {
|
||||
for (let i = 0; i < 100; i++) {
|
||||
structuredClone({ str: strings.small });
|
||||
}
|
||||
});
|
||||
|
||||
await run();
|
||||
Binary file not shown.
@@ -1,6 +1,6 @@
|
||||
const isBun = typeof globalThis?.Bun?.sql !== "undefined";
|
||||
import postgres from "postgres";
|
||||
const sql = isBun ? Bun.sql : postgres();
|
||||
const sql = isBun ? Bun.sql : postgres;
|
||||
|
||||
// Create the table if it doesn't exist
|
||||
await sql`
|
||||
|
||||
@@ -1,58 +0,0 @@
|
||||
const isBun = typeof globalThis?.Bun?.sql !== "undefined";
|
||||
let conn;
|
||||
let sql;
|
||||
import * as mariadb from "mariadb";
|
||||
import * as mysql2 from "mysql2/promise";
|
||||
let useMYSQL2 = false;
|
||||
if (process.argv.includes("--mysql2")) {
|
||||
useMYSQL2 = true;
|
||||
}
|
||||
if (isBun) {
|
||||
sql = new Bun.SQL({
|
||||
adapter: "mysql",
|
||||
database: "test",
|
||||
username: "root",
|
||||
});
|
||||
} else {
|
||||
const pool = (useMYSQL2 ? mysql2 : mariadb).createPool({
|
||||
// Add your MariaDB connection details here
|
||||
user: "root",
|
||||
database: "test",
|
||||
});
|
||||
conn = await pool.getConnection();
|
||||
}
|
||||
|
||||
if (isBun) {
|
||||
// Initialize the benchmark table (equivalent to initFct)
|
||||
await sql`DROP TABLE IF EXISTS test100`;
|
||||
await sql`CREATE TABLE test100 (i1 int,i2 int,i3 int,i4 int,i5 int,i6 int,i7 int,i8 int,i9 int,i10 int,i11 int,i12 int,i13 int,i14 int,i15 int,i16 int,i17 int,i18 int,i19 int,i20 int,i21 int,i22 int,i23 int,i24 int,i25 int,i26 int,i27 int,i28 int,i29 int,i30 int,i31 int,i32 int,i33 int,i34 int,i35 int,i36 int,i37 int,i38 int,i39 int,i40 int,i41 int,i42 int,i43 int,i44 int,i45 int,i46 int,i47 int,i48 int,i49 int,i50 int,i51 int,i52 int,i53 int,i54 int,i55 int,i56 int,i57 int,i58 int,i59 int,i60 int,i61 int,i62 int,i63 int,i64 int,i65 int,i66 int,i67 int,i68 int,i69 int,i70 int,i71 int,i72 int,i73 int,i74 int,i75 int,i76 int,i77 int,i78 int,i79 int,i80 int,i81 int,i82 int,i83 int,i84 int,i85 int,i86 int,i87 int,i88 int,i89 int,i90 int,i91 int,i92 int,i93 int,i94 int,i95 int,i96 int,i97 int,i98 int,i99 int,i100 int)`;
|
||||
await sql`INSERT INTO test100 value (1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100)`;
|
||||
} else {
|
||||
// Initialize the benchmark table (equivalent to initFct)
|
||||
await conn.query("DROP TABLE IF EXISTS test100");
|
||||
await conn.query(
|
||||
"CREATE TABLE test100 (i1 int,i2 int,i3 int,i4 int,i5 int,i6 int,i7 int,i8 int,i9 int,i10 int,i11 int,i12 int,i13 int,i14 int,i15 int,i16 int,i17 int,i18 int,i19 int,i20 int,i21 int,i22 int,i23 int,i24 int,i25 int,i26 int,i27 int,i28 int,i29 int,i30 int,i31 int,i32 int,i33 int,i34 int,i35 int,i36 int,i37 int,i38 int,i39 int,i40 int,i41 int,i42 int,i43 int,i44 int,i45 int,i46 int,i47 int,i48 int,i49 int,i50 int,i51 int,i52 int,i53 int,i54 int,i55 int,i56 int,i57 int,i58 int,i59 int,i60 int,i61 int,i62 int,i63 int,i64 int,i65 int,i66 int,i67 int,i68 int,i69 int,i70 int,i71 int,i72 int,i73 int,i74 int,i75 int,i76 int,i77 int,i78 int,i79 int,i80 int,i81 int,i82 int,i83 int,i84 int,i85 int,i86 int,i87 int,i88 int,i89 int,i90 int,i91 int,i92 int,i93 int,i94 int,i95 int,i96 int,i97 int,i98 int,i99 int,i100 int)",
|
||||
);
|
||||
await conn.query(
|
||||
"INSERT INTO test100 value (1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100)",
|
||||
);
|
||||
}
|
||||
// Run the benchmark (equivalent to benchFct)
|
||||
const type = isBun ? "Bun.SQL" : useMYSQL2 ? "mysql2" : "mariadb";
|
||||
console.time(type);
|
||||
let promises = [];
|
||||
|
||||
for (let i = 0; i < 100_000; i++) {
|
||||
if (isBun) {
|
||||
promises.push(sql`select * FROM test100`);
|
||||
} else {
|
||||
promises.push(conn.query("select * FROM test100"));
|
||||
}
|
||||
}
|
||||
await Promise.all(promises);
|
||||
console.timeEnd(type);
|
||||
|
||||
// Clean up connection
|
||||
if (!isBun && conn.release) {
|
||||
conn.release();
|
||||
}
|
||||
@@ -9,8 +9,6 @@
|
||||
"typescript": "^5.0.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"mariadb": "^3.4.5",
|
||||
"mysql2": "^3.14.3",
|
||||
"postgres": "^3.4.7"
|
||||
}
|
||||
}
|
||||
@@ -4,16 +4,20 @@
|
||||
"": {
|
||||
"name": "react-hello-world",
|
||||
"dependencies": {
|
||||
"react": "^19.2.0",
|
||||
"react-dom": "^19.2.0",
|
||||
"react": "next",
|
||||
"react-dom": "next",
|
||||
},
|
||||
},
|
||||
},
|
||||
"packages": {
|
||||
"react": ["react@19.2.0", "", {}, "sha512-tmbWg6W31tQLeB5cdIBOicJDJRR2KzXsV7uSK9iNfLWQ5bIZfxuPEHp7M8wiHyHnn0DD1i7w3Zmin0FtkrwoCQ=="],
|
||||
"js-tokens": ["js-tokens@4.0.0", "", {}, "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ=="],
|
||||
|
||||
"react-dom": ["react-dom@19.2.0", "", { "dependencies": { "scheduler": "^0.27.0" }, "peerDependencies": { "react": "^19.2.0" } }, "sha512-UlbRu4cAiGaIewkPyiRGJk0imDN2T3JjieT6spoL2UeSf5od4n5LB/mQ4ejmxhCFT1tYe8IvaFulzynWovsEFQ=="],
|
||||
"loose-envify": ["loose-envify@1.4.0", "", { "dependencies": { "js-tokens": "^3.0.0 || ^4.0.0" }, "bin": { "loose-envify": "cli.js" } }, "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q=="],
|
||||
|
||||
"scheduler": ["scheduler@0.27.0", "", {}, "sha512-eNv+WrVbKu1f3vbYJT/xtiF5syA5HPIMtf9IgY/nKg0sWqzAUEvqY/xm7OcZc/qafLx/iO9FgOmeSAp4v5ti/Q=="],
|
||||
"react": ["react@18.3.0-next-b72ed698f-20230303", "", { "dependencies": { "loose-envify": "^1.1.0" } }, "sha512-l6RbwXa9Peerh9pQEq62DDypxSQfavbybY0wV1vwZ63X0P5VaaEesZAz1KPpnVvXjTtQaOMQsIPvnQwmaVqzTQ=="],
|
||||
|
||||
"react-dom": ["react-dom@18.3.0-next-b72ed698f-20230303", "", { "dependencies": { "loose-envify": "^1.1.0", "scheduler": "0.24.0-next-b72ed698f-20230303" }, "peerDependencies": { "react": "18.3.0-next-b72ed698f-20230303" } }, "sha512-0Gh/gmTT6H8KxswIQB/8shdTTfs6QIu86nNqZf3Y0RBqIwgTVxRaQVz14/Fw4/Nt81nK/Jt6KT4bx3yvOxZDGQ=="],
|
||||
|
||||
"scheduler": ["scheduler@0.24.0-next-b72ed698f-20230303", "", { "dependencies": { "loose-envify": "^1.1.0" } }, "sha512-ct4DMMFbc2kFxCdvbG+i/Jn1S1oqrIFSn2VX/mam+Ya0iuNy+lb8rgT7A+YBUqrQNDaNEqABYI2sOQgqoRxp7w=="],
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,14 +4,13 @@
|
||||
"description": "",
|
||||
"main": "react-hello-world.node.js",
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1",
|
||||
"build:workerd": "bun build react-hello-world.workerd.jsx --outfile=react-hello-world.workerd.js --format=esm --production && (echo '// MessageChannel polyfill for workerd'; echo 'if (typeof MessageChannel === \"undefined\") {'; echo ' globalThis.MessageChannel = class MessageChannel {'; echo ' constructor() {'; echo ' this.port1 = { onmessage: null, postMessage: () => {} };'; echo ' this.port2 = {'; echo ' postMessage: (msg) => {'; echo ' if (this.port1.onmessage) {'; echo ' queueMicrotask(() => this.port1.onmessage({ data: msg }));'; echo ' }'; echo ' }'; echo ' };'; echo ' }'; echo ' };'; echo '}'; cat react-hello-world.workerd.js) > temp.js && mv temp.js react-hello-world.workerd.js"
|
||||
"test": "echo \"Error: no test specified\" && exit 1"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "Colin McDonnell",
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"react": "^19.2.0",
|
||||
"react-dom": "^19.2.0"
|
||||
"react": "next",
|
||||
"react-dom": "next"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,23 +0,0 @@
|
||||
using Workerd = import "/workerd/workerd.capnp";
|
||||
|
||||
const config :Workerd.Config = (
|
||||
services = [
|
||||
(name = "main", worker = .mainWorker),
|
||||
],
|
||||
|
||||
sockets = [
|
||||
( name = "http",
|
||||
address = "*:3001",
|
||||
http = (),
|
||||
service = "main"
|
||||
),
|
||||
]
|
||||
);
|
||||
|
||||
const mainWorker :Workerd.Worker = (
|
||||
modules = [
|
||||
(name = "worker", esModule = embed "react-hello-world.workerd.js"),
|
||||
],
|
||||
compatibilityDate = "2025-01-01",
|
||||
compatibilityFlags = ["nodejs_compat_v2"],
|
||||
);
|
||||
File diff suppressed because one or more lines are too long
@@ -1,40 +0,0 @@
|
||||
// Cloudflare Workers version with export default fetch
|
||||
// Run with: workerd serve react-hello-world.workerd.config.capnp
|
||||
|
||||
// Polyfill MessageChannel for workerd
|
||||
if (typeof MessageChannel === 'undefined') {
|
||||
globalThis.MessageChannel = class MessageChannel {
|
||||
constructor() {
|
||||
this.port1 = { onmessage: null, postMessage: () => {} };
|
||||
this.port2 = {
|
||||
postMessage: (msg) => {
|
||||
if (this.port1.onmessage) {
|
||||
queueMicrotask(() => this.port1.onmessage({ data: msg }));
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
import React from "react";
|
||||
import { renderToReadableStream } from "react-dom/server";
|
||||
|
||||
const headers = {
|
||||
"Content-Type": "text/html",
|
||||
};
|
||||
|
||||
const App = () => (
|
||||
<html>
|
||||
<body>
|
||||
<h1>Hello World</h1>
|
||||
<p>This is an example.</p>
|
||||
</body>
|
||||
</html>
|
||||
);
|
||||
|
||||
export default {
|
||||
async fetch(request) {
|
||||
return new Response(await renderToReadableStream(<App />), { headers });
|
||||
},
|
||||
};
|
||||
@@ -13,4 +13,7 @@ export function run(opts = {}) {
|
||||
}
|
||||
|
||||
export const bench = Mitata.bench;
|
||||
export const group = Mitata.group;
|
||||
|
||||
export function group(_name, fn) {
|
||||
return Mitata.group(fn);
|
||||
}
|
||||
|
||||
@@ -1,156 +0,0 @@
|
||||
import { bench, group, run } from "../runner.mjs";
|
||||
|
||||
const runAll = !process.argv.includes("--simple");
|
||||
|
||||
const small = new Uint8Array(1024);
|
||||
const medium = new Uint8Array(1024 * 100);
|
||||
const large = new Uint8Array(1024 * 1024);
|
||||
|
||||
for (let i = 0; i < large.length; i++) {
|
||||
const value = Math.floor(Math.sin(i / 100) * 128 + 128);
|
||||
if (i < small.length) small[i] = value;
|
||||
if (i < medium.length) medium[i] = value;
|
||||
large[i] = value;
|
||||
}
|
||||
|
||||
const format = new Intl.NumberFormat("en-US", { notation: "compact", unit: "byte" });
|
||||
|
||||
async function compress(data, format) {
|
||||
const cs = new CompressionStream(format);
|
||||
const writer = cs.writable.getWriter();
|
||||
const reader = cs.readable.getReader();
|
||||
|
||||
writer.write(data);
|
||||
writer.close();
|
||||
|
||||
const chunks = [];
|
||||
while (true) {
|
||||
const { done, value } = await reader.read();
|
||||
if (done) break;
|
||||
chunks.push(value);
|
||||
}
|
||||
|
||||
const result = new Uint8Array(chunks.reduce((acc, chunk) => acc + chunk.length, 0));
|
||||
let offset = 0;
|
||||
for (const chunk of chunks) {
|
||||
result.set(chunk, offset);
|
||||
offset += chunk.length;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
async function decompress(data, format) {
|
||||
const ds = new DecompressionStream(format);
|
||||
const writer = ds.writable.getWriter();
|
||||
const reader = ds.readable.getReader();
|
||||
|
||||
writer.write(data);
|
||||
writer.close();
|
||||
|
||||
const chunks = [];
|
||||
while (true) {
|
||||
const { done, value } = await reader.read();
|
||||
if (done) break;
|
||||
chunks.push(value);
|
||||
}
|
||||
|
||||
const result = new Uint8Array(chunks.reduce((acc, chunk) => acc + chunk.length, 0));
|
||||
let offset = 0;
|
||||
for (const chunk of chunks) {
|
||||
result.set(chunk, offset);
|
||||
offset += chunk.length;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
async function roundTrip(data, format) {
|
||||
const compressed = await compress(data, format);
|
||||
return await decompress(compressed, format);
|
||||
}
|
||||
|
||||
const formats = ["deflate", "gzip", "deflate-raw"];
|
||||
if (runAll) formats.push("brotli", "zstd");
|
||||
|
||||
// Small data benchmarks (1KB)
|
||||
group(`CompressionStream ${format.format(small.length)}`, () => {
|
||||
for (const fmt of formats) {
|
||||
try {
|
||||
new CompressionStream(fmt);
|
||||
bench(fmt, async () => await compress(small, fmt));
|
||||
} catch (e) {
|
||||
// Skip unsupported formats
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Medium data benchmarks (100KB)
|
||||
group(`CompressionStream ${format.format(medium.length)}`, () => {
|
||||
for (const fmt of formats) {
|
||||
try {
|
||||
new CompressionStream(fmt);
|
||||
bench(fmt, async () => await compress(medium, fmt));
|
||||
} catch (e) {}
|
||||
}
|
||||
});
|
||||
|
||||
// Large data benchmarks (1MB)
|
||||
group(`CompressionStream ${format.format(large.length)}`, () => {
|
||||
for (const fmt of formats) {
|
||||
try {
|
||||
new CompressionStream(fmt);
|
||||
bench(fmt, async () => await compress(large, fmt));
|
||||
} catch (e) {
|
||||
// Skip unsupported formats
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
const compressedData = {};
|
||||
for (const fmt of formats) {
|
||||
try {
|
||||
compressedData[fmt] = {
|
||||
small: await compress(small, fmt),
|
||||
medium: await compress(medium, fmt),
|
||||
large: await compress(large, fmt),
|
||||
};
|
||||
} catch (e) {
|
||||
// Skip unsupported formats
|
||||
}
|
||||
}
|
||||
|
||||
group(`DecompressionStream ${format.format(small.length)}`, () => {
|
||||
for (const fmt of formats) {
|
||||
if (compressedData[fmt]) {
|
||||
bench(fmt, async () => await decompress(compressedData[fmt].small, fmt));
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
group(`DecompressionStream ${format.format(medium.length)}`, () => {
|
||||
for (const fmt of formats) {
|
||||
if (compressedData[fmt]) {
|
||||
bench(fmt, async () => await decompress(compressedData[fmt].medium, fmt));
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
group(`DecompressionStream ${format.format(large.length)}`, () => {
|
||||
for (const fmt of formats) {
|
||||
if (compressedData[fmt]) {
|
||||
bench(fmt, async () => await decompress(compressedData[fmt].large, fmt));
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
group(`roundtrip ${format.format(large.length)}`, () => {
|
||||
for (const fmt of formats) {
|
||||
try {
|
||||
new CompressionStream(fmt);
|
||||
bench(fmt, async () => await roundTrip(large, fmt));
|
||||
} catch (e) {
|
||||
// Skip unsupported formats
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
await run();
|
||||
@@ -12,9 +12,6 @@ const scenarios = [
|
||||
{ alg: "sha1", digest: "base64" },
|
||||
{ alg: "sha256", digest: "hex" },
|
||||
{ alg: "sha256", digest: "base64" },
|
||||
{ alg: "blake2b512", digest: "hex" },
|
||||
{ alg: "sha512-224", digest: "hex" },
|
||||
{ alg: "sha512-256", digest: "hex" },
|
||||
];
|
||||
|
||||
for (const { alg, digest } of scenarios) {
|
||||
@@ -26,10 +23,6 @@ for (const { alg, digest } of scenarios) {
|
||||
bench(`${alg}-${digest} (Bun.CryptoHasher)`, () => {
|
||||
new Bun.CryptoHasher(alg).update(data).digest(digest);
|
||||
});
|
||||
|
||||
bench(`${alg}-${digest} (Bun.CryptoHasher.hash)`, () => {
|
||||
return Bun.CryptoHasher.hash(alg, data, digest);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -28,4 +28,10 @@ bench("brotli compress stream", async () => {
|
||||
await pipeline(source, compress);
|
||||
});
|
||||
|
||||
bench("brotli decompress stream", async () => {
|
||||
const source = Readable.from([compressed]);
|
||||
const decompress = createBrotliDecompress();
|
||||
await pipeline(source, decompress);
|
||||
});
|
||||
|
||||
await run();
|
||||
|
||||
@@ -11,10 +11,10 @@ const builtin = ${JSON.stringify(builtin)};
|
||||
const now = performance.now();
|
||||
require(builtin);
|
||||
const end = performance.now();
|
||||
process.stdout.write(JSON.stringify({ builtin, time: end - now }) + "\\n");
|
||||
process.stdout.write(JSON.stringify({builtin, time: end - now}) + "\\n");
|
||||
`,
|
||||
);
|
||||
spawnSync(process.execPath, [path], {
|
||||
const result = spawnSync(typeof Bun !== "undefined" ? "bun" : "node", [path], {
|
||||
stdio: ["inherit", "inherit", "inherit"],
|
||||
env: {
|
||||
...process.env,
|
||||
|
||||
@@ -1,37 +0,0 @@
|
||||
import npmStripAnsi from "strip-ansi";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
let bunStripANSI = null;
|
||||
if (!process.env.FORCE_NPM) {
|
||||
bunStripANSI = globalThis?.Bun?.stripANSI;
|
||||
}
|
||||
|
||||
const stripANSI = bunStripANSI || npmStripAnsi;
|
||||
const formatter = new Intl.NumberFormat();
|
||||
const format = n => {
|
||||
return formatter.format(n);
|
||||
};
|
||||
|
||||
const inputs = [
|
||||
["hello world", "no-ansi"],
|
||||
["\x1b[31mred\x1b[39m", "ansi"],
|
||||
["a".repeat(1024 * 16), "long-no-ansi"],
|
||||
["\x1b[31mred\x1b[39m".repeat(1024 * 16), "long-ansi"],
|
||||
];
|
||||
|
||||
const maxInputLength = Math.max(...inputs.map(([input]) => input.length));
|
||||
|
||||
for (const [input, textLabel] of inputs) {
|
||||
const label = bunStripANSI ? "Bun.stripANSI" : "npm/strip-ansi";
|
||||
const name = `${label} ${format(input.length).padStart(format(maxInputLength).length, " ")} chars ${textLabel}`;
|
||||
|
||||
bench(name, () => {
|
||||
stripANSI(input);
|
||||
});
|
||||
|
||||
if (bunStripANSI && bunStripANSI(input) !== npmStripAnsi(input)) {
|
||||
throw new Error("strip-ansi mismatch");
|
||||
}
|
||||
}
|
||||
|
||||
await run();
|
||||
@@ -1,19 +0,0 @@
|
||||
{
|
||||
"lockfileVersion": 1,
|
||||
"workspaces": {
|
||||
"": {
|
||||
"name": "yaml-benchmark",
|
||||
"dependencies": {
|
||||
"js-yaml": "^4.1.0",
|
||||
"yaml": "^2.8.1",
|
||||
},
|
||||
},
|
||||
},
|
||||
"packages": {
|
||||
"argparse": ["argparse@2.0.1", "", {}, "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q=="],
|
||||
|
||||
"js-yaml": ["js-yaml@4.1.0", "", { "dependencies": { "argparse": "^2.0.1" }, "bin": { "js-yaml": "bin/js-yaml.js" } }, "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA=="],
|
||||
|
||||
"yaml": ["yaml@2.8.1", "", { "bin": { "yaml": "bin.mjs" } }, "sha512-lcYcMxX2PO9XMGvAJkJ3OsNMw+/7FKes7/hgerGUYWIoWu5j/+YQqcZr5JnPZWzOsEBgMbSbiSTn/dv/69Mkpw=="],
|
||||
}
|
||||
}
|
||||
@@ -1,8 +0,0 @@
|
||||
{
|
||||
"name": "yaml-benchmark",
|
||||
"version": "1.0.0",
|
||||
"dependencies": {
|
||||
"js-yaml": "^4.1.0",
|
||||
"yaml": "^2.8.1"
|
||||
}
|
||||
}
|
||||
@@ -1,368 +0,0 @@
|
||||
import { bench, group, run } from "../runner.mjs";
|
||||
import jsYaml from "js-yaml";
|
||||
import yaml from "yaml";
|
||||
|
||||
// Small YAML document
|
||||
const smallYaml = `
|
||||
name: John Doe
|
||||
age: 30
|
||||
email: john@example.com
|
||||
active: true
|
||||
`;
|
||||
|
||||
// Medium YAML document with nested structures
|
||||
const mediumYaml = `
|
||||
company: Acme Corp
|
||||
employees:
|
||||
- name: John Doe
|
||||
age: 30
|
||||
position: Developer
|
||||
skills:
|
||||
- JavaScript
|
||||
- TypeScript
|
||||
- Node.js
|
||||
- name: Jane Smith
|
||||
age: 28
|
||||
position: Designer
|
||||
skills:
|
||||
- Figma
|
||||
- Photoshop
|
||||
- Illustrator
|
||||
- name: Bob Johnson
|
||||
age: 35
|
||||
position: Manager
|
||||
skills:
|
||||
- Leadership
|
||||
- Communication
|
||||
- Planning
|
||||
settings:
|
||||
database:
|
||||
host: localhost
|
||||
port: 5432
|
||||
name: mydb
|
||||
cache:
|
||||
enabled: true
|
||||
ttl: 3600
|
||||
`;
|
||||
|
||||
// Large YAML document with complex structures
|
||||
const largeYaml = `
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: nginx-deployment
|
||||
labels:
|
||||
app: nginx
|
||||
spec:
|
||||
replicas: 3
|
||||
selector:
|
||||
matchLabels:
|
||||
app: nginx
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: nginx
|
||||
spec:
|
||||
containers:
|
||||
- name: nginx
|
||||
image: nginx:1.14.2
|
||||
ports:
|
||||
- containerPort: 80
|
||||
env:
|
||||
- name: ENV_VAR_1
|
||||
value: "value1"
|
||||
- name: ENV_VAR_2
|
||||
value: "value2"
|
||||
volumeMounts:
|
||||
- name: config
|
||||
mountPath: /etc/nginx
|
||||
resources:
|
||||
limits:
|
||||
cpu: "1"
|
||||
memory: "1Gi"
|
||||
requests:
|
||||
cpu: "0.5"
|
||||
memory: "512Mi"
|
||||
volumes:
|
||||
- name: config
|
||||
configMap:
|
||||
name: nginx-config
|
||||
items:
|
||||
- key: nginx.conf
|
||||
path: nginx.conf
|
||||
- key: mime.types
|
||||
path: mime.types
|
||||
nodeSelector:
|
||||
disktype: ssd
|
||||
tolerations:
|
||||
- key: "key1"
|
||||
operator: "Equal"
|
||||
value: "value1"
|
||||
effect: "NoSchedule"
|
||||
- key: "key2"
|
||||
operator: "Exists"
|
||||
effect: "NoExecute"
|
||||
affinity:
|
||||
nodeAffinity:
|
||||
requiredDuringSchedulingIgnoredDuringExecution:
|
||||
nodeSelectorTerms:
|
||||
- matchExpressions:
|
||||
- key: kubernetes.io/e2e-az-name
|
||||
operator: In
|
||||
values:
|
||||
- e2e-az1
|
||||
- e2e-az2
|
||||
podAntiAffinity:
|
||||
preferredDuringSchedulingIgnoredDuringExecution:
|
||||
- weight: 100
|
||||
podAffinityTerm:
|
||||
labelSelector:
|
||||
matchExpressions:
|
||||
- key: app
|
||||
operator: In
|
||||
values:
|
||||
- web-store
|
||||
topologyKey: kubernetes.io/hostname
|
||||
`;
|
||||
|
||||
// YAML with anchors and references
|
||||
const yamlWithAnchors = `
|
||||
defaults: &defaults
|
||||
adapter: postgresql
|
||||
host: localhost
|
||||
port: 5432
|
||||
|
||||
development:
|
||||
<<: *defaults
|
||||
database: dev_db
|
||||
|
||||
test:
|
||||
<<: *defaults
|
||||
database: test_db
|
||||
|
||||
production:
|
||||
<<: *defaults
|
||||
database: prod_db
|
||||
host: prod.example.com
|
||||
`;
|
||||
|
||||
// Array of items
|
||||
const arrayYaml = `
|
||||
- id: 1
|
||||
name: Item 1
|
||||
price: 10.99
|
||||
tags: [electronics, gadgets]
|
||||
- id: 2
|
||||
name: Item 2
|
||||
price: 25.50
|
||||
tags: [books, education]
|
||||
- id: 3
|
||||
name: Item 3
|
||||
price: 5.00
|
||||
tags: [food, snacks]
|
||||
- id: 4
|
||||
name: Item 4
|
||||
price: 100.00
|
||||
tags: [electronics, computers]
|
||||
- id: 5
|
||||
name: Item 5
|
||||
price: 15.75
|
||||
tags: [clothing, accessories]
|
||||
`;
|
||||
|
||||
// Multiline strings
|
||||
const multilineYaml = `
|
||||
description: |
|
||||
This is a multiline string
|
||||
that preserves line breaks
|
||||
and indentation.
|
||||
|
||||
It can contain multiple paragraphs
|
||||
and special characters: !@#$%^&*()
|
||||
|
||||
folded: >
|
||||
This is a folded string
|
||||
where line breaks are converted
|
||||
to spaces unless there are
|
||||
|
||||
empty lines like above.
|
||||
plain: This is a plain string
|
||||
quoted: "This is a quoted string with \\"escapes\\""
|
||||
literal: 'This is a literal string with ''quotes'''
|
||||
`;
|
||||
|
||||
// Numbers and special values
|
||||
const numbersYaml = `
|
||||
integer: 42
|
||||
negative: -17
|
||||
float: 3.14159
|
||||
scientific: 1.23e-4
|
||||
infinity: .inf
|
||||
negativeInfinity: -.inf
|
||||
notANumber: .nan
|
||||
octal: 0o755
|
||||
hex: 0xFF
|
||||
binary: 0b1010
|
||||
`;
|
||||
|
||||
// Dates and timestamps
|
||||
const datesYaml = `
|
||||
date: 2024-01-15
|
||||
datetime: 2024-01-15T10:30:00Z
|
||||
timestamp: 2024-01-15 10:30:00.123456789 -05:00
|
||||
canonical: 2024-01-15T10:30:00.123456789Z
|
||||
`;
|
||||
|
||||
// Parse benchmarks
|
||||
group("parse small YAML", () => {
|
||||
if (typeof Bun !== "undefined" && Bun.YAML) {
|
||||
bench("Bun.YAML.parse", () => {
|
||||
globalThis.result = Bun.YAML.parse(smallYaml);
|
||||
});
|
||||
}
|
||||
|
||||
bench("js-yaml.load", () => {
|
||||
globalThis.result = jsYaml.load(smallYaml);
|
||||
});
|
||||
|
||||
bench("yaml.parse", () => {
|
||||
globalThis.result = yaml.parse(smallYaml);
|
||||
});
|
||||
});
|
||||
|
||||
group("parse medium YAML", () => {
|
||||
if (typeof Bun !== "undefined" && Bun.YAML) {
|
||||
bench("Bun.YAML.parse", () => {
|
||||
globalThis.result = Bun.YAML.parse(mediumYaml);
|
||||
});
|
||||
}
|
||||
|
||||
bench("js-yaml.load", () => {
|
||||
globalThis.result = jsYaml.load(mediumYaml);
|
||||
});
|
||||
|
||||
bench("yaml.parse", () => {
|
||||
globalThis.result = yaml.parse(mediumYaml);
|
||||
});
|
||||
});
|
||||
|
||||
group("parse large YAML", () => {
|
||||
if (typeof Bun !== "undefined" && Bun.YAML) {
|
||||
bench("Bun.YAML.parse", () => {
|
||||
globalThis.result = Bun.YAML.parse(largeYaml);
|
||||
});
|
||||
}
|
||||
|
||||
bench("js-yaml.load", () => {
|
||||
globalThis.result = jsYaml.load(largeYaml);
|
||||
});
|
||||
|
||||
bench("yaml.parse", () => {
|
||||
globalThis.result = yaml.parse(largeYaml);
|
||||
});
|
||||
});
|
||||
|
||||
group("parse YAML with anchors", () => {
|
||||
if (typeof Bun !== "undefined" && Bun.YAML) {
|
||||
bench("Bun.YAML.parse", () => {
|
||||
globalThis.result = Bun.YAML.parse(yamlWithAnchors);
|
||||
});
|
||||
}
|
||||
|
||||
bench("js-yaml.load", () => {
|
||||
globalThis.result = jsYaml.load(yamlWithAnchors);
|
||||
});
|
||||
|
||||
bench("yaml.parse", () => {
|
||||
globalThis.result = yaml.parse(yamlWithAnchors);
|
||||
});
|
||||
});
|
||||
|
||||
group("parse YAML array", () => {
|
||||
if (typeof Bun !== "undefined" && Bun.YAML) {
|
||||
bench("Bun.YAML.parse", () => {
|
||||
globalThis.result = Bun.YAML.parse(arrayYaml);
|
||||
});
|
||||
}
|
||||
|
||||
bench("js-yaml.load", () => {
|
||||
globalThis.result = jsYaml.load(arrayYaml);
|
||||
});
|
||||
|
||||
bench("yaml.parse", () => {
|
||||
globalThis.result = yaml.parse(arrayYaml);
|
||||
});
|
||||
});
|
||||
|
||||
group("parse YAML with multiline strings", () => {
|
||||
if (typeof Bun !== "undefined" && Bun.YAML) {
|
||||
bench("Bun.YAML.parse", () => {
|
||||
globalThis.result = Bun.YAML.parse(multilineYaml);
|
||||
});
|
||||
}
|
||||
|
||||
bench("js-yaml.load", () => {
|
||||
globalThis.result = jsYaml.load(multilineYaml);
|
||||
});
|
||||
|
||||
bench("yaml.parse", () => {
|
||||
globalThis.result = yaml.parse(multilineYaml);
|
||||
});
|
||||
});
|
||||
|
||||
group("parse YAML with numbers", () => {
|
||||
if (typeof Bun !== "undefined" && Bun.YAML) {
|
||||
bench("Bun.YAML.parse", () => {
|
||||
globalThis.result = Bun.YAML.parse(numbersYaml);
|
||||
});
|
||||
}
|
||||
|
||||
bench("js-yaml.load", () => {
|
||||
globalThis.result = jsYaml.load(numbersYaml);
|
||||
});
|
||||
|
||||
bench("yaml.parse", () => {
|
||||
globalThis.result = yaml.parse(numbersYaml);
|
||||
});
|
||||
});
|
||||
|
||||
group("parse YAML with dates", () => {
|
||||
if (typeof Bun !== "undefined" && Bun.YAML) {
|
||||
bench("Bun.YAML.parse", () => {
|
||||
globalThis.result = Bun.YAML.parse(datesYaml);
|
||||
});
|
||||
}
|
||||
|
||||
bench("js-yaml.load", () => {
|
||||
globalThis.result = jsYaml.load(datesYaml);
|
||||
});
|
||||
|
||||
bench("yaml.parse", () => {
|
||||
globalThis.result = yaml.parse(datesYaml);
|
||||
});
|
||||
});
|
||||
|
||||
// // Stringify benchmarks
|
||||
// const smallObjJs = jsYaml.load(smallYaml);
|
||||
// const mediumObjJs = jsYaml.load(mediumYaml);
|
||||
// const largeObjJs = jsYaml.load(largeYaml);
|
||||
|
||||
// group("stringify small object", () => {
|
||||
// bench("js-yaml.dump", () => {
|
||||
// globalThis.result = jsYaml.dump(smallObjJs);
|
||||
// });
|
||||
// });
|
||||
|
||||
// group("stringify medium object", () => {
|
||||
// bench("js-yaml.dump", () => {
|
||||
// globalThis.result = jsYaml.dump(mediumObjJs);
|
||||
// });
|
||||
// });
|
||||
|
||||
// group("stringify large object", () => {
|
||||
// bench("js-yaml.dump", () => {
|
||||
// globalThis.result = jsYaml.dump(largeObjJs);
|
||||
// });
|
||||
// });
|
||||
|
||||
await run();
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user