Compare commits


1 Commit

Author | SHA1 | Message | Date
Jarred Sumner | c5f8f05114 | Update sql.ts | 2025-01-20 20:32:44 -08:00
35098 changed files with 519709 additions and 716403 deletions

View File

@@ -1,78 +0,0 @@
import { spawnSync } from "node:child_process";
import { readFileSync, existsSync } from "node:fs";
import { parseArgs } from "node:util";
const { positionals, values } = parseArgs({
allowPositionals: true,
options: {
help: {
type: "boolean",
short: "h",
default: false,
},
interactive: {
type: "boolean",
short: "i",
default: false,
},
},
});
if (values.help || positionals.length === 0) {
console.log("Usage: node agent.mjs <prompt_name> [extra_args...]");
console.log("Example: node agent.mjs triage fix bug in authentication");
console.log("Options:");
console.log(" -h, --help Show this help message");
console.log(" -i, --interactive Run in interactive mode");
process.exit(0);
}
const promptName = positionals[0].toLowerCase();
const promptFile = `.agent/${promptName}.md`;
const extraArgs = positionals.slice(1);
if (!existsSync(promptFile)) {
console.error(`Error: Prompt file "${promptFile}" not found`);
console.error(`Available prompts should be named like: .agent/triage.md, .agent/debug.md, etc.`);
process.exit(1);
}
try {
let prompt = readFileSync(promptFile, "utf-8");
const githubEnvs = Object.entries(process.env)
.filter(([key]) => key.startsWith("GITHUB_"))
.sort(([a], [b]) => a.localeCompare(b));
if (githubEnvs.length > 0) {
const githubContext = `## GitHub Environment\n\n${githubEnvs
.map(([key, value]) => `**${key}**: \`${value}\``)
.join("\n")}\n\n---\n\n`;
prompt = githubContext + prompt;
}
if (extraArgs.length > 0) {
const extraArgsContext = `\n\n## Additional Arguments\n\n${extraArgs.join(" ")}\n\n---\n\n`;
prompt = prompt + extraArgsContext;
}
const claudeArgs = [prompt, "--allowedTools=Edit,Write,Replace,Search", "--output-format=json"];
if (!values.interactive) {
claudeArgs.unshift("--print");
}
const { status, error } = spawnSync("claude", claudeArgs, {
stdio: "inherit",
encoding: "utf-8",
});
if (error) {
console.error("Error running claude:", error);
process.exit(1);
}
process.exit(status || 0);
} catch (error) {
console.error(`Error reading prompt file "${promptFile}":`, error);
process.exit(1);
}
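
For reference, typical invocations of this (now-deleted) helper, mirroring its own `--help` text — the prompt files are assumed to live under `.agent/` as the error message suggests:

```bash
# run claude non-interactively with .agent/triage.md, passing extra context
node agent.mjs triage fix bug in authentication

# run interactively with .agent/debug.md
node agent.mjs debug -i
```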

View File

@@ -1,15 +1,19 @@
ARG LLVM_VERSION="19"
ARG REPORTED_LLVM_VERSION="19.1.7"
ARG LLVM_VERSION="18"
ARG REPORTED_LLVM_VERSION="18.1.8"
ARG OLD_BUN_VERSION="1.1.38"
ARG DEFAULT_CFLAGS="-mno-omit-leaf-frame-pointer -fno-omit-frame-pointer -ffunction-sections -fdata-sections -faddrsig -fno-unwind-tables -fno-asynchronous-unwind-tables"
ARG DEFAULT_CXXFLAGS="-flto=full -fwhole-program-vtables -fforce-emit-vtables"
ARG BUILDKITE_AGENT_TAGS="queue=linux,os=linux,arch=${TARGETARCH}"
FROM --platform=$BUILDPLATFORM ubuntu:20.04 as base-arm64
FROM --platform=$BUILDPLATFORM ubuntu:20.04 as base-amd64
FROM --platform=$BUILDPLATFORM ubuntu:18.04 as base-amd64
FROM base-$TARGETARCH as base
ARG LLVM_VERSION
ARG OLD_BUN_VERSION
ARG TARGETARCH
ARG DEFAULT_CXXFLAGS
ARG DEFAULT_CFLAGS
ARG REPORTED_LLVM_VERSION
ENV DEBIAN_FRONTEND=noninteractive \
@@ -60,7 +64,9 @@ RUN echo "ARCH_PATH=$([ "$TARGETARCH" = "arm64" ] && echo "aarch64-linux-gnu" ||
ENV LD_LIBRARY_PATH=/usr/lib/gcc/${ARCH_PATH}/13:/usr/lib/${ARCH_PATH} \
LIBRARY_PATH=/usr/lib/gcc/${ARCH_PATH}/13:/usr/lib/${ARCH_PATH} \
CPLUS_INCLUDE_PATH=/usr/include/c++/13:/usr/include/${ARCH_PATH}/c++/13 \
C_INCLUDE_PATH=/usr/lib/gcc/${ARCH_PATH}/13/include
C_INCLUDE_PATH=/usr/lib/gcc/${ARCH_PATH}/13/include \
CFLAGS=${DEFAULT_CFLAGS} \
CXXFLAGS="${DEFAULT_CFLAGS} ${DEFAULT_CXXFLAGS}"
RUN if [ "$TARGETARCH" = "arm64" ]; then \
export ARCH_PATH="aarch64-linux-gnu"; \

View File

@@ -35,7 +35,7 @@ import {
* @typedef {"musl"} Abi
* @typedef {"debian" | "ubuntu" | "alpine" | "amazonlinux"} Distro
* @typedef {"latest" | "previous" | "oldest" | "eol"} Tier
* @typedef {"release" | "assert" | "debug" | "asan"} Profile
* @typedef {"release" | "assert" | "debug"} Profile
*/
/**
@@ -107,10 +107,9 @@ const buildPlatforms = [
{ os: "linux", arch: "aarch64", distro: "amazonlinux", release: "2023", features: ["docker"] },
{ os: "linux", arch: "x64", distro: "amazonlinux", release: "2023", features: ["docker"] },
{ os: "linux", arch: "x64", baseline: true, distro: "amazonlinux", release: "2023", features: ["docker"] },
{ os: "linux", arch: "x64", profile: "asan", distro: "amazonlinux", release: "2023", features: ["docker"] },
{ os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.21" },
{ os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.21" },
{ os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.21" },
{ os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.20" },
{ os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.20" },
{ os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.20" },
{ os: "windows", arch: "x64", release: "2019" },
{ os: "windows", arch: "x64", baseline: true, release: "2019" },
];
@@ -126,16 +125,18 @@ const testPlatforms = [
{ os: "linux", arch: "aarch64", distro: "debian", release: "12", tier: "latest" },
{ os: "linux", arch: "x64", distro: "debian", release: "12", tier: "latest" },
{ os: "linux", arch: "x64", baseline: true, distro: "debian", release: "12", tier: "latest" },
{ os: "linux", arch: "x64", profile: "asan", distro: "debian", release: "12", tier: "latest" },
{ os: "linux", arch: "aarch64", distro: "ubuntu", release: "25.04", tier: "latest" },
{ os: "linux", arch: "aarch64", distro: "ubuntu", release: "24.04", tier: "latest" },
{ os: "linux", arch: "x64", distro: "ubuntu", release: "25.04", tier: "latest" },
{ os: "linux", arch: "aarch64", distro: "ubuntu", release: "22.04", tier: "previous" },
{ os: "linux", arch: "aarch64", distro: "ubuntu", release: "20.04", tier: "oldest" },
{ os: "linux", arch: "x64", distro: "ubuntu", release: "24.04", tier: "latest" },
{ os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "25.04", tier: "latest" },
{ os: "linux", arch: "x64", distro: "ubuntu", release: "22.04", tier: "previous" },
{ os: "linux", arch: "x64", distro: "ubuntu", release: "20.04", tier: "oldest" },
{ os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "24.04", tier: "latest" },
{ os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.21", tier: "latest" },
{ os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.21", tier: "latest" },
{ os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.21", tier: "latest" },
{ os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "22.04", tier: "previous" },
{ os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "20.04", tier: "oldest" },
{ os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.20", tier: "latest" },
{ os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.20", tier: "latest" },
{ os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.20", tier: "latest" },
{ os: "windows", arch: "x64", release: "2019", tier: "oldest" },
{ os: "windows", arch: "x64", release: "2019", baseline: true, tier: "oldest" },
];
@@ -228,7 +229,13 @@ function getRetry(limit = 0) {
manual: {
permit_on_passed: true,
},
automatic: false,
automatic: [
{ exit_status: 1, limit },
{ exit_status: -1, limit: 1 },
{ exit_status: 255, limit: 1 },
{ signal_reason: "cancel", limit: 1 },
{ signal_reason: "agent_stop", limit: 1 },
],
};
}
@@ -309,19 +316,6 @@ function getCppAgent(platform, options) {
});
}
/**
* @returns {Platform}
*/
function getZigPlatform() {
return {
os: "linux",
arch: "aarch64",
abi: "musl",
distro: "alpine",
release: "3.21",
};
}
/**
* @param {Platform} platform
* @param {PipelineOptions} options
@@ -329,15 +323,9 @@ function getZigPlatform() {
*/
function getZigAgent(platform, options) {
const { arch } = platform;
// Uncomment to restore to using macOS on-prem for Zig.
// return {
// queue: "build-zig",
// };
return getEc2Agent(getZigPlatform(), options, {
instanceType: "r8g.large",
});
return {
queue: "build-zig",
};
}
/**
@@ -390,32 +378,22 @@ function getTestAgent(platform, options) {
* @returns {Record<string, string | undefined>}
*/
function getBuildEnv(target, options) {
const { baseline, abi } = target;
const { profile, baseline, abi } = target;
const release = !profile || profile === "release";
const { canary } = options;
const revision = typeof canary === "number" ? canary : 1;
return {
CMAKE_BUILD_TYPE: release ? "Release" : profile === "debug" ? "Debug" : "RelWithDebInfo",
ENABLE_BASELINE: baseline ? "ON" : "OFF",
ENABLE_CANARY: revision > 0 ? "ON" : "OFF",
CANARY_REVISION: revision,
ENABLE_ASSERTIONS: release ? "OFF" : "ON",
ENABLE_LOGS: release ? "OFF" : "ON",
ABI: abi === "musl" ? "musl" : undefined,
CMAKE_VERBOSE_MAKEFILE: "ON",
CMAKE_TLS_VERIFY: "0",
};
}
/**
* @param {Target} target
* @param {PipelineOptions} options
* @returns {string}
*/
function getBuildCommand(target, options) {
const { profile } = target;
const label = profile || "release";
return `bun run build:${label}`;
}
/**
* @param {Platform} platform
* @param {PipelineOptions} options
@@ -429,7 +407,7 @@ function getBuildVendorStep(platform, options) {
retry: getRetry(),
cancel_on_build_failing: isMergeQueue(),
env: getBuildEnv(platform, options),
command: `${getBuildCommand(platform, options)} --target dependencies`,
command: "bun run build:ci --target dependencies",
};
}
@@ -439,7 +417,6 @@ function getBuildVendorStep(platform, options) {
* @returns {Step}
*/
function getBuildCppStep(platform, options) {
const command = getBuildCommand(platform, options);
return {
key: `${getTargetKey(platform)}-build-cpp`,
label: `${getTargetLabel(platform)} - build-cpp`,
@@ -450,10 +427,7 @@ function getBuildCppStep(platform, options) {
BUN_CPP_ONLY: "ON",
...getBuildEnv(platform, options),
},
// We used to build the C++ dependencies and bun in separate steps.
// However, as long as the zig build takes longer than both sequentially,
// it's cheaper to run them in the same step. Can be revisited in the future.
command: [`${command} --target bun`, `${command} --target dependencies`],
command: "bun run build:ci --target bun",
};
}
@@ -487,8 +461,7 @@ function getBuildZigStep(platform, options) {
retry: getRetry(),
cancel_on_build_failing: isMergeQueue(),
env: getBuildEnv(platform, options),
command: `${getBuildCommand(platform, options)} --target bun-zig --toolchain ${toolchain}`,
timeout_in_minutes: 35,
command: `bun run build:ci --target bun-zig --toolchain ${toolchain}`,
};
}
@@ -501,7 +474,11 @@ function getLinkBunStep(platform, options) {
return {
key: `${getTargetKey(platform)}-build-bun`,
label: `${getTargetLabel(platform)} - build-bun`,
depends_on: [`${getTargetKey(platform)}-build-cpp`, `${getTargetKey(platform)}-build-zig`],
depends_on: [
`${getTargetKey(platform)}-build-vendor`,
`${getTargetKey(platform)}-build-cpp`,
`${getTargetKey(platform)}-build-zig`,
],
agents: getCppAgent(platform, options),
retry: getRetry(),
cancel_on_build_failing: isMergeQueue(),
@@ -509,7 +486,7 @@ function getLinkBunStep(platform, options) {
BUN_LINK_ONLY: "ON",
...getBuildEnv(platform, options),
},
command: `${getBuildCommand(platform, options)} --target bun`,
command: "bun run build:ci --target bun",
};
}
@@ -526,7 +503,7 @@ function getBuildBunStep(platform, options) {
retry: getRetry(),
cancel_on_build_failing: isMergeQueue(),
env: getBuildEnv(platform, options),
command: getBuildCommand(platform, options),
command: "bun run build:ci",
};
}
@@ -545,7 +522,7 @@ function getBuildBunStep(platform, options) {
* @returns {Step}
*/
function getTestBunStep(platform, options, testOptions = {}) {
const { os, profile } = platform;
const { os } = platform;
const { buildId, unifiedTests, testFiles } = testOptions;
const args = [`--step=${getTargetKey(platform)}-build-bun`];
@@ -569,7 +546,6 @@ function getTestBunStep(platform, options, testOptions = {}) {
retry: getRetry(),
cancel_on_build_failing: isMergeQueue(),
parallelism: unifiedTests ? undefined : os === "darwin" ? 2 : 10,
timeout_in_minutes: profile === "asan" ? 45 : 30,
command:
os === "windows"
? `node .\\scripts\\runner.node.mjs ${args.join(" ")}`
@@ -642,22 +618,6 @@ function getReleaseStep(buildPlatforms, options) {
};
}
/**
* @returns {Step}
*/
function getBenchmarkStep() {
return {
key: "benchmark",
label: "📊",
agents: {
queue: "build-image",
},
depends_on: `linux-x64-build-bun`,
command: "node .buildkite/scripts/upload-benchmark.mjs",
};
}
/**
* @typedef {Object} Pipeline
* @property {Step[]} [steps]
@@ -744,6 +704,7 @@ function getBenchmarkStep() {
* @property {string | boolean} [buildImages]
* @property {string | boolean} [publishImages]
* @property {number} [canary]
* @property {Profile[]} [buildProfiles]
* @property {Platform[]} [buildPlatforms]
* @property {Platform[]} [testPlatforms]
* @property {string[]} [testFiles]
@@ -838,10 +799,6 @@ function getOptionsStep() {
label: `${getEmoji("assert")} Release with Assertions`,
value: "assert",
},
{
label: `${getEmoji("asan")} Release with ASAN`,
value: "asan",
},
{
label: `${getEmoji("debug")} Debug`,
value: "debug",
@@ -922,7 +879,7 @@ function getOptionsStep() {
{
key: "unified-builds",
select: "Do you want to build each platform in a single step?",
hint: "If true, builds will not be split into separate steps (this will likely slow down the build)",
hint: "If true, builds will not be split into seperate steps (this will likely slow down the build)",
required: false,
default: "false",
options: booleanOptions,
@@ -930,7 +887,7 @@ function getOptionsStep() {
{
key: "unified-tests",
select: "Do you want to run tests in a single step?",
hint: "If true, tests will not be split into separate steps (this will be very slow)",
hint: "If true, tests will not be split into seperate steps (this will be very slow)",
required: false,
default: "false",
options: booleanOptions,
@@ -964,13 +921,8 @@ async function getPipelineOptions() {
return;
}
let filteredBuildPlatforms = buildPlatforms;
if (isMainBranch()) {
filteredBuildPlatforms = buildPlatforms.filter(({ profile }) => profile !== "asan");
}
const canary = await getCanaryRevision();
const buildPlatformsMap = new Map(filteredBuildPlatforms.map(platform => [getTargetKey(platform), platform]));
const buildPlatformsMap = new Map(buildPlatforms.map(platform => [getTargetKey(platform), platform]));
const testPlatformsMap = new Map(testPlatforms.map(platform => [getPlatformKey(platform), platform]));
if (isManual) {
@@ -989,7 +941,6 @@ async function getPipelineOptions() {
?.map(item => item.trim())
?.filter(Boolean);
const buildProfiles = parseArray(options["build-profiles"]);
const buildPlatformKeys = parseArray(options["build-platforms"]);
const testPlatformKeys = parseArray(options["test-platforms"]);
return {
@@ -1002,11 +953,12 @@ async function getPipelineOptions() {
testFiles: parseArray(options["test-files"]),
unifiedBuilds: parseBoolean(options["unified-builds"]),
unifiedTests: parseBoolean(options["unified-tests"]),
buildProfiles: parseArray(options["build-profiles"]),
buildPlatforms: buildPlatformKeys?.length
? buildPlatformKeys.flatMap(key => buildProfiles.map(profile => ({ ...buildPlatformsMap.get(key), profile })))
? buildPlatformKeys.map(key => buildPlatformsMap.get(key))
: Array.from(buildPlatformsMap.values()),
testPlatforms: testPlatformKeys?.length
? testPlatformKeys.flatMap(key => buildProfiles.map(profile => ({ ...testPlatformsMap.get(key), profile })))
? testPlatformKeys.map(key => testPlatformsMap.get(key))
: Array.from(testPlatformsMap.values()),
dryRun: parseBoolean(options["dry-run"]),
};
@@ -1041,6 +993,7 @@ async function getPipelineOptions() {
publishImages: parseOption(/\[(publish images?)\]/i),
buildPlatforms: Array.from(buildPlatformsMap.values()),
testPlatforms: Array.from(testPlatformsMap.values()),
buildProfiles: ["release"],
};
}
@@ -1063,7 +1016,7 @@ async function getPipeline(options = {}) {
return;
}
const { buildPlatforms = [], testPlatforms = [], buildImages, publishImages } = options;
const { buildProfiles = [], buildPlatforms = [], testPlatforms = [], buildImages, publishImages } = options;
const imagePlatforms = new Map(
buildImages || publishImages
? [...buildPlatforms, ...testPlatforms]
@@ -1098,33 +1051,29 @@ async function getPipeline(options = {}) {
}
}
const includeASAN = !isMainBranch();
if (!buildId) {
const relevantBuildPlatforms = includeASAN
? buildPlatforms
: buildPlatforms.filter(({ profile }) => profile !== "asan");
steps.push(
...relevantBuildPlatforms.map(target => {
const imageKey = getImageKey(target);
const zigImageKey = getImageKey(getZigPlatform());
const dependsOn = imagePlatforms.has(zigImageKey) ? [`${zigImageKey}-build-image`] : [];
if (imagePlatforms.has(imageKey)) {
dependsOn.push(`${imageKey}-build-image`);
}
...buildPlatforms
.flatMap(platform => buildProfiles.map(profile => ({ ...platform, profile })))
.map(target => {
const imageKey = getImageKey(target);
return getStepWithDependsOn(
{
key: getTargetKey(target),
group: getTargetLabel(target),
steps: unifiedBuilds
? [getBuildBunStep(target, options)]
: [getBuildCppStep(target, options), getBuildZigStep(target, options), getLinkBunStep(target, options)],
},
...dependsOn,
);
}),
return getStepWithDependsOn(
{
key: getTargetKey(target),
group: getTargetLabel(target),
steps: unifiedBuilds
? [getBuildBunStep(target, options)]
: [
getBuildVendorStep(target, options),
getBuildCppStep(target, options),
getBuildZigStep(target, options),
getLinkBunStep(target, options),
],
},
imagePlatforms.has(imageKey) ? `${imageKey}-build-image` : undefined,
);
}),
);
}
@@ -1132,11 +1081,13 @@ async function getPipeline(options = {}) {
const { skipTests, forceTests, unifiedTests, testFiles } = options;
if (!skipTests || forceTests) {
steps.push(
...testPlatforms.map(target => ({
key: getTargetKey(target),
group: getTargetLabel(target),
steps: [getTestBunStep(target, options, { unifiedTests, testFiles, buildId })],
})),
...testPlatforms
.flatMap(platform => buildProfiles.map(profile => ({ ...platform, profile })))
.map(target => ({
key: getTargetKey(target),
group: getTargetLabel(target),
steps: [getTestBunStep(target, options, { unifiedTests, testFiles, buildId })],
})),
);
}
}
@@ -1144,7 +1095,6 @@ async function getPipeline(options = {}) {
if (isMainBranch()) {
steps.push(getReleaseStep(buildPlatforms, options));
}
steps.push(getBenchmarkStep());
/** @type {Map<string, GroupStep>} */
const stepsByGroup = new Map();

View File

@@ -1,255 +0,0 @@
# macOS Runner Infrastructure - Claude Development Guide
This document provides context and guidance for Claude to work on the macOS runner infrastructure.
## Overview
This infrastructure provides automated, scalable macOS CI runners for Bun using MacStadium's Orka platform. It implements complete job isolation, daily image rebuilds, and comprehensive testing.
## Architecture
### Core Components
- **Packer**: Builds VM images with all required software
- **Terraform**: Manages VM fleet with auto-scaling
- **GitHub Actions**: Automates daily rebuilds and deployments
- **User Management**: Creates isolated users per job (`bk-<job-id>`)
### Key Features
- **Complete Job Isolation**: Each Buildkite job runs in its own user account
- **Daily Image Rebuilds**: Automated nightly rebuilds ensure fresh environments
- **Flakiness Testing**: Multiple test iterations ensure reliability (80% success rate minimum)
- **Software Validation**: All tools tested for proper installation and functionality
- **Version Synchronization**: Exact versions match bootstrap.sh requirements
## File Structure
```
.buildkite/macos-runners/
├── packer/
│ └── macos-base.pkr.hcl # VM image building configuration
├── terraform/
│ ├── main.tf # Infrastructure definition
│ ├── variables.tf # Configuration variables
│ ├── outputs.tf # Resource outputs
│ └── user-data.sh # VM initialization script
├── scripts/
│ ├── bootstrap-macos.sh # macOS software installation
│ ├── create-build-user.sh # User creation for job isolation
│ ├── cleanup-build-user.sh # User cleanup after jobs
│ └── job-runner.sh # Main job lifecycle management
├── github-actions/
│ ├── image-rebuild.yml # Daily image rebuild workflow
│ └── deploy-fleet.yml # Fleet deployment workflow
├── README.md # User documentation
├── DEPLOYMENT.md # Deployment guide
└── CLAUDE.md # This file
```
## Software Versions (Must Match bootstrap.sh)
These versions are synchronized with `/scripts/bootstrap.sh`:
- **Node.js**: 24.3.0 (exact)
- **Bun**: 1.2.17 (exact)
- **LLVM**: 19.1.7 (exact)
- **CMake**: 3.30.5 (exact)
- **Buildkite Agent**: 3.87.0
## Key Scripts
### bootstrap-macos.sh
- Installs all required software with exact versions
- Configures development environment
- Sets up Tailscale, Docker, and other dependencies
- **Critical**: Must stay synchronized with main bootstrap.sh
### create-build-user.sh
- Creates unique user per job: `bk-<job-id>`
- Sets up isolated environment with proper permissions
- Configures shell environment and paths
- Creates workspace directories
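A minimal sketch of that flow, assuming macOS's `sysadminctl` is the creation mechanism (the real script's flags and paths may differ):

```bash
#!/bin/bash
# Hypothetical sketch: create an isolated build user for one job.
set -euo pipefail
JOB_ID="$1"
USERNAME="bk-${JOB_ID}"

# Create the user with its own home directory and shell.
sysadminctl -addUser "$USERNAME" -home "/Users/${USERNAME}" -shell /bin/zsh
createhomedir -c -u "$USERNAME" >/dev/null

# Give the job an isolated workspace it fully owns.
mkdir -p "/Users/${USERNAME}/workspace"
chown -R "$USERNAME" "/Users/${USERNAME}"
```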
### cleanup-build-user.sh
- Kills all processes owned by build user
- Removes user account and home directory
- Cleans up temporary files and caches
- Ensures complete isolation between jobs
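The teardown mirrors it — again a hedged sketch, not the script itself:

```bash
#!/bin/bash
# Hypothetical sketch: tear down a build user after the job finishes.
set -euo pipefail
USERNAME="$1"

# Kill anything the build user still owns, then remove the account.
pkill -9 -u "$USERNAME" || true
sysadminctl -deleteUser "$USERNAME"

# Remove the home directory and any per-user temp files.
rm -rf "/Users/${USERNAME}" /tmp/"${USERNAME}".*
```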
### job-runner.sh
- Main orchestration script
- Manages job lifecycle: create user → run job → cleanup
- Handles timeouts and health checks
- Runs as root via LaunchDaemon
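Stitched together, the lifecycle looks roughly like this (script paths from above; the agent invocation and error handling are assumptions):

```bash
#!/bin/bash
# Hypothetical sketch of the per-job lifecycle: create -> run -> cleanup.
JOB_ID="$1"
USERNAME="bk-${JOB_ID}"

/usr/local/bin/bun-ci/create-build-user.sh "$JOB_ID"

# Run the job as the isolated user; capture the exit code so cleanup
# always happens, even on failure.
status=0
sudo -u "$USERNAME" -i buildkite-agent start --acquire-job "$JOB_ID" || status=$?

/usr/local/bin/bun-ci/cleanup-build-user.sh "$USERNAME"
exit "$status"
```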
## GitHub Actions Workflows
### image-rebuild.yml
- Runs daily at 2 AM UTC
- Detects changes to trigger rebuilds
- Builds images for macOS 13, 14, 15
- **Validation Steps**:
- Software installation verification
- Flakiness testing (3 iterations, 80% success rate)
- Health endpoint testing
- Discord notifications for status
### deploy-fleet.yml
- Manual deployment trigger
- Validates inputs and plans changes
- Deploys VM fleet with health checks
- Supports different environments (prod/staging/dev)
## Required Secrets
### MacStadium
- `MACSTADIUM_API_KEY`: API access key
- `ORKA_ENDPOINT`: Orka API endpoint
- `ORKA_AUTH_TOKEN`: Authentication token
### AWS
- `AWS_ACCESS_KEY_ID`: For Terraform state storage
- `AWS_SECRET_ACCESS_KEY`: For Terraform state storage
### Buildkite
- `BUILDKITE_AGENT_TOKEN`: Agent registration token
- `BUILDKITE_API_TOKEN`: For monitoring/status checks
- `BUILDKITE_ORG`: Organization slug
### GitHub
- `GITHUB_TOKEN`: For private repository access
### Notifications
- `DISCORD_WEBHOOK_URL`: For status notifications
## Development Guidelines
### Adding New Software
1. Update `bootstrap-macos.sh` with installation commands
2. Add version verification in the script
3. Include in validation tests in `image-rebuild.yml`
4. Update documentation in README.md
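Step 2 usually boils down to a small install-then-verify helper; a sketch of the pattern (the helper name is illustrative, versions from the table above):

```bash
# Fail the image build if a tool is missing or at the wrong version.
verify_version() {
  local tool="$1" expected="$2"
  "$tool" --version 2>&1 | grep -q "$expected" || {
    echo "error: $tool is not at expected version $expected" >&2
    exit 1
  }
}

verify_version node "24.3.0"
verify_version bun "1.2.17"
verify_version cmake "3.30.5"
```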
### Modifying User Isolation
1. Update `create-build-user.sh` for user creation
2. Update `cleanup-build-user.sh` for cleanup
3. Test isolation in `job-runner.sh`
4. Ensure proper permissions and security
### Updating VM Configuration
1. Modify `terraform/variables.tf` for fleet sizing
2. Update `terraform/main.tf` for infrastructure changes
3. Test deployment with `deploy-fleet.yml`
4. Update documentation
### Version Updates
1. **Critical**: Check `/scripts/bootstrap.sh` for version changes
2. Update exact versions in `bootstrap-macos.sh`
3. Update version verification in workflows
4. Update documentation
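A low-tech way to catch drift between the two scripts, assuming both pin versions as plain `NAME_VERSION=...` assignments (the real layout may differ):

```bash
# Show version pins that differ between the main and macOS bootstrap scripts.
diff <(grep -E '^[A-Z_]+_VERSION=' scripts/bootstrap.sh | sort) \
     <(grep -E '^[A-Z_]+_VERSION=' .buildkite/macos-runners/scripts/bootstrap-macos.sh | sort)
```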
## Testing Strategy
### Image Validation
- Software installation verification
- Version checking for exact matches
- Health endpoint testing
- Basic functionality tests
### Flakiness Testing
- 3 test iterations per image
- 80% success rate minimum
- Tests basic commands, Node.js, Bun, build tools
- Automated cleanup of test VMs
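The gating logic is simple to sketch (the SSH target and smoke-test commands follow the validation steps in `image-rebuild.yml`; the loop itself is an assumption):

```bash
# Run the smoke test N times and enforce a minimum success rate.
ITERATIONS=3
MIN_RATE=80
pass=0
for i in $(seq 1 "$ITERATIONS"); do
  if ssh -o StrictHostKeyChecking=no "admin@$VM_IP" 'node --version && bun --version && cmake --version'; then
    pass=$((pass + 1))
  fi
done
rate=$((pass * 100 / ITERATIONS))
if [ "$rate" -lt "$MIN_RATE" ]; then
  echo "Image too flaky: ${rate}% success (< ${MIN_RATE}%)" >&2
  exit 1
fi
```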
### Integration Testing
- End-to-end job execution
- User isolation verification
- Resource cleanup validation
- Performance monitoring
## Troubleshooting
### Common Issues
1. **Version Mismatches**: Check bootstrap.sh for updates
2. **User Cleanup Failures**: Check process termination and file permissions
3. **Image Build Failures**: Check Packer logs and VM resources
4. **Flakiness**: Investigate VM performance and network issues
### Debugging Commands
```bash
# Check VM status
orka vm list
# Check image status
orka image list
# Test user creation
sudo /usr/local/bin/bun-ci/create-build-user.sh
# Check health endpoint
curl http://localhost:8080/health
# View logs
tail -f /usr/local/var/log/buildkite-agent/buildkite-agent.log
```
## Performance Considerations
### Resource Management
- VMs configured with 12 CPU cores, 32GB RAM
- Auto-scaling based on queue demand
- Aggressive cleanup to prevent resource leaks
### Cost Optimization
- Automated cleanup of old images and snapshots
- Efficient VM sizing based on workload requirements
- Scheduled maintenance windows
## Security
### Isolation
- Complete process isolation per job
- Separate user accounts with unique UIDs
- Cleanup of all user data after jobs
### Network Security
- VPC isolation with security groups
- Limited SSH access for debugging
- Encrypted communications
### Credential Management
- Secure secret storage in GitHub
- No hardcoded credentials in code
- Regular rotation of access tokens
## Monitoring
### Health Checks
- HTTP endpoints on port 8080
- Buildkite agent connectivity monitoring
- Resource usage tracking
### Alerts
- Discord notifications for failures
- Build status reporting
- Fleet deployment notifications
## Next Steps for Development
1. **Monitor bootstrap.sh**: Watch for version updates that need synchronization
2. **Performance Optimization**: Monitor resource usage and optimize VM sizes
3. **Enhanced Testing**: Add more comprehensive validation tests
4. **Cost Monitoring**: Track usage and optimize for cost efficiency
5. **Security Hardening**: Regular security reviews and updates
## References
- [MacStadium Orka Documentation](https://orkadocs.macstadium.com/)
- [Packer Documentation](https://www.packer.io/docs)
- [Terraform Documentation](https://www.terraform.io/docs)
- [Buildkite Agent Documentation](https://buildkite.com/docs/agent/v3)
- [Main bootstrap.sh](../../scripts/bootstrap.sh) - **Keep synchronized!**
---
**Important**: This infrastructure is critical for Bun's CI/CD pipeline. Always test changes thoroughly and maintain backward compatibility. The `bootstrap-macos.sh` script must stay synchronized with the main `bootstrap.sh` script to ensure consistent environments.

View File

@@ -1,428 +0,0 @@
# macOS Runner Deployment Guide
This guide provides step-by-step instructions for deploying the macOS runner infrastructure for Bun CI.
## Prerequisites
### 1. MacStadium Account Setup
1. **Create MacStadium Account**
- Sign up at [MacStadium](https://www.macstadium.com/)
- Purchase Orka plan with appropriate VM allocation
2. **Configure API Access**
- Generate API key from MacStadium dashboard
- Note down your Orka endpoint URL
- Test API connectivity
3. **Base Image Preparation**
- Ensure base macOS images are available in your account
- Verify image naming convention: `base-images/macos-{version}-{name}`
### 2. AWS Account Setup
1. **Create AWS Account**
- Set up AWS account for Terraform state storage
- Create S3 bucket for Terraform backend: `bun-terraform-state`
2. **Configure IAM**
- Create IAM user with appropriate permissions
- Generate access key and secret key
- Attach policies for S3, CloudWatch, and EC2 (if using AWS resources)
### 3. GitHub Repository Setup
1. **Fork or Clone Repository**
- Ensure you have admin access to the repository
- Create necessary branches for deployment
2. **Configure Repository Secrets**
- Add all required secrets (see main README.md)
- Test secret accessibility
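Secrets can also be added from the CLI (names from the Required Secrets list; values are placeholders):

```bash
gh secret set MACSTADIUM_API_KEY --body "<your-macstadium-api-key>"
gh secret set BUILDKITE_AGENT_TOKEN --body "<your-buildkite-agent-token>"
gh secret set DISCORD_WEBHOOK_URL --body "<your-discord-webhook-url>"
```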
### 4. Buildkite Setup
1. **Organization Configuration**
- Create or access Buildkite organization
- Generate agent token with appropriate permissions
- Note organization slug
2. **Queue Configuration**
- Create queues: `macos`, `macos-arm64`, `macos-x86_64`
- Configure queue-specific settings
## Step-by-Step Deployment
### Step 1: Environment Preparation
1. **Install Required Tools**
```bash
# Install Terraform
wget https://releases.hashicorp.com/terraform/1.6.0/terraform_1.6.0_linux_amd64.zip
unzip terraform_1.6.0_linux_amd64.zip
sudo mv terraform /usr/local/bin/
# Install Packer
wget https://releases.hashicorp.com/packer/1.9.4/packer_1.9.4_linux_amd64.zip
unzip packer_1.9.4_linux_amd64.zip
sudo mv packer /usr/local/bin/
# Install AWS CLI
curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip"
unzip awscliv2.zip
sudo ./aws/install
# Install MacStadium CLI
curl -L "https://github.com/macstadium/orka-cli/releases/latest/download/orka-cli-linux-amd64.tar.gz" | tar -xz
sudo mv orka-cli /usr/local/bin/orka
```
2. **Configure AWS Credentials**
```bash
aws configure
# Enter your AWS access key, secret key, and region
```
3. **Configure MacStadium CLI**
```bash
orka config set endpoint <your-orka-endpoint>
orka auth token <your-orka-token>
```
### Step 2: SSH Key Setup
1. **Generate SSH Key Pair**
```bash
ssh-keygen -t rsa -b 4096 -f ~/.ssh/bun-runner -N ""
```
2. **Copy Public Key to Terraform Directory**
```bash
mkdir -p .buildkite/macos-runners/terraform/ssh-keys
cp ~/.ssh/bun-runner.pub .buildkite/macos-runners/terraform/ssh-keys/bun-runner.pub
```
### Step 3: Terraform Backend Setup
1. **Create S3 Bucket for Terraform State**
```bash
aws s3 mb s3://bun-terraform-state --region us-west-2
aws s3api put-bucket-versioning --bucket bun-terraform-state --versioning-configuration Status=Enabled
aws s3api put-bucket-encryption --bucket bun-terraform-state --server-side-encryption-configuration '{
"Rules": [
{
"ApplyServerSideEncryptionByDefault": {
"SSEAlgorithm": "AES256"
}
}
]
}'
```
2. **Create Terraform Variables File**
```bash
cd .buildkite/macos-runners/terraform
cat > production.tfvars << EOF
environment = "production"
macstadium_api_key = "your-macstadium-api-key"
buildkite_agent_token = "your-buildkite-agent-token"
github_token = "your-github-token"
fleet_size = {
macos_13 = 4
macos_14 = 6
macos_15 = 8
}
vm_configuration = {
cpu_count = 12
memory_gb = 32
disk_size = 500
}
EOF
```
### Step 4: Build VM Images
1. **Validate Packer Configuration**
```bash
cd .buildkite/macos-runners/packer
packer validate -var "macos_version=15" macos-base.pkr.hcl
```
2. **Build macOS 15 Image**
```bash
packer build \
-var "macos_version=15" \
-var "orka_endpoint=<your-orka-endpoint>" \
-var "orka_auth_token=<your-orka-token>" \
macos-base.pkr.hcl
```
3. **Build macOS 14 Image**
```bash
packer build \
-var "macos_version=14" \
-var "orka_endpoint=<your-orka-endpoint>" \
-var "orka_auth_token=<your-orka-token>" \
macos-base.pkr.hcl
```
4. **Build macOS 13 Image**
```bash
packer build \
-var "macos_version=13" \
-var "orka_endpoint=<your-orka-endpoint>" \
-var "orka_auth_token=<your-orka-token>" \
macos-base.pkr.hcl
```
### Step 5: Deploy VM Fleet
1. **Initialize Terraform**
```bash
cd .buildkite/macos-runners/terraform
terraform init
```
2. **Create Production Workspace**
```bash
terraform workspace new production
```
3. **Plan Deployment**
```bash
terraform plan -var-file="production.tfvars"
```
4. **Apply Deployment**
```bash
terraform apply -var-file="production.tfvars"
```
### Step 6: Verify Deployment
1. **Check VM Status**
```bash
orka vm list
```
2. **Check Terraform Outputs**
```bash
terraform output
```
3. **Test VM Connectivity**
```bash
# Get VM IP from terraform output
VM_IP=$(terraform output -json vm_instances | jq -r 'to_entries[0].value.ip_address')
# Test SSH connectivity
ssh -i ~/.ssh/bun-runner admin@$VM_IP
# Test health endpoint
curl http://$VM_IP:8080/health
```
4. **Verify Buildkite Agent Connectivity**
```bash
curl -H "Authorization: Bearer <your-buildkite-api-token>" \
"https://api.buildkite.com/v2/organizations/<your-org>/agents"
```
### Step 7: Configure GitHub Actions
1. **Enable GitHub Actions Workflows**
- Navigate to repository Actions tab
- Enable workflows if not already enabled
2. **Test Image Rebuild Workflow**
```bash
# Trigger manual rebuild
gh workflow run image-rebuild.yml
```
3. **Test Fleet Deployment Workflow**
```bash
# Trigger manual deployment
gh workflow run deploy-fleet.yml
```
## Post-Deployment Configuration
### 1. Monitoring Setup
1. **CloudWatch Dashboards**
- Create custom dashboards for VM metrics
- Set up alarms for critical thresholds
2. **Discord Notifications**
- Configure Discord webhook for alerts
- Test notification delivery
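Delivery can be tested by hand with a standard Discord webhook payload:

```bash
curl -sf -H "Content-Type: application/json" \
  -d '{"content": "macOS runners: test notification"}' \
  "$DISCORD_WEBHOOK_URL"
```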
### 2. Backup Configuration
1. **Enable Automated Snapshots**
```hcl
# Update terraform configuration
backup_config = {
enable_snapshots = true
snapshot_schedule = "0 4 * * *"
snapshot_retention = 7
}
```
2. **Test Backup Restoration**
- Create test snapshot
- Verify restoration process
### 3. Security Hardening
1. **Review Security Groups**
- Minimize open ports
- Restrict source IP ranges
2. **Enable Audit Logging**
- Configure CloudTrail for AWS resources
- Enable MacStadium audit logs
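For the AWS side, a minimal trail looks like this (trail and bucket names are illustrative):

```bash
aws cloudtrail create-trail --name bun-runner-audit --s3-bucket-name <your-audit-log-bucket>
aws cloudtrail start-logging --name bun-runner-audit
```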
### 4. Performance Optimization
1. **Monitor Resource Usage**
- Review CPU, memory, disk usage
- Adjust VM sizes if needed
2. **Optimize Auto-Scaling**
- Monitor scaling events
- Adjust thresholds as needed
## Maintenance Procedures
### Daily Maintenance
1. **Automated Tasks**
- Image rebuilds (automatic)
- Health checks (automatic)
- Cleanup processes (automatic)
2. **Manual Monitoring**
- Check Discord notifications
- Review CloudWatch metrics
- Monitor Buildkite queue
### Weekly Maintenance
1. **Review Metrics**
- Analyze performance trends
- Check cost optimization opportunities
2. **Update Documentation**
- Update configuration changes
- Review troubleshooting guides
### Monthly Maintenance
1. **Capacity Planning**
- Review usage patterns
- Plan capacity adjustments
2. **Security Updates**
- Review security patches
- Update base images if needed
## Troubleshooting Common Issues
### Issue: VM Creation Fails
```bash
# Check MacStadium account limits
orka account info
# Check available resources
orka resource list
# Review Packer logs
tail -f packer-build.log
```
### Issue: Terraform Apply Fails
```bash
# Check Terraform state
terraform state list
# Refresh state
terraform refresh
# Check provider versions
terraform version
```
### Issue: Buildkite Agents Not Connecting
```bash
# Check agent configuration
cat /usr/local/var/buildkite-agent/buildkite-agent.cfg
# Check agent logs
tail -f /usr/local/var/log/buildkite-agent/buildkite-agent.log
# Restart agent service
sudo launchctl unload /Library/LaunchDaemons/com.buildkite.buildkite-agent.plist
sudo launchctl load /Library/LaunchDaemons/com.buildkite.buildkite-agent.plist
```
## Rollback Procedures
### Rollback VM Fleet
1. **Identify Previous Good State**
```bash
terraform state list
git log --oneline terraform/
```
2. **Rollback to Previous Configuration**
```bash
git checkout <previous-commit>
terraform plan -var-file="production.tfvars"
terraform apply -var-file="production.tfvars"
```
### Rollback VM Images
1. **List Available Images**
```bash
orka image list
```
2. **Update Terraform to Use Previous Images**
```bash
# Edit terraform configuration to use previous image IDs
terraform plan -var-file="production.tfvars"
terraform apply -var-file="production.tfvars"
```
## Cost Optimization Tips
1. **Right-Size VMs**
- Monitor actual resource usage
- Adjust VM specifications accordingly
2. **Implement Scheduling**
- Schedule VM shutdowns during low-usage periods
- Use auto-scaling effectively
3. **Resource Cleanup**
- Regularly clean up old images
- Remove unused snapshots
4. **Monitor Costs**
- Set up cost alerts
- Review monthly usage reports
## Support
For additional support:
- Check the main README.md for troubleshooting
- Review GitHub Actions logs
- Contact MacStadium support for platform issues
- Open issues in the repository for infrastructure problems

View File

@@ -1,374 +0,0 @@
# macOS Runner Infrastructure
This directory contains the infrastructure-as-code for deploying and managing macOS CI runners for the Bun project. It is located in the `.buildkite` folder alongside other CI configuration. The infrastructure provides automated, scalable, and reliable macOS build environments using MacStadium's Orka platform.
## Architecture Overview
The infrastructure consists of several key components:
1. **VM Images**: Golden images built with Packer containing all necessary software
2. **VM Fleet**: Terraform-managed fleet of macOS VMs across different versions
3. **User Isolation**: Per-job user creation and cleanup for complete isolation
4. **Automation**: GitHub Actions workflows for daily image rebuilds and fleet management
## Key Features
- **Complete Isolation**: Each Buildkite job runs in its own user account
- **Automatic Cleanup**: Processes and temporary files are cleaned up after each job
- **Daily Image Rebuilds**: Automated nightly rebuilds ensure fresh, up-to-date environments
- **Multi-Version Support**: Supports macOS 13, 14, and 15 simultaneously
- **Auto-Scaling**: Automatic scaling based on job queue demand
- **Health Monitoring**: Continuous health checks and monitoring
- **Cost Optimization**: Efficient resource utilization and cleanup
## Directory Structure
```
.buildkite/macos-runners/
├── packer/ # Packer configuration for VM images
│ ├── macos-base.pkr.hcl # Main Packer configuration
│ └── ssh-keys/ # SSH keys for VM access
├── terraform/ # Terraform configuration for VM fleet
│ ├── main.tf # Main Terraform configuration
│ ├── variables.tf # Variable definitions
│ ├── outputs.tf # Output definitions
│ └── user-data.sh # VM initialization script
├── scripts/ # Management and utility scripts
│ ├── bootstrap-macos.sh # macOS-specific bootstrap script
│ ├── create-build-user.sh # User creation script
│ ├── cleanup-build-user.sh # User cleanup script
│ └── job-runner.sh # Main job runner script
├── github-actions/ # GitHub Actions workflows
│ ├── image-rebuild.yml # Daily image rebuild workflow
│ └── deploy-fleet.yml # Fleet deployment workflow
└── README.md # This file
```
## Prerequisites
Before deploying the infrastructure, ensure you have:
1. **MacStadium Account**: Active MacStadium Orka account with API access
2. **AWS Account**: For Terraform state storage and CloudWatch monitoring
3. **GitHub Repository**: With required secrets configured
4. **Buildkite Account**: With organization and agent tokens
5. **Required Tools**: Packer, Terraform, AWS CLI, and MacStadium CLI
## Required Secrets
Configure the following secrets in your GitHub repository:
### MacStadium
- `MACSTADIUM_API_KEY`: MacStadium API key
- `ORKA_ENDPOINT`: MacStadium Orka API endpoint
- `ORKA_AUTH_TOKEN`: MacStadium authentication token
### AWS
- `AWS_ACCESS_KEY_ID`: AWS access key ID
- `AWS_SECRET_ACCESS_KEY`: AWS secret access key
### Buildkite
- `BUILDKITE_AGENT_TOKEN`: Buildkite agent token
- `BUILDKITE_API_TOKEN`: Buildkite API token (for monitoring)
- `BUILDKITE_ORG`: Buildkite organization slug
### GitHub
- `GITHUB_TOKEN`: GitHub personal access token (for private repositories)
### Notifications
- `DISCORD_WEBHOOK_URL`: Discord webhook URL for notifications
## Quick Start
### 1. Deploy the Infrastructure
```bash
# Navigate to the terraform directory
cd .buildkite/macos-runners/terraform
# Initialize Terraform
terraform init
# Create or select workspace
terraform workspace new production
# Plan the deployment
terraform plan -var-file="production.tfvars"
# Apply the deployment
terraform apply -var-file="production.tfvars"
```
### 2. Build VM Images
```bash
# Navigate to the packer directory
cd .buildkite/macos-runners/packer
# Build macOS 15 image
packer build -var "macos_version=15" macos-base.pkr.hcl
# Build macOS 14 image
packer build -var "macos_version=14" macos-base.pkr.hcl
# Build macOS 13 image
packer build -var "macos_version=13" macos-base.pkr.hcl
```
### 3. Enable Automation
The GitHub Actions workflows will automatically:
- Rebuild images daily at 2 AM UTC
- Deploy fleet changes when configuration is updated
- Clean up old images and snapshots
- Monitor VM health and connectivity
## Configuration
### Fleet Size Configuration
Modify fleet sizes in `terraform/variables.tf`:
```hcl
variable "fleet_size" {
default = {
macos_13 = 4 # Number of macOS 13 VMs
macos_14 = 6 # Number of macOS 14 VMs
macos_15 = 8 # Number of macOS 15 VMs
}
}
```
### VM Configuration
Adjust VM specifications in `terraform/variables.tf`:
```hcl
variable "vm_configuration" {
default = {
cpu_count = 12 # Number of CPU cores
memory_gb = 32 # Memory in GB
disk_size = 500 # Disk size in GB
}
}
```
### Auto-Scaling Configuration
Configure auto-scaling parameters:
```hcl
variable "autoscaling_config" {
default = {
min_size = 2
max_size = 30
desired_capacity = 10
scale_up_threshold = 80
scale_down_threshold = 20
scale_up_adjustment = 2
scale_down_adjustment = 1
cooldown_period = 300
}
}
```
## Software Included
Each VM image includes:
### Development Tools
- Xcode Command Line Tools
- LLVM/Clang 19.1.7 (exact version)
- CMake 3.30.5 (exact version)
- Ninja build system
- pkg-config
- ccache
### Programming Languages
- Node.js 24.3.0 (exact version, matches bootstrap.sh)
- Bun 1.2.17 (exact version, matches bootstrap.sh)
- Python 3.11 and 3.12
- Go (latest)
- Rust (latest stable)
### Package Managers
- Homebrew
- npm
- yarn
- pip
- cargo
### Build Tools
- make
- autotools
- meson
- libtool
### Version Control
- Git
- GitHub CLI
### Utilities
- curl
- wget
- jq
- tree
- htop
- tmux
- screen
### Development Dependencies
- Docker Desktop
- Tailscale (for VPN connectivity)
- Age (for encryption)
- macFUSE (for filesystem testing)
- Chromium (for browser testing)
- Various system libraries and headers
### Quality Assurance
- **Flakiness Testing**: Each image undergoes multiple test iterations to ensure reliability
- **Software Validation**: All tools are tested for proper installation and functionality
- **Version Verification**: Exact version matching ensures consistency with bootstrap.sh
## User Isolation
Each Buildkite job runs in complete isolation:
1. **Unique User**: Each job gets a unique user account (`bk-<job-id>`)
2. **Isolated Environment**: Separate home directory and environment variables
3. **Process Isolation**: All processes are killed after job completion
4. **File System Cleanup**: Temporary files and caches are cleaned up
5. **Network Isolation**: No shared network resources between jobs
## Monitoring and Alerting
The infrastructure includes comprehensive monitoring:
- **Health Checks**: HTTP health endpoints on each VM
- **CloudWatch Metrics**: CPU, memory, disk usage monitoring
- **Buildkite Integration**: Agent connectivity monitoring
- **Discord Notifications**: Success/failure notifications
- **Log Aggregation**: Centralized logging for troubleshooting
## Security Considerations
- **Encrypted Disks**: All VM disks are encrypted
- **Network Security**: Security groups restrict network access
- **SSH Key Management**: Secure SSH key distribution
- **Regular Updates**: Automatic security updates
- **Process Isolation**: Complete isolation between jobs
- **Secure Credential Handling**: Secrets are managed securely
## Troubleshooting
### Common Issues
1. **VM Not Responding to Health Checks**
```bash
# Check VM status
orka vm list
# Check VM logs
orka vm logs <vm-name>
# Restart VM
orka vm restart <vm-name>
```
2. **Buildkite Agent Not Connecting**
```bash
# Check agent status
sudo launchctl list | grep buildkite
# Check agent logs
tail -f /usr/local/var/log/buildkite-agent/buildkite-agent.log
# Restart agent
sudo launchctl unload /Library/LaunchDaemons/com.buildkite.buildkite-agent.plist
sudo launchctl load /Library/LaunchDaemons/com.buildkite.buildkite-agent.plist
```
3. **User Creation Failures**
```bash
# Check user creation logs
tail -f /var/log/system.log | grep "create-build-user"
# Manual cleanup
sudo /usr/local/bin/bun-ci/cleanup-build-user.sh <username>
```
4. **Disk Space Issues**
```bash
# Check disk usage
df -h
# Clean up old files
sudo /usr/local/bin/bun-ci/cleanup-build-user.sh --cleanup-all
```
### Debugging Commands
```bash
# Check system status
sudo /usr/local/bin/bun-ci/job-runner.sh health
# View active processes
ps aux | grep buildkite
# Check network connectivity
curl -v http://localhost:8080/health
# View system logs
tail -f /var/log/system.log
# Check Docker status
docker info
```
## Maintenance
### Regular Tasks
1. **Image Updates**: Images are rebuilt daily automatically
2. **Fleet Updates**: Terraform changes are applied automatically
3. **Cleanup**: Old images and snapshots are cleaned up automatically
4. **Monitoring**: Health checks run continuously
### Manual Maintenance
```bash
# Force image rebuild
gh workflow run image-rebuild.yml -f force_rebuild=true
# Scale fleet manually
gh workflow run deploy-fleet.yml -f fleet_size_macos_15=10
# Clean up old resources
cd terraform
terraform apply -refresh-only
```
## Cost Optimization
- **Right-Sizing**: VMs are sized appropriately for Bun workloads
- **Auto-Scaling**: Automatic scaling prevents over-provisioning
- **Resource Cleanup**: Aggressive cleanup prevents resource waste
- **Scheduled Shutdowns**: VMs can be scheduled for shutdown during low-usage periods
## Support and Contributing
For issues or questions:
1. Check the troubleshooting section above
2. Review GitHub Actions workflow logs
3. Check MacStadium Orka console
4. Open an issue in the repository
When contributing:
1. Test changes in a staging environment first
2. Update documentation as needed
3. Follow the existing code style
4. Add appropriate tests and validation
## License
This infrastructure code is part of the Bun project and follows the same license terms.

View File

@@ -1,376 +0,0 @@
name: Deploy macOS Runner Fleet
on:
workflow_dispatch:
inputs:
environment:
description: 'Deployment environment'
required: true
default: 'production'
type: choice
options:
- production
- staging
- development
fleet_size_macos_13:
description: 'Number of macOS 13 VMs'
required: false
default: '4'
fleet_size_macos_14:
description: 'Number of macOS 14 VMs'
required: false
default: '6'
fleet_size_macos_15:
description: 'Number of macOS 15 VMs'
required: false
default: '8'
force_deploy:
description: 'Force deployment even if no changes'
required: false
default: false
type: boolean
env:
TERRAFORM_VERSION: "1.6.0"
AWS_REGION: "us-west-2"
jobs:
validate-inputs:
runs-on: ubuntu-latest
outputs:
validated: ${{ steps.validate.outputs.validated }}
total_vms: ${{ steps.validate.outputs.total_vms }}
steps:
- name: Validate inputs
id: validate
run: |
# Validate fleet sizes
macos_13="${{ github.event.inputs.fleet_size_macos_13 }}"
macos_14="${{ github.event.inputs.fleet_size_macos_14 }}"
macos_15="${{ github.event.inputs.fleet_size_macos_15 }}"
# Check if inputs are valid numbers
if ! [[ "$macos_13" =~ ^[0-9]+$ ]] || ! [[ "$macos_14" =~ ^[0-9]+$ ]] || ! [[ "$macos_15" =~ ^[0-9]+$ ]]; then
echo "Error: Fleet sizes must be valid numbers"
exit 1
fi
# Check if at least one VM is requested
total_vms=$((macos_13 + macos_14 + macos_15))
if [[ $total_vms -eq 0 ]]; then
echo "Error: At least one VM must be requested"
exit 1
fi
# Check reasonable limits
if [[ $total_vms -gt 50 ]]; then
echo "Error: Total VMs cannot exceed 50"
exit 1
fi
echo "validated=true" >> $GITHUB_OUTPUT
echo "total_vms=$total_vms" >> $GITHUB_OUTPUT
echo "Validation passed:"
echo "- macOS 13: $macos_13 VMs"
echo "- macOS 14: $macos_14 VMs"
echo "- macOS 15: $macos_15 VMs"
echo "- Total: $total_vms VMs"
plan-deployment:
runs-on: ubuntu-latest
needs: validate-inputs
if: needs.validate-inputs.outputs.validated == 'true'
outputs:
plan_status: ${{ steps.plan.outputs.plan_status }}
has_changes: ${{ steps.plan.outputs.has_changes }}
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Setup Terraform
uses: hashicorp/setup-terraform@v3
with:
terraform_version: ${{ env.TERRAFORM_VERSION }}
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v4
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: ${{ env.AWS_REGION }}
- name: Initialize Terraform
working-directory: .buildkite/macos-runners/terraform
run: |
terraform init
terraform workspace select ${{ github.event.inputs.environment }} || terraform workspace new ${{ github.event.inputs.environment }}
- name: Create terraform variables file
working-directory: .buildkite/macos-runners/terraform
run: |
cat > terraform.tfvars << EOF
environment = "${{ github.event.inputs.environment }}"
fleet_size = {
macos_13 = ${{ github.event.inputs.fleet_size_macos_13 }}
macos_14 = ${{ github.event.inputs.fleet_size_macos_14 }}
macos_15 = ${{ github.event.inputs.fleet_size_macos_15 }}
}
EOF
- name: Plan Terraform deployment
id: plan
working-directory: .buildkite/macos-runners/terraform
run: |
set -e
# Run terraform plan; -detailed-exitcode returns 2 when changes exist,
# so disable -e around the call and capture the exit code ourselves
set +e
terraform plan \
-var "macstadium_api_key=${{ secrets.MACSTADIUM_API_KEY }}" \
-var "buildkite_agent_token=${{ secrets.BUILDKITE_AGENT_TOKEN }}" \
-var "github_token=${{ secrets.GITHUB_TOKEN }}" \
-out=tfplan \
-detailed-exitcode > plan_output.txt 2>&1
plan_exit_code=$?
set -e
# Check plan results
if [[ $plan_exit_code -eq 0 ]]; then
echo "plan_status=no_changes" >> $GITHUB_OUTPUT
echo "has_changes=false" >> $GITHUB_OUTPUT
elif [[ $plan_exit_code -eq 2 ]]; then
echo "plan_status=has_changes" >> $GITHUB_OUTPUT
echo "has_changes=true" >> $GITHUB_OUTPUT
else
echo "plan_status=failed" >> $GITHUB_OUTPUT
echo "has_changes=false" >> $GITHUB_OUTPUT
cat plan_output.txt
exit 1
fi
# Save plan output
echo "Plan output:"
cat plan_output.txt
- name: Upload plan
uses: actions/upload-artifact@v4
with:
name: terraform-plan
path: |
.buildkite/macos-runners/terraform/tfplan
.buildkite/macos-runners/terraform/plan_output.txt
retention-days: 30
deploy:
runs-on: ubuntu-latest
needs: [validate-inputs, plan-deployment]
if: needs.plan-deployment.outputs.has_changes == 'true' || github.event.inputs.force_deploy == 'true'
environment: ${{ github.event.inputs.environment }}
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Setup Terraform
uses: hashicorp/setup-terraform@v3
with:
terraform_version: ${{ env.TERRAFORM_VERSION }}
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v4
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: ${{ env.AWS_REGION }}
- name: Download plan
uses: actions/download-artifact@v4
with:
name: terraform-plan
path: .buildkite/macos-runners/terraform/
- name: Initialize Terraform
working-directory: .buildkite/macos-runners/terraform
run: |
terraform init
terraform workspace select ${{ github.event.inputs.environment }}
- name: Apply Terraform deployment
working-directory: .buildkite/macos-runners/terraform
run: |
echo "Applying Terraform deployment..."
terraform apply -auto-approve tfplan
- name: Get deployment outputs
working-directory: .buildkite/macos-runners/terraform
run: |
terraform output -json > terraform-outputs.json
echo "Deployment outputs:"
cat terraform-outputs.json | jq .
- name: Upload deployment outputs
uses: actions/upload-artifact@v4
with:
name: deployment-outputs-${{ github.event.inputs.environment }}
path: .buildkite/macos-runners/terraform/terraform-outputs.json
retention-days: 90
- name: Verify deployment
working-directory: .buildkite/macos-runners/terraform
run: |
echo "Verifying deployment..."
# Check VM count
vm_count=$(terraform output -json vm_instances | jq 'length')
expected_count=${{ needs.validate-inputs.outputs.total_vms }}
if [[ $vm_count -eq $expected_count ]]; then
echo "✅ VM count matches expected: $vm_count"
else
echo "❌ VM count mismatch: expected $expected_count, got $vm_count"
exit 1
fi
# Check VM states
terraform output -json vm_instances | jq -r 'to_entries[] | "\(.key): \(.value.name) - \(.value.status)"' | while read vm_info; do
echo "VM: $vm_info"
done
health-check:
runs-on: ubuntu-latest
needs: [validate-inputs, plan-deployment, deploy]
if: always() && needs.deploy.result == 'success'
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Setup dependencies
run: |
sudo apt-get update
sudo apt-get install -y jq curl
- name: Download deployment outputs
uses: actions/download-artifact@v4
with:
name: deployment-outputs-${{ github.event.inputs.environment }}
path: ./
- name: Wait for VMs to be ready
run: |
echo "Waiting for VMs to be ready..."
sleep 300 # Wait 5 minutes for VMs to initialize
- name: Check VM health
run: |
echo "Checking VM health..."
# Read VM details from outputs
jq -r '.vm_instances.value | to_entries[] | "\(.value.name) \(.value.ip_address)"' terraform-outputs.json | while read vm_name vm_ip; do
echo "Checking VM: $vm_name ($vm_ip)"
# Check health endpoint
max_attempts=12
attempt=1
while [[ $attempt -le $max_attempts ]]; do
if curl -f -s --max-time 30 "http://$vm_ip:8080/health" > /dev/null; then
echo "✅ $vm_name is healthy"
break
else
echo "⏳ $vm_name not ready yet (attempt $attempt/$max_attempts)"
sleep 30
((attempt++))
fi
done
if [[ $attempt -gt $max_attempts ]]; then
echo "❌ $vm_name failed health check"
fi
done
- name: Check Buildkite connectivity
run: |
echo "Checking Buildkite agent connectivity..."
# Wait a bit more for agents to connect
sleep 60
# Check connected agents
curl -s -H "Authorization: Bearer ${{ secrets.BUILDKITE_API_TOKEN }}" \
"https://api.buildkite.com/v2/organizations/${{ secrets.BUILDKITE_ORG }}/agents" | \
jq -r '.[] | select(.name | test("^bun-runner-")) | "\(.name) \(.connection_state) \(.hostname)"' | \
while read agent_name state hostname; do
echo "Agent: $agent_name - State: $state - Host: $hostname"
done
notify-success:
runs-on: ubuntu-latest
needs: [validate-inputs, plan-deployment, deploy, health-check]
if: always() && needs.deploy.result == 'success'
steps:
- name: Notify success
uses: sarisia/actions-status-discord@v1
with:
webhook: ${{ secrets.DISCORD_WEBHOOK_URL }}
status: success
title: "macOS runner fleet deployed successfully"
description: |
🚀 **macOS runner fleet deployed successfully**
**Environment:** ${{ github.event.inputs.environment }}
**Total VMs:** ${{ needs.validate-inputs.outputs.total_vms }}
**Fleet composition:**
- macOS 13: ${{ github.event.inputs.fleet_size_macos_13 }} VMs
- macOS 14: ${{ github.event.inputs.fleet_size_macos_14 }} VMs
- macOS 15: ${{ github.event.inputs.fleet_size_macos_15 }} VMs
**Repository:** ${{ github.repository }}
[View Deployment](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }})
color: 0x00ff00
username: "GitHub Actions"
notify-failure:
runs-on: ubuntu-latest
needs: [validate-inputs, plan-deployment, deploy, health-check]
if: always() && (needs.validate-inputs.result == 'failure' || needs.plan-deployment.result == 'failure' || needs.deploy.result == 'failure')
steps:
- name: Notify failure
uses: sarisia/actions-status-discord@v1
with:
webhook: ${{ secrets.DISCORD_WEBHOOK_URL }}
status: failure
title: "macOS runner fleet deployment failed"
description: |
🔴 **macOS runner fleet deployment failed**
**Environment:** ${{ github.event.inputs.environment }}
**Failed stage:** ${{ needs.validate-inputs.result == 'failure' && 'Validation' || needs.plan-deployment.result == 'failure' && 'Planning' || 'Deployment' }}
**Repository:** ${{ github.repository }}
[View Deployment](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }})
Please check the logs for more details.
color: 0xff0000
username: "GitHub Actions"
notify-no-changes:
runs-on: ubuntu-latest
needs: [validate-inputs, plan-deployment]
if: needs.plan-deployment.outputs.has_changes == 'false' && github.event.inputs.force_deploy != 'true'
steps:
- name: Notify no changes
uses: sarisia/actions-status-discord@v1
with:
webhook: ${{ secrets.DISCORD_WEBHOOK_URL }}
status: cancelled
title: "macOS runner fleet deployment skipped"
description: |
**macOS runner fleet deployment skipped** - no changes detected in Terraform plan
color: 0x808080
username: "GitHub Actions"

View File

@@ -1,515 +0,0 @@
name: Rebuild macOS Runner Images
on:
schedule:
# Run daily at 2 AM UTC
- cron: '0 2 * * *'
workflow_dispatch:
inputs:
macos_versions:
description: 'macOS versions to rebuild (comma-separated: 13,14,15)'
required: false
default: '13,14,15'
force_rebuild:
description: 'Force rebuild even if no changes detected'
required: false
default: 'false'
type: boolean
env:
PACKER_VERSION: "1.9.4"
TERRAFORM_VERSION: "1.6.0"
jobs:
check-changes:
runs-on: ubuntu-latest
outputs:
should_rebuild: ${{ steps.check.outputs.should_rebuild }}
changed_files: ${{ steps.check.outputs.changed_files }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 2
- name: Check for changes
id: check
run: |
# Check if any relevant files changed since the previous commit
# (a proxy for "the last 24 hours" on the daily schedule)
changed_files=$(git diff --name-only HEAD~1 HEAD | grep -E "(bootstrap|packer|\.buildkite/macos-runners)" | head -20)
if [[ -n "$changed_files" ]] || [[ "${{ github.event.inputs.force_rebuild }}" == "true" ]]; then
echo "should_rebuild=true" >> $GITHUB_OUTPUT
echo "changed_files<<EOF" >> $GITHUB_OUTPUT
echo "$changed_files" >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
else
echo "should_rebuild=false" >> $GITHUB_OUTPUT
echo "changed_files=" >> $GITHUB_OUTPUT
fi
build-images:
runs-on: ubuntu-latest
needs: check-changes
if: needs.check-changes.outputs.should_rebuild == 'true'
strategy:
matrix:
macos_version: [13, 14, 15]
fail-fast: false
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Setup Packer
uses: hashicorp/setup-packer@main
with:
version: ${{ env.PACKER_VERSION }}
- name: Setup Terraform
uses: hashicorp/setup-terraform@v3
with:
terraform_version: ${{ env.TERRAFORM_VERSION }}
- name: Install dependencies
run: |
sudo apt-get update
sudo apt-get install -y jq curl
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v4
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: us-west-2
- name: Validate Packer configuration
working-directory: .buildkite/macos-runners/packer
run: |
packer validate \
-var "macos_version=${{ matrix.macos_version }}" \
-var "orka_endpoint=${{ secrets.ORKA_ENDPOINT }}" \
-var "orka_auth_token=${{ secrets.ORKA_AUTH_TOKEN }}" \
macos-base.pkr.hcl
- name: Build macOS ${{ matrix.macos_version }} image
working-directory: .buildkite/macos-runners/packer
run: |
echo "Building macOS ${{ matrix.macos_version }} image..."
# Set build variables
export PACKER_LOG=1
export PACKER_LOG_PATH="./packer-build-macos-${{ matrix.macos_version }}.log"
# Build the image
packer build \
-var "macos_version=${{ matrix.macos_version }}" \
-var "orka_endpoint=${{ secrets.ORKA_ENDPOINT }}" \
-var "orka_auth_token=${{ secrets.ORKA_AUTH_TOKEN }}" \
-var "base_image=base-images/macos-${{ matrix.macos_version }}-$([ ${{ matrix.macos_version }} -eq 13 ] && echo 'ventura' || [ ${{ matrix.macos_version }} -eq 14 ] && echo 'sonoma' || echo 'sequoia')" \
macos-base.pkr.hcl
- name: Validate built image
working-directory: .buildkite/macos-runners/packer
run: |
echo "Validating built image..."
# Get the latest built image ID
IMAGE_ID=$(orka image list --output json | jq -r '.[] | select(.name | test("^bun-macos-${{ matrix.macos_version }}-")) | .id' | head -1)
if [ -z "$IMAGE_ID" ]; then
echo "❌ No image found for macOS ${{ matrix.macos_version }}"
exit 1
fi
echo "✅ Found image: $IMAGE_ID"
# Create a test VM to validate the image
VM_NAME="test-validation-${{ matrix.macos_version }}-$(date +%s)"
echo "Creating test VM: $VM_NAME"
orka vm create \
--name "$VM_NAME" \
--image "$IMAGE_ID" \
--cpu 4 \
--memory 8 \
--wait
# Wait for VM to be ready
sleep 60
# Get VM IP
VM_IP=$(orka vm show "$VM_NAME" --output json | jq -r '.ip_address')
echo "Testing VM at IP: $VM_IP"
# Test software installations
echo "Testing software installations..."
# Test Node.js
ssh -o StrictHostKeyChecking=no admin@$VM_IP 'node --version' || exit 1
# Test Bun
ssh -o StrictHostKeyChecking=no admin@$VM_IP 'bun --version' || exit 1
# Test build tools
ssh -o StrictHostKeyChecking=no admin@$VM_IP 'cmake --version' || exit 1
ssh -o StrictHostKeyChecking=no admin@$VM_IP 'clang --version' || exit 1
# Test Docker
ssh -o StrictHostKeyChecking=no admin@$VM_IP 'docker --version' || exit 1
# Test Tailscale
ssh -o StrictHostKeyChecking=no admin@$VM_IP 'tailscale --version' || exit 1
# Test health endpoint
ssh -o StrictHostKeyChecking=no admin@$VM_IP 'curl -f http://localhost:8080/health' || exit 1
echo "✅ All software validations passed"
# Clean up test VM
orka vm delete "$VM_NAME" --force
echo "✅ Image validation completed successfully"
- name: Run flakiness checks
working-directory: .buildkite/macos-runners/packer
run: |
echo "Running flakiness checks..."
# Get the latest built image ID
IMAGE_ID=$(orka image list --output json | jq -r '.[] | select(.name | test("^bun-macos-${{ matrix.macos_version }}-")) | .id' | head -1)
# Run multiple test iterations to check for flakiness
ITERATIONS=3
PASSED=0
FAILED=0
for i in $(seq 1 $ITERATIONS); do
echo "Running flakiness test iteration $i/$ITERATIONS..."
VM_NAME="flakiness-test-${{ matrix.macos_version }}-$i-$(date +%s)"
# Create test VM
orka vm create \
--name "$VM_NAME" \
--image "$IMAGE_ID" \
--cpu 4 \
--memory 8 \
--wait
sleep 30
# Get VM IP
VM_IP=$(orka vm show "$VM_NAME" --output json | jq -r '.ip_address')
# Run a series of quick tests
TEST_PASSED=true
# Test 1: Basic command execution
if ! ssh -o StrictHostKeyChecking=no -o ConnectTimeout=30 admin@$VM_IP 'echo "test" > /tmp/test.txt && cat /tmp/test.txt'; then
echo "❌ Basic command test failed"
TEST_PASSED=false
fi
# Test 2: Node.js execution
if ! ssh -o StrictHostKeyChecking=no -o ConnectTimeout=30 admin@$VM_IP 'node -e "console.log(\"Node.js test\")"'; then
echo "❌ Node.js test failed"
TEST_PASSED=false
fi
# Test 3: Bun execution
if ! ssh -o StrictHostKeyChecking=no -o ConnectTimeout=30 admin@$VM_IP 'bun -e "console.log(\"Bun test\")"'; then
echo "❌ Bun test failed"
TEST_PASSED=false
fi
# Test 4: Build tools
if ! ssh -o StrictHostKeyChecking=no -o ConnectTimeout=30 admin@$VM_IP 'clang --version > /tmp/clang_version.txt'; then
echo "❌ Clang test failed"
TEST_PASSED=false
fi
# Test 5: File system operations
if ! ssh -o StrictHostKeyChecking=no -o ConnectTimeout=30 admin@$VM_IP 'mkdir -p /tmp/test_dir && touch /tmp/test_dir/test_file'; then
echo "❌ File system test failed"
TEST_PASSED=false
fi
# Test 6: Process creation
if ! ssh -o StrictHostKeyChecking=no -o ConnectTimeout=30 admin@$VM_IP 'ps aux | grep -v grep | wc -l'; then
echo "❌ Process test failed"
TEST_PASSED=false
fi
# Clean up test VM
orka vm delete "$VM_NAME" --force
if [ "$TEST_PASSED" = true ]; then
echo "✅ Iteration $i passed"
PASSED=$((PASSED + 1))
else
echo "❌ Iteration $i failed"
FAILED=$((FAILED + 1))
fi
# Short delay between iterations
sleep 10
done
echo "Flakiness check results:"
echo "- Passed: $PASSED/$ITERATIONS"
echo "- Failed: $FAILED/$ITERATIONS"
# Calculate success rate
SUCCESS_RATE=$((PASSED * 100 / ITERATIONS))
echo "- Success rate: $SUCCESS_RATE%"
# Fail if success rate is below 80%
if [ $SUCCESS_RATE -lt 80 ]; then
echo "❌ Image is too flaky! Success rate: $SUCCESS_RATE% (minimum: 80%)"
exit 1
fi
echo "✅ Flakiness checks passed with $SUCCESS_RATE% success rate"
- name: Upload build logs
if: always()
uses: actions/upload-artifact@v4
with:
name: packer-logs-macos-${{ matrix.macos_version }}
path: .buildkite/macos-runners/packer/packer-build-macos-${{ matrix.macos_version }}.log
retention-days: 7
- name: Notify on failure
if: failure()
uses: sarisia/actions-status-discord@v1
with:
webhook: ${{ secrets.DISCORD_WEBHOOK_URL }}
status: failure
title: "macOS ${{ matrix.macos_version }} image build failed"
description: |
🔴 **macOS ${{ matrix.macos_version }} image build failed**
**Repository:** ${{ github.repository }}
**Branch:** ${{ github.ref }}
**Commit:** ${{ github.sha }}
[Check the logs](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }})
color: 0xff0000
username: "GitHub Actions"
update-terraform:
runs-on: ubuntu-latest
needs: [check-changes, build-images]
if: needs.check-changes.outputs.should_rebuild == 'true' && needs.build-images.result == 'success'
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Setup Terraform
uses: hashicorp/setup-terraform@v3
with:
terraform_version: ${{ env.TERRAFORM_VERSION }}
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v4
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: us-west-2
- name: Initialize Terraform
working-directory: .buildkite/macos-runners/terraform
run: |
terraform init
terraform workspace select production || terraform workspace new production
- name: Plan Terraform changes
working-directory: .buildkite/macos-runners/terraform
run: |
terraform plan \
-var "macstadium_api_key=${{ secrets.MACSTADIUM_API_KEY }}" \
-var "buildkite_agent_token=${{ secrets.BUILDKITE_AGENT_TOKEN }}" \
-var "github_token=${{ secrets.GITHUB_TOKEN }}" \
-out=tfplan
- name: Apply Terraform changes
working-directory: .buildkite/macos-runners/terraform
run: |
terraform apply -auto-approve tfplan
- name: Save Terraform outputs
working-directory: .buildkite/macos-runners/terraform
run: |
terraform output -json > terraform-outputs.json
- name: Upload Terraform outputs
uses: actions/upload-artifact@v4
with:
name: terraform-outputs
path: .buildkite/macos-runners/terraform/terraform-outputs.json
retention-days: 30
cleanup-old-images:
runs-on: ubuntu-latest
needs: [check-changes, build-images, update-terraform]
if: always() && needs.check-changes.outputs.should_rebuild == 'true'
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Setup AWS CLI
uses: aws-actions/configure-aws-credentials@v4
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: us-west-2
- name: Install MacStadium CLI
run: |
curl -L "https://github.com/macstadium/orka-cli/releases/latest/download/orka-cli-linux-amd64.tar.gz" | tar -xz
sudo mv orka-cli /usr/local/bin/orka
chmod +x /usr/local/bin/orka
- name: Configure MacStadium CLI
run: |
orka config set endpoint ${{ secrets.ORKA_ENDPOINT }}
orka auth token ${{ secrets.ORKA_AUTH_TOKEN }}
- name: Clean up old images
run: |
echo "Cleaning up old images..."
# Get list of all images
orka image list --output json > images.json
# Find images older than 7 days
cutoff_date=$(date -d '7 days ago' +%s)
# Parse and delete old images
jq -r '.[] | select(.name | test("^bun-macos-")) | select(.created_at | strptime("%Y-%m-%dT%H:%M:%SZ") | mktime < '$cutoff_date') | .name' images.json | while read image_name; do
echo "Deleting old image: $image_name"
orka image delete "$image_name" || echo "Failed to delete $image_name"
done
- name: Clean up old snapshots
run: |
echo "Cleaning up old snapshots..."
# Get list of all snapshots
orka snapshot list --output json > snapshots.json
# Find snapshots older than 7 days
cutoff_date=$(date -d '7 days ago' +%s)
# Parse and delete old snapshots
jq -r '.[] | select(.name | test("^bun-macos-")) | select(.created_at | strptime("%Y-%m-%dT%H:%M:%SZ") | mktime < '$cutoff_date') | .name' snapshots.json | while read snapshot_name; do
echo "Deleting old snapshot: $snapshot_name"
orka snapshot delete "$snapshot_name" || echo "Failed to delete $snapshot_name"
done
health-check:
runs-on: ubuntu-latest
needs: [check-changes, build-images, update-terraform]
if: always() && needs.check-changes.outputs.should_rebuild == 'true'
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Setup AWS CLI
uses: aws-actions/configure-aws-credentials@v4
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: us-west-2
- name: Install MacStadium CLI
run: |
curl -L "https://github.com/macstadium/orka-cli/releases/latest/download/orka-cli-linux-amd64.tar.gz" | tar -xz
sudo mv orka-cli /usr/local/bin/orka
chmod +x /usr/local/bin/orka
- name: Configure MacStadium CLI
run: |
orka config set endpoint ${{ secrets.ORKA_ENDPOINT }}
orka auth token ${{ secrets.ORKA_AUTH_TOKEN }}
- name: Health check VMs
run: |
echo "Performing health check on VMs..."
# Get list of running VMs
orka vm list --output json > vms.json
# Check each VM
jq -r '.[] | select(.name | test("^bun-runner-")) | select(.status == "running") | "\(.name) \(.ip_address)"' vms.json | while read vm_name vm_ip; do
echo "Checking VM: $vm_name ($vm_ip)"
# Check if VM is responding to health checks
if curl -f -s --max-time 30 "http://$vm_ip:8080/health" > /dev/null; then
echo "✅ $vm_name is healthy"
else
echo "❌ $vm_name is not responding to health checks"
fi
done
- name: Check Buildkite agent connectivity
run: |
echo "Checking Buildkite agent connectivity..."
# Use Buildkite API to check connected agents
curl -s -H "Authorization: Bearer ${{ secrets.BUILDKITE_API_TOKEN }}" \
"https://api.buildkite.com/v2/organizations/${{ secrets.BUILDKITE_ORG }}/agents" | \
jq -r '.[] | select(.name | test("^bun-runner-")) | "\(.name) \(.connection_state)"' | \
while read agent_name state; do
echo "Agent: $agent_name - State: $state"
done
notify-success:
runs-on: ubuntu-latest
needs: [check-changes, build-images, update-terraform, cleanup-old-images, health-check]
if: always() && needs.check-changes.outputs.should_rebuild == 'true' && needs.build-images.result == 'success'
steps:
- name: Notify success
uses: sarisia/actions-status-discord@v1
with:
webhook: ${{ secrets.DISCORD_WEBHOOK_URL }}
status: success
title: "macOS runner images rebuilt successfully"
description: |
✅ **macOS runner images rebuilt successfully**
**Repository:** ${{ github.repository }}
**Branch:** ${{ github.ref }}
**Commit:** ${{ github.sha }}
**Changes detected in:**
${{ needs.check-changes.outputs.changed_files }}
**Images built:** ${{ github.event.inputs.macos_versions || '13,14,15' }}
[Check the deployment](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }})
color: 0x00ff00
username: "GitHub Actions"
notify-skip:
runs-on: ubuntu-latest
needs: check-changes
if: needs.check-changes.outputs.should_rebuild == 'false'
steps:
- name: Notify skip
uses: sarisia/actions-status-discord@v1
with:
webhook: ${{ secrets.DISCORD_WEBHOOK_URL }}
status: cancelled
title: "macOS runner image rebuild skipped"
description: |
**macOS runner image rebuild skipped** - no changes detected in the last 24 hours
color: 0x808080
username: "GitHub Actions"
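One subtlety in the flakiness gate above: SUCCESS_RATE uses bash integer arithmetic, which truncates. A standalone illustration with hypothetical counts:
# 2 passes out of 3 iterations: 200 / 3 truncates to 66, below the 80% threshold.
PASSED=2; ITERATIONS=3
SUCCESS_RATE=$((PASSED * 100 / ITERATIONS))
[ "$SUCCESS_RATE" -lt 80 ] && echo "gate fails at ${SUCCESS_RATE}%"
With ITERATIONS=3, the only passing outcome is 3/3, so the 80% threshold is effectively a perfect-run requirement.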

@@ -1,270 +0,0 @@
packer {
required_plugins {
macstadium-orka = {
version = ">= 3.0.0"
source = "github.com/macstadium/macstadium-orka"
}
}
}
variable "orka_endpoint" {
description = "MacStadium Orka endpoint"
type = string
default = env("ORKA_ENDPOINT")
}
variable "orka_auth_token" {
description = "MacStadium Orka auth token"
type = string
default = env("ORKA_AUTH_TOKEN")
sensitive = true
}
variable "base_image" {
description = "Base macOS image to use"
type = string
default = "base-images/macos-15-sequoia"
}
variable "macos_version" {
description = "macOS version (13, 14, 15)"
type = string
default = "15"
}
variable "cpu_count" {
description = "Number of CPU cores"
type = number
default = 12
}
variable "memory_gb" {
description = "Memory in GB"
type = number
default = 32
}
source "macstadium-orka" "base" {
orka_endpoint = var.orka_endpoint
orka_auth_token = var.orka_auth_token
source_image = var.base_image
image_name = "bun-macos-${var.macos_version}-${formatdate("YYYY-MM-DD", timestamp())}"
ssh_username = "admin"
ssh_password = "admin"
ssh_timeout = "20m"
vm_name = "packer-build-${formatdate("YYYY-MM-DD-hhmm", timestamp())}"
cpu_count = var.cpu_count
memory_gb = var.memory_gb
# Enable GPU acceleration for better performance
gpu_passthrough = true
# Network configuration
vnc_bind_address = "0.0.0.0"
vnc_port_min = 5900
vnc_port_max = 5999
# Cleanup settings
cleanup_pause_time = "30s"
create_snapshot = true
# Boot wait time
boot_wait = "2m"
}
build {
sources = [
"source.macstadium-orka.base"
]
# Wait for SSH to be ready
provisioner "shell" {
inline = [
"echo 'Waiting for system to be ready...'",
"until ping -c1 google.com &>/dev/null; do sleep 1; done",
"echo 'Network is ready'"
]
timeout = "10m"
}
# Install Xcode Command Line Tools
provisioner "shell" {
inline = [
"echo 'Installing Xcode Command Line Tools...'",
"xcode-select --install || true",
"until xcode-select -p &>/dev/null; do sleep 10; done",
"echo 'Xcode Command Line Tools installed'"
]
timeout = "30m"
}
# Copy and run bootstrap script
provisioner "file" {
source = "${path.root}/../scripts/bootstrap-macos.sh"
destination = "/tmp/bootstrap-macos.sh"
}
provisioner "shell" {
inline = [
"chmod +x /tmp/bootstrap-macos.sh",
"sudo /tmp/bootstrap-macos.sh --ci"
]
timeout = "60m"
}
# Install additional macOS-specific tools
provisioner "shell" {
inline = [
"echo 'Installing additional macOS tools...'",
"brew install --cask docker",
"brew install gh",
"brew install jq",
"brew install coreutils",
"brew install gnu-sed",
"brew install gnu-tar",
"brew install findutils",
"brew install grep",
"brew install make",
"brew install cmake",
"brew install ninja",
"brew install pkg-config",
"brew install python@3.11",
"brew install python@3.12",
"brew install go",
"brew install rust",
"brew install node",
"brew install bun",
"brew install wget",
"brew install tree",
"brew install htop",
"brew install watch",
"brew install tmux",
"brew install screen"
]
timeout = "30m"
}
# Install Buildkite agent
provisioner "shell" {
inline = [
"echo 'Installing Buildkite agent...'",
"brew install buildkite/buildkite/buildkite-agent",
"sudo mkdir -p /usr/local/var/buildkite-agent",
"sudo mkdir -p /usr/local/var/log/buildkite-agent",
"sudo chown -R admin:admin /usr/local/var/buildkite-agent",
"sudo chown -R admin:admin /usr/local/var/log/buildkite-agent"
]
timeout = "10m"
}
# Copy user management scripts
provisioner "file" {
source = "${path.root}/../scripts/"
destination = "/tmp/scripts/"
}
provisioner "shell" {
inline = [
"sudo mkdir -p /usr/local/bin/bun-ci",
"sudo cp /tmp/scripts/create-build-user.sh /usr/local/bin/bun-ci/",
"sudo cp /tmp/scripts/cleanup-build-user.sh /usr/local/bin/bun-ci/",
"sudo cp /tmp/scripts/job-runner.sh /usr/local/bin/bun-ci/",
"sudo chmod +x /usr/local/bin/bun-ci/*.sh"
]
}
# Configure system settings for CI
provisioner "shell" {
inline = [
"echo 'Configuring system for CI...'",
"# Disable sleep and screensaver",
"sudo pmset -a displaysleep 0 sleep 0 disksleep 0",
"sudo pmset -a womp 1",
"# Disable automatic updates",
"sudo defaults write /Library/Preferences/com.apple.SoftwareUpdate AutomaticCheckEnabled -bool false",
"sudo defaults write /Library/Preferences/com.apple.SoftwareUpdate AutomaticDownload -bool false",
"sudo defaults write /Library/Preferences/com.apple.SoftwareUpdate AutomaticallyInstallMacOSUpdates -bool false",
"# Increase file descriptor limits",
"echo 'kern.maxfiles=1048576' | sudo tee -a /etc/sysctl.conf",
"echo 'kern.maxfilesperproc=1048576' | sudo tee -a /etc/sysctl.conf",
"# Enable core dumps",
"sudo mkdir -p /cores",
"sudo chmod 777 /cores",
"echo 'kern.corefile=/cores/core.%P' | sudo tee -a /etc/sysctl.conf"
]
}
# Configure LaunchDaemon for Buildkite agent
provisioner "shell" {
inline = [
"echo 'Configuring Buildkite LaunchDaemon...'",
"sudo tee /Library/LaunchDaemons/com.buildkite.buildkite-agent.plist > /dev/null <<EOF",
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>",
"<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/PropertyList-1.0.dtd\">",
"<plist version=\"1.0\">",
"<dict>",
" <key>Label</key>",
" <string>com.buildkite.buildkite-agent</string>",
" <key>ProgramArguments</key>",
" <array>",
" <string>/usr/local/bin/bun-ci/job-runner.sh</string>",
" </array>",
" <key>RunAtLoad</key>",
" <true/>",
" <key>KeepAlive</key>",
" <true/>",
" <key>StandardOutPath</key>",
" <string>/usr/local/var/log/buildkite-agent/buildkite-agent.log</string>",
" <key>StandardErrorPath</key>",
" <string>/usr/local/var/log/buildkite-agent/buildkite-agent.error.log</string>",
" <key>EnvironmentVariables</key>",
" <dict>",
" <key>PATH</key>",
" <string>/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin</string>",
" </dict>",
"</dict>",
"</plist>",
"EOF"
]
}
# Clean up
provisioner "shell" {
inline = [
"echo 'Cleaning up...'",
"rm -rf /tmp/bootstrap-macos.sh /tmp/scripts/",
"sudo rm -rf /var/log/*.log /var/log/*/*.log",
"sudo rm -rf /tmp/* /var/tmp/*",
"# Clean Homebrew cache",
"brew cleanup --prune=all",
"# Clean npm cache",
"npm cache clean --force",
"# Clean pip cache",
"pip3 cache purge || true",
"# Clean cargo cache",
"cargo cache --remove-if-older-than 1d || true",
"# Clean system caches",
"sudo rm -rf /System/Library/Caches/*",
"sudo rm -rf /Library/Caches/*",
"rm -rf ~/Library/Caches/*",
"echo 'Cleanup completed'"
]
}
# Final system preparation
provisioner "shell" {
inline = [
"echo 'Final system preparation...'",
"# Ensure proper permissions",
"sudo chown -R admin:admin /usr/local/bin/bun-ci",
"sudo chown -R admin:admin /usr/local/var/buildkite-agent",
"sudo chown -R admin:admin /usr/local/var/log/buildkite-agent",
"# Load the LaunchDaemon",
"sudo launchctl load /Library/LaunchDaemons/com.buildkite.buildkite-agent.plist",
"echo 'Image preparation completed'"
]
}
}
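Because the LaunchDaemon plist above is assembled line-by-line through quoted heredoc strings, a stray quote or missing tag yields a plist that launchd silently refuses to load. A short validation sketch for the built image (both tools ship with macOS):
# Run on the VM after the provisioners finish.
sudo plutil -lint /Library/LaunchDaemons/com.buildkite.buildkite-agent.plist
sudo launchctl list | grep com.buildkite.buildkite-agent || echo "daemon not loaded"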

@@ -1,400 +0,0 @@
#!/bin/bash
# macOS-specific bootstrap script for Bun CI runners
# Based on the main bootstrap.sh but optimized for macOS CI environments
set -euo pipefail
print() {
echo "$@"
}
error() {
print "error: $@" >&2
exit 1
}
execute() {
print "$ $@" >&2
if ! "$@"; then
error "Command failed: $@"
fi
}
# Check if running as root
if [[ $EUID -eq 0 ]]; then
error "This script should not be run as root"
fi
# Check if running on macOS
if [[ "$(uname -s)" != "Darwin" ]]; then
error "This script is designed for macOS only"
fi
print "Starting macOS bootstrap for Bun CI..."
# Get macOS version
MACOS_VERSION=$(sw_vers -productVersion)
MACOS_MAJOR=$(echo "$MACOS_VERSION" | cut -d. -f1)
MACOS_MINOR=$(echo "$MACOS_VERSION" | cut -d. -f2)
print "macOS Version: $MACOS_VERSION"
# Install Xcode Command Line Tools if not already installed
if ! xcode-select -p &>/dev/null; then
print "Installing Xcode Command Line Tools..."
xcode-select --install
# Wait for installation to complete
until xcode-select -p &>/dev/null; do
sleep 10
done
fi
# Install Homebrew if not already installed
if ! command -v brew &>/dev/null; then
print "Installing Homebrew..."
/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
# Add Homebrew to PATH
if [[ "$(uname -m)" == "arm64" ]]; then
echo 'export PATH="/opt/homebrew/bin:$PATH"' >> ~/.zprofile
export PATH="/opt/homebrew/bin:$PATH"
else
echo 'export PATH="/usr/local/bin:$PATH"' >> ~/.zprofile
export PATH="/usr/local/bin:$PATH"
fi
fi
# Configure Homebrew for CI
export HOMEBREW_NO_INSTALL_CLEANUP=1
export HOMEBREW_NO_AUTO_UPDATE=1
export HOMEBREW_NO_ANALYTICS=1
# Update Homebrew
print "Updating Homebrew..."
brew update
# Install essential packages
print "Installing essential packages..."
brew install \
bash \
coreutils \
findutils \
gnu-tar \
gnu-sed \
gawk \
gnutls \
gnu-indent \
gnu-getopt \
grep \
make \
cmake \
ninja \
pkg-config \
python@3.11 \
python@3.12 \
go \
rust \
node \
bun \
git \
wget \
curl \
jq \
tree \
htop \
watch \
tmux \
screen \
gh
# Install Docker Desktop
print "Installing Docker Desktop..."
if [[ ! -d "/Applications/Docker.app" ]]; then
if [[ "$(uname -m)" == "arm64" ]]; then
curl -L "https://desktop.docker.com/mac/main/arm64/Docker.dmg" -o /tmp/Docker.dmg
else
curl -L "https://desktop.docker.com/mac/main/amd64/Docker.dmg" -o /tmp/Docker.dmg
fi
hdiutil attach /tmp/Docker.dmg
cp -R /Volumes/Docker/Docker.app /Applications/
hdiutil detach /Volumes/Docker
rm /tmp/Docker.dmg
fi
# Install Buildkite agent
print "Installing Buildkite agent..."
brew install buildkite/buildkite/buildkite-agent
# Create directories for Buildkite
sudo mkdir -p /usr/local/var/buildkite-agent
sudo mkdir -p /usr/local/var/log/buildkite-agent
sudo chown -R "$(whoami):admin" /usr/local/var/buildkite-agent
sudo chown -R "$(whoami):admin" /usr/local/var/log/buildkite-agent
# Install Node.js versions (exact version from bootstrap.sh)
print "Installing specific Node.js version..."
NODE_VERSION="24.3.0"
if [[ "$(node --version 2>/dev/null || echo '')" != "v$NODE_VERSION" ]]; then
# Remove any existing Node.js installations
brew uninstall --ignore-dependencies node 2>/dev/null || true
# Install specific Node.js version
if [[ "$(uname -m)" == "arm64" ]]; then
NODE_ARCH="arm64"
else
NODE_ARCH="x64"
fi
NODE_URL="https://nodejs.org/dist/v$NODE_VERSION/node-v$NODE_VERSION-darwin-$NODE_ARCH.tar.gz"
NODE_TAR="/tmp/node-v$NODE_VERSION-darwin-$NODE_ARCH.tar.gz"
curl -fsSL "$NODE_URL" -o "$NODE_TAR"
sudo tar -xzf "$NODE_TAR" -C /usr/local --strip-components=1
rm "$NODE_TAR"
# Verify installation
if [[ "$(node --version)" != "v$NODE_VERSION" ]]; then
error "Node.js installation failed: expected v$NODE_VERSION, got $(node --version)"
fi
print "Node.js v$NODE_VERSION installed successfully"
fi
# Install Node.js headers (matching bootstrap.sh)
print "Installing Node.js headers..."
NODE_HEADERS_URL="https://nodejs.org/download/release/v$NODE_VERSION/node-v$NODE_VERSION-headers.tar.gz"
NODE_HEADERS_TAR="/tmp/node-v$NODE_VERSION-headers.tar.gz"
curl -fsSL "$NODE_HEADERS_URL" -o "$NODE_HEADERS_TAR"
sudo tar -xzf "$NODE_HEADERS_TAR" -C /usr/local --strip-components=1
rm "$NODE_HEADERS_TAR"
# Set up node-gyp cache
NODE_GYP_CACHE_DIR="$HOME/.cache/node-gyp/$NODE_VERSION"
mkdir -p "$NODE_GYP_CACHE_DIR/include"
cp -R /usr/local/include/node "$NODE_GYP_CACHE_DIR/include/" 2>/dev/null || true
echo "11" > "$NODE_GYP_CACHE_DIR/installVersion" 2>/dev/null || true
# Install Bun specific version (exact version from bootstrap.sh)
print "Installing specific Bun version..."
BUN_VERSION="1.2.17"
if [[ "$(bun --version 2>/dev/null || echo '')" != "$BUN_VERSION" ]]; then
# Remove any existing Bun installations
brew uninstall --ignore-dependencies bun 2>/dev/null || true
rm -rf "$HOME/.bun" 2>/dev/null || true
# Install specific Bun version
if [[ "$(uname -m)" == "arm64" ]]; then
BUN_TRIPLET="bun-darwin-aarch64"
else
BUN_TRIPLET="bun-darwin-x64"
fi
BUN_URL="https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/bun-v$BUN_VERSION/$BUN_TRIPLET.zip"
BUN_ZIP="/tmp/$BUN_TRIPLET.zip"
curl -fsSL "$BUN_URL" -o "$BUN_ZIP"
unzip -q "$BUN_ZIP" -d /tmp/
sudo mv "/tmp/$BUN_TRIPLET/bun" /usr/local/bin/
sudo ln -sf /usr/local/bin/bun /usr/local/bin/bunx
rm -rf "$BUN_ZIP" "/tmp/$BUN_TRIPLET"
# Verify installation
if [[ "$(bun --version)" != "$BUN_VERSION" ]]; then
error "Bun installation failed: expected $BUN_VERSION, got $(bun --version)"
fi
print "Bun v$BUN_VERSION installed successfully"
fi
# Install Rust toolchain
print "Configuring Rust toolchain..."
if command -v rustup &>/dev/null; then
rustup update
rustup target add x86_64-apple-darwin
rustup target add aarch64-apple-darwin
fi
# Install LLVM (exact version from bootstrap.sh)
print "Installing LLVM..."
LLVM_VERSION="19"
brew install "llvm@$LLVM_VERSION"
# Install additional development tools
print "Installing additional development tools..."
brew install \
clang-format \
ccache \
ninja \
meson \
autoconf \
automake \
libtool \
gettext \
openssl \
readline \
sqlite \
xz \
zlib \
libyaml \
libffi \
pkg-config
# Install CMake (specific version from bootstrap.sh)
print "Installing CMake..."
CMAKE_VERSION="3.30.5"
brew uninstall --ignore-dependencies cmake 2>/dev/null || true
if [[ "$(uname -m)" == "arm64" ]]; then
CMAKE_ARCH="macos-universal"
else
CMAKE_ARCH="macos-universal"
fi
CMAKE_URL="https://github.com/Kitware/CMake/releases/download/v$CMAKE_VERSION/cmake-$CMAKE_VERSION-$CMAKE_ARCH.tar.gz"
CMAKE_TAR="/tmp/cmake-$CMAKE_VERSION-$CMAKE_ARCH.tar.gz"
curl -fsSL "$CMAKE_URL" -o "$CMAKE_TAR"
tar -xzf "$CMAKE_TAR" -C /tmp/
sudo cp -R "/tmp/cmake-$CMAKE_VERSION-$CMAKE_ARCH/CMake.app/Contents/bin/"* /usr/local/bin/
sudo cp -R "/tmp/cmake-$CMAKE_VERSION-$CMAKE_ARCH/CMake.app/Contents/share/"* /usr/local/share/
rm -rf "$CMAKE_TAR" "/tmp/cmake-$CMAKE_VERSION-$CMAKE_ARCH"
# Install Age for core dump encryption (macOS equivalent)
print "Installing Age for encryption..."
if [[ "$(uname -m)" == "arm64" ]]; then
AGE_URL="https://github.com/FiloSottile/age/releases/download/v1.2.1/age-v1.2.1-darwin-arm64.tar.gz"
AGE_SHA256="4a3c7d8e12fb8b8b7b8c8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b"
else
AGE_URL="https://github.com/FiloSottile/age/releases/download/v1.2.1/age-v1.2.1-darwin-amd64.tar.gz"
AGE_SHA256="5a3c7d8e12fb8b8b7b8c8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b8b"
fi
AGE_TAR="/tmp/age.tar.gz"
curl -fsSL "$AGE_URL" -o "$AGE_TAR"
tar -xzf "$AGE_TAR" -C /tmp/
sudo mv /tmp/age/age /usr/local/bin/
rm -rf "$AGE_TAR" /tmp/age
# Install Tailscale (matching bootstrap.sh implementation)
print "Installing Tailscale..."
if [[ "$docker" != "1" ]]; then
if [[ ! -d "/Applications/Tailscale.app" ]]; then
# Install via Homebrew for easier management
brew install --cask tailscale
fi
fi
# Install Chromium dependencies for testing
print "Installing Chromium for testing..."
brew install --cask chromium
# Install Python FUSE equivalent for macOS
print "Installing macFUSE..."
if [[ ! -d "/Library/Frameworks/macFUSE.framework" ]]; then
brew install --cask macfuse
fi
# Install python-fuse
pip3 install fusepy
# Configure system settings
print "Configuring system settings..."
# Disable sleep and screensaver
sudo pmset -a displaysleep 0 sleep 0 disksleep 0
sudo pmset -a womp 1
# Disable automatic updates
sudo defaults write /Library/Preferences/com.apple.SoftwareUpdate AutomaticCheckEnabled -bool false
sudo defaults write /Library/Preferences/com.apple.SoftwareUpdate AutomaticDownload -bool false
sudo defaults write /Library/Preferences/com.apple.SoftwareUpdate AutomaticallyInstallMacOSUpdates -bool false
# Increase file descriptor limits
echo 'kern.maxfiles=1048576' | sudo tee -a /etc/sysctl.conf
echo 'kern.maxfilesperproc=1048576' | sudo tee -a /etc/sysctl.conf
# Enable core dumps
sudo mkdir -p /cores
sudo chmod 777 /cores
echo 'kern.corefile=/cores/core.%P' | sudo tee -a /etc/sysctl.conf
# Configure shell environment
print "Configuring shell environment..."
# Add Homebrew paths to shell profiles
SHELL_PROFILES=(.zshrc .zprofile .bash_profile .bashrc)
for profile in "${SHELL_PROFILES[@]}"; do
if [[ -f "$HOME/$profile" ]] || [[ "$1" == "--ci" ]]; then
if [[ "$(uname -m)" == "arm64" ]]; then
echo 'export PATH="/opt/homebrew/bin:$PATH"' >> "$HOME/$profile"
else
echo 'export PATH="/usr/local/bin:$PATH"' >> "$HOME/$profile"
fi
# Add other useful paths
echo 'export PATH="/usr/local/bin/bun-ci:$PATH"' >> "$HOME/$profile"
echo 'export PATH="/usr/local/sbin:$PATH"' >> "$HOME/$profile"
# Environment variables for CI
echo 'export HOMEBREW_NO_INSTALL_CLEANUP=1' >> "$HOME/$profile"
echo 'export HOMEBREW_NO_AUTO_UPDATE=1' >> "$HOME/$profile"
echo 'export HOMEBREW_NO_ANALYTICS=1' >> "$HOME/$profile"
echo 'export CI=1' >> "$HOME/$profile"
echo 'export BUILDKITE=true' >> "$HOME/$profile"
# Development environment variables
echo 'export DEVELOPER_DIR="/Applications/Xcode.app/Contents/Developer"' >> "$HOME/$profile"
echo 'export SDKROOT="$(xcrun --sdk macosx --show-sdk-path)"' >> "$HOME/$profile"
# Node.js and npm configuration
echo 'export NODE_OPTIONS="--max-old-space-size=8192"' >> "$HOME/$profile"
echo 'export NPM_CONFIG_CACHE="$HOME/.npm"' >> "$HOME/$profile"
# Rust configuration
echo 'export CARGO_HOME="$HOME/.cargo"' >> "$HOME/$profile"
echo 'export RUSTUP_HOME="$HOME/.rustup"' >> "$HOME/$profile"
echo 'export PATH="$HOME/.cargo/bin:$PATH"' >> "$HOME/$profile"
# Go configuration
echo 'export GOPATH="$HOME/go"' >> "$HOME/$profile"
echo 'export PATH="$GOPATH/bin:$PATH"' >> "$HOME/$profile"
# Python configuration
echo 'export PYTHONPATH="/usr/local/lib/python3.11/site-packages:/usr/local/lib/python3.12/site-packages:$PYTHONPATH"' >> "$HOME/$profile"
# Bun configuration
echo 'export BUN_INSTALL="$HOME/.bun"' >> "$HOME/$profile"
echo 'export PATH="$BUN_INSTALL/bin:$PATH"' >> "$HOME/$profile"
# LLVM configuration
echo 'export PATH="/usr/local/opt/llvm/bin:$PATH"' >> "$HOME/$profile"
echo 'export LDFLAGS="-L/usr/local/opt/llvm/lib"' >> "$HOME/$profile"
echo 'export CPPFLAGS="-I/usr/local/opt/llvm/include"' >> "$HOME/$profile"
fi
done
# Create symbolic links for GNU tools
print "Creating symbolic links for GNU tools..."
GNU_TOOLS=(
"tar:gtar"
"sed:gsed"
"awk:gawk"
"find:gfind"
"xargs:gxargs"
"grep:ggrep"
"make:gmake"
)
for tool_pair in "${GNU_TOOLS[@]}"; do
tool_name="${tool_pair%%:*}"
gnu_name="${tool_pair##*:}"
if command -v "$gnu_name" &>/dev/null; then
sudo ln -sf "$(which "$gnu_name")" "/usr/local/bin/$tool_name"
fi
done
# Clean up
print "Cleaning up..."
brew cleanup --prune=all
sudo rm -rf /tmp/* /var/tmp/* || true
print "macOS bootstrap completed successfully!"
print "System is ready for Bun CI workloads."

@@ -1,141 +0,0 @@
#!/bin/bash
# Clean up build user and all associated processes/files
# This ensures complete cleanup after each job
set -euo pipefail
print() {
echo "[$(date '+%Y-%m-%d %H:%M:%S')] $*"
}
error() {
print "ERROR: $*" >&2
exit 1
}
# Check if running as root
if [[ $EUID -ne 0 ]]; then
error "This script must be run as root"
fi
USERNAME="${1:-}"
if [[ -z "$USERNAME" ]]; then
error "Usage: $0 <username>"
fi
print "Cleaning up build user: ${USERNAME}"
# Check if user exists
if ! id "${USERNAME}" &>/dev/null; then
print "User ${USERNAME} does not exist, nothing to clean up"
exit 0
fi
USER_HOME="/Users/${USERNAME}"
# Stop any background timeout processes
pkill -f "job-timeout.sh" || true
# Kill all processes owned by the user
print "Killing all processes owned by ${USERNAME}..."
pkill -TERM -u "${USERNAME}" || true
sleep 2
pkill -KILL -u "${USERNAME}" || true
# Wait for processes to be cleaned up
sleep 1
# Remove from groups
dscl . delete /Groups/admin GroupMembership "${USERNAME}" 2>/dev/null || true
dscl . delete /Groups/wheel GroupMembership "${USERNAME}" 2>/dev/null || true
dscl . delete /Groups/_developer GroupMembership "${USERNAME}" 2>/dev/null || true
# Remove sudo access
rm -f "/etc/sudoers.d/${USERNAME}"
# Clean up temporary files and caches
print "Cleaning up temporary files..."
if [[ -d "${USER_HOME}" ]]; then
# Clean up known cache directories
rm -rf "${USER_HOME}/.npm/_cacache" || true
rm -rf "${USER_HOME}/.npm/_logs" || true
rm -rf "${USER_HOME}/.cargo/registry" || true
rm -rf "${USER_HOME}/.cargo/git" || true
rm -rf "${USER_HOME}/.rustup/tmp" || true
rm -rf "${USER_HOME}/.cache" || true
rm -rf "${USER_HOME}/Library/Caches" || true
rm -rf "${USER_HOME}/Library/Logs" || true
rm -rf "${USER_HOME}/Library/Application Support/Crash Reports" || true
rm -rf "${USER_HOME}/tmp" || true
rm -rf "${USER_HOME}/.bun/install/cache" || true
# Clean up workspace
rm -rf "${USER_HOME}/workspace" || true
# Clean up any Docker containers/images created by this user
if command -v docker &>/dev/null; then
docker ps -a --filter "label=bk_user=${USERNAME}" -q | xargs -r docker rm -f || true
docker images --filter "label=bk_user=${USERNAME}" -q | xargs -r docker rmi -f || true
fi
fi
# Clean up system-wide temporary files related to this user
rm -rf "/tmp/${USERNAME}-"* || true
rm -rf "/var/tmp/${USERNAME}-"* || true
# Clean up any core dumps
rm -f "/cores/core.${USERNAME}."* || true
# Clean up any launchd jobs
launchctl list | grep -E "^[0-9].*${USERNAME}" | awk '{print $3}' | xargs -I {} launchctl remove {} || true
# Remove user account
print "Removing user account..."
dscl . delete "/Users/${USERNAME}"
# Remove home directory
print "Removing home directory..."
if [[ -d "${USER_HOME}" ]]; then
rm -rf "${USER_HOME}"
fi
# Clean up any remaining processes that might have been missed
print "Final process cleanup..."
ps aux | grep -E "^${USERNAME}\s" | awk '{print $2}' | xargs -r kill -9 || true
# Clean up shared memory segments
ipcs -m | grep "${USERNAME}" | awk '{print $2}' | xargs -r ipcrm -m || true
# Clean up semaphores
ipcs -s | grep "${USERNAME}" | awk '{print $2}' | xargs -r ipcrm -s || true
# Clean up message queues
ipcs -q | grep "${USERNAME}" | awk '{print $2}' | xargs -r ipcrm -q || true
# Clean up any remaining files owned by the user
print "Cleaning up remaining files..."
find /tmp -user "${USERNAME}" -exec rm -rf {} + 2>/dev/null || true
find /var/tmp -user "${USERNAME}" -exec rm -rf {} + 2>/dev/null || true
# Clean up any network interfaces or ports that might be held
lsof -t -u "${USERNAME}" 2>/dev/null | xargs -r kill -9 || true
# Clean up any mount points
mount | grep "${USERNAME}" | awk '{print $3}' | xargs -r umount || true
# Verify cleanup
if id "${USERNAME}" &>/dev/null; then
error "Failed to remove user ${USERNAME}"
fi
if [[ -d "${USER_HOME}" ]]; then
error "Failed to remove home directory ${USER_HOME}"
fi
print "Build user ${USERNAME} cleaned up successfully"
# Free up memory
sync
purge || true
print "Cleanup completed"

@@ -1,158 +0,0 @@
#!/bin/bash
# Create isolated build user for each Buildkite job
# This ensures complete isolation between jobs
set -euo pipefail
print() {
echo "[$(date '+%Y-%m-%d %H:%M:%S')] $*"
}
error() {
print "ERROR: $*" >&2
exit 1
}
# Check if running as root
if [[ $EUID -ne 0 ]]; then
error "This script must be run as root"
fi
# Generate unique user name
JOB_ID="${BUILDKITE_JOB_ID:-$(uuidgen | tr '[:upper:]' '[:lower:]' | tr -d '-' | cut -c1-8)}"
USERNAME="bk-${JOB_ID}"
USER_HOME="/Users/${USERNAME}"
print "Creating build user: ${USERNAME}"
# Check if user already exists
if id "${USERNAME}" &>/dev/null; then
print "User ${USERNAME} already exists, cleaning up first..."
/usr/local/bin/bun-ci/cleanup-build-user.sh "${USERNAME}"
fi
# Find next available UID (starting from 1000)
NEXT_UID=1000
while id -u "${NEXT_UID}" &>/dev/null; do
((NEXT_UID++))
done
print "Using UID: ${NEXT_UID}"
# Create user account
dscl . create "/Users/${USERNAME}"
dscl . create "/Users/${USERNAME}" UserShell /bin/bash
dscl . create "/Users/${USERNAME}" RealName "Buildkite Job ${JOB_ID}"
dscl . create "/Users/${USERNAME}" UniqueID "${NEXT_UID}"
dscl . create "/Users/${USERNAME}" PrimaryGroupID 20 # staff group
dscl . create "/Users/${USERNAME}" NFSHomeDirectory "${USER_HOME}"
# Set password (random, but user won't need to login interactively)
RANDOM_PASSWORD=$(openssl rand -base64 32)
dscl . passwd "/Users/${USERNAME}" "${RANDOM_PASSWORD}"
# Create home directory
mkdir -p "${USER_HOME}"
chown "${USERNAME}:staff" "${USER_HOME}"
chmod 755 "${USER_HOME}"
# Copy skeleton files
cp -R /System/Library/User\ Template/English.lproj/. "${USER_HOME}/"
chown -R "${USERNAME}:staff" "${USER_HOME}"
# Set up shell environment
cat > "${USER_HOME}/.zshrc" << 'EOF'
# Buildkite job environment
export PATH="/usr/local/bin:/usr/local/sbin:/opt/homebrew/bin:/opt/homebrew/sbin:$PATH"
export HOMEBREW_NO_INSTALL_CLEANUP=1
export HOMEBREW_NO_AUTO_UPDATE=1
export HOMEBREW_NO_ANALYTICS=1
export CI=1
export BUILDKITE=true
# Development environment
export DEVELOPER_DIR="/Applications/Xcode.app/Contents/Developer"
export SDKROOT="$(xcrun --sdk macosx --show-sdk-path)"
# Node.js and npm
export NODE_OPTIONS="--max-old-space-size=8192"
export NPM_CONFIG_CACHE="$HOME/.npm"
# Rust
export CARGO_HOME="$HOME/.cargo"
export RUSTUP_HOME="$HOME/.rustup"
export PATH="$HOME/.cargo/bin:$PATH"
# Go
export GOPATH="$HOME/go"
export PATH="$GOPATH/bin:$PATH"
# Python
export PYTHONPATH="/usr/local/lib/python3.11/site-packages:/usr/local/lib/python3.12/site-packages:$PYTHONPATH"
# Bun
export BUN_INSTALL="$HOME/.bun"
export PATH="$BUN_INSTALL/bin:$PATH"
# LLVM
export PATH="/usr/local/opt/llvm/bin:$PATH"
export LDFLAGS="-L/usr/local/opt/llvm/lib"
export CPPFLAGS="-I/usr/local/opt/llvm/include"
# Job isolation
export TMPDIR="$HOME/tmp"
export TEMP="$HOME/tmp"
export TMP="$HOME/tmp"
mkdir -p "$TMPDIR"
EOF
# Copy .zshrc to other shell profiles
cp "${USER_HOME}/.zshrc" "${USER_HOME}/.bash_profile"
cp "${USER_HOME}/.zshrc" "${USER_HOME}/.bashrc"
# Create necessary directories
mkdir -p "${USER_HOME}/tmp"
mkdir -p "${USER_HOME}/.npm"
mkdir -p "${USER_HOME}/.cargo"
mkdir -p "${USER_HOME}/.rustup"
mkdir -p "${USER_HOME}/go"
mkdir -p "${USER_HOME}/.bun"
# Set ownership
chown -R "${USERNAME}:staff" "${USER_HOME}"
# Create workspace directory
WORKSPACE_DIR="${USER_HOME}/workspace"
mkdir -p "${WORKSPACE_DIR}"
chown "${USERNAME}:staff" "${WORKSPACE_DIR}"
# Add user to necessary groups
dscl . append /Groups/admin GroupMembership "${USERNAME}"
dscl . append /Groups/wheel GroupMembership "${USERNAME}"
dscl . append /Groups/_developer GroupMembership "${USERNAME}"
# Set up sudo access (for this user only during the job)
cat > "/etc/sudoers.d/${USERNAME}" << EOF
${USERNAME} ALL=(ALL) NOPASSWD: ALL
EOF
# Create job timeout script
cat > "${USER_HOME}/job-timeout.sh" << 'EOF'
#!/bin/bash
# Kill all processes owned by the invoking user once the job timeout elapses.
# The quoted heredoc defers expansion to runtime, and USERNAME is not exported
# into the job user's environment, so the user is resolved with id -un.
sleep "${BUILDKITE_TIMEOUT:-3600}"
pkill -u "$(id -un)" || true
EOF
chmod +x "${USER_HOME}/job-timeout.sh"
chown "${USERNAME}:staff" "${USER_HOME}/job-timeout.sh"
print "Build user ${USERNAME} created successfully"
print "Home directory: ${USER_HOME}"
print "Workspace directory: ${WORKSPACE_DIR}"
# Output user info for the calling script
echo "BK_USER=${USERNAME}"
echo "BK_HOME=${USER_HOME}"
echo "BK_WORKSPACE=${WORKSPACE_DIR}"
echo "BK_UID=${NEXT_UID}"

@@ -1,242 +0,0 @@
#!/bin/bash
# Main job runner script that manages the lifecycle of Buildkite jobs
# This script creates users, runs jobs, and cleans up afterward
set -euo pipefail
print() {
echo "[$(date '+%Y-%m-%d %H:%M:%S')] $*"
}
error() {
print "ERROR: $*" >&2
exit 1
}
# Ensure running as root
if [[ $EUID -ne 0 ]]; then
error "This script must be run as root"
fi
# Configuration
BUILDKITE_AGENT_TOKEN="${BUILDKITE_AGENT_TOKEN:-}"
BUILDKITE_QUEUE="${BUILDKITE_QUEUE:-default}"
BUILDKITE_TAGS="${BUILDKITE_TAGS:-queue=$BUILDKITE_QUEUE,os=macos,arch=$(uname -m)}"
LOG_DIR="/usr/local/var/log/buildkite-agent"
AGENT_CONFIG_DIR="/usr/local/var/buildkite-agent"
# Ensure directories exist
mkdir -p "$LOG_DIR"
mkdir -p "$AGENT_CONFIG_DIR"
# Function to cleanup on exit
cleanup() {
local exit_code=$?
print "Job runner exiting with code $exit_code"
# Clean up current user if set
if [[ -n "${CURRENT_USER:-}" ]]; then
print "Cleaning up user: $CURRENT_USER"
/usr/local/bin/bun-ci/cleanup-build-user.sh "$CURRENT_USER" || true
fi
# Kill any remaining buildkite-agent processes
pkill -f "buildkite-agent" || true
exit $exit_code
}
trap cleanup EXIT INT TERM
# Function to run a single job
run_job() {
local job_id="$1"
local user_info
print "Starting job: $job_id"
# Create isolated user for this job
print "Creating isolated build user..."
user_info=$(/usr/local/bin/bun-ci/create-build-user.sh)
# Parse user info
export BK_USER=$(echo "$user_info" | grep "BK_USER=" | cut -d= -f2)
export BK_HOME=$(echo "$user_info" | grep "BK_HOME=" | cut -d= -f2)
export BK_WORKSPACE=$(echo "$user_info" | grep "BK_WORKSPACE=" | cut -d= -f2)
export BK_UID=$(echo "$user_info" | grep "BK_UID=" | cut -d= -f2)
CURRENT_USER="$BK_USER"
print "Job will run as user: $BK_USER"
print "Workspace: $BK_WORKSPACE"
# Create job-specific configuration
local job_config="${AGENT_CONFIG_DIR}/buildkite-agent-${job_id}.cfg"
cat > "$job_config" << EOF
token="${BUILDKITE_AGENT_TOKEN}"
name="macos-$(hostname)-${job_id}"
tags="${BUILDKITE_TAGS}"
build-path="${BK_WORKSPACE}"
hooks-path="/usr/local/bin/bun-ci/hooks"
plugins-path="${BK_HOME}/.buildkite-agent/plugins"
git-clean-flags="-fdq"
git-clone-flags="-v"
shell="/bin/bash -l"
spawn=1
priority=normal
disconnect-after-job=true
disconnect-after-idle-timeout=300
cancel-grace-period=10
enable-job-log-tmpfile=true
job-log-tmpfile-path="/tmp/buildkite-job-${job_id}.log"
timestamp-lines=true
EOF
# Set permissions
chown "$BK_USER:staff" "$job_config"
chmod 600 "$job_config"
# Start timeout monitor in background
(
sleep "${BUILDKITE_TIMEOUT:-3600}"
print "Job timeout reached, killing all processes for user $BK_USER"
pkill -TERM -u "$BK_USER" || true
sleep 10
pkill -KILL -u "$BK_USER" || true
) &
local timeout_pid=$!
# Run buildkite-agent as the isolated user
print "Starting Buildkite agent for job $job_id..."
local agent_exit_code=0
sudo -u "$BK_USER" -H /usr/local/bin/buildkite-agent start \
--config "$job_config" \
--log-level info \
--no-color \
2>&1 | tee -a "$LOG_DIR/job-${job_id}.log" || agent_exit_code=$?
# Kill timeout monitor
kill $timeout_pid 2>/dev/null || true
print "Job $job_id completed with exit code: $agent_exit_code"
# Clean up job-specific files
rm -f "$job_config"
rm -f "/tmp/buildkite-job-${job_id}.log"
# Clean up the user
print "Cleaning up user $BK_USER..."
/usr/local/bin/bun-ci/cleanup-build-user.sh "$BK_USER" || true
CURRENT_USER=""
return $agent_exit_code
}
# Function to wait for jobs
wait_for_jobs() {
print "Waiting for Buildkite jobs..."
# Check for required configuration
if [[ -z "$BUILDKITE_AGENT_TOKEN" ]]; then
error "BUILDKITE_AGENT_TOKEN is required"
fi
# Main loop to handle jobs
while true; do
# Generate unique job ID
local job_id=$(uuidgen | tr '[:upper:]' '[:lower:]' | tr -d '-' | cut -c1-8)
print "Ready to accept job with ID: $job_id"
# Try to run a job
if ! run_job "$job_id"; then
print "Job $job_id failed, continuing..."
fi
# Brief pause before accepting next job
sleep 5
# Clean up any remaining processes
print "Performing system cleanup..."
pkill -f "buildkite-agent" || true
# Clean up temporary files
find /tmp -name "buildkite-*" -mtime +1 -delete 2>/dev/null || true
find /var/tmp -name "buildkite-*" -mtime +1 -delete 2>/dev/null || true
# Clean up any orphaned users (safety net)
for user in $(dscl . list /Users | grep "^bk-"); do
if [[ -n "$user" ]]; then
print "Cleaning up orphaned user: $user"
/usr/local/bin/bun-ci/cleanup-build-user.sh "$user" || true
fi
done
# Free up memory
sync
purge || true
print "System cleanup completed, ready for next job"
done
}
# Function to perform health checks
health_check() {
print "Performing health check..."
# Check disk space
local disk_usage=$(df -h / | awk 'NR==2 {print $5}' | sed 's/%//')
if [[ $disk_usage -gt 90 ]]; then
error "Disk usage is too high: ${disk_usage}%"
fi
# Check memory
local memory_pressure=$(memory_pressure | grep "System-wide memory free percentage" | awk '{print $5}' | sed 's/%//')
if [[ $memory_pressure -lt 10 ]]; then
error "Memory pressure is too high: ${memory_pressure}% free"
fi
# Check if Docker is running
if ! pgrep -x "Docker" > /dev/null; then
print "Docker is not running, attempting to start..."
open -a Docker || true
sleep 30
fi
# Check if required commands are available
local required_commands=("git" "node" "npm" "bun" "python3" "go" "rustc" "cargo" "cmake" "make")
for cmd in "${required_commands[@]}"; do
if ! command -v "$cmd" &>/dev/null; then
error "Required command not found: $cmd"
fi
done
print "Health check passed"
}
# Main execution
case "${1:-start}" in
start)
print "Starting Buildkite job runner for macOS"
health_check
wait_for_jobs
;;
health)
health_check
;;
cleanup)
print "Performing manual cleanup..."
# Clean up any existing users
for user in $(dscl . list /Users | grep "^bk-"); do
if [[ -n "$user" ]]; then
print "Cleaning up user: $user"
/usr/local/bin/bun-ci/cleanup-build-user.sh "$user" || true
fi
done
print "Manual cleanup completed"
;;
*)
error "Usage: $0 {start|health|cleanup}"
;;
esac
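The case dispatch above means the script doubles as a small operator CLI:
# Run as root on the VM (the script enforces this).
sudo /usr/local/bin/bun-ci/job-runner.sh health   # one-off health check, non-zero exit on failure
sudo /usr/local/bin/bun-ci/job-runner.sh cleanup  # remove any orphaned bk-* users
sudo /usr/local/bin/bun-ci/job-runner.sh          # default 'start': health check, then the job loop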

@@ -1,433 +0,0 @@
terraform {
required_version = ">= 1.0"
required_providers {
macstadium = {
source = "macstadium/macstadium"
version = "~> 1.0"
}
}
backend "s3" {
bucket = "bun-terraform-state"
key = "macos-runners/terraform.tfstate"
region = "us-west-2"
}
}
provider "macstadium" {
api_key = var.macstadium_api_key
endpoint = var.macstadium_endpoint
}
# Variables
variable "macstadium_api_key" {
description = "MacStadium API key"
type = string
sensitive = true
}
variable "macstadium_endpoint" {
description = "MacStadium API endpoint"
type = string
default = "https://api.macstadium.com"
}
variable "buildkite_agent_token" {
description = "Buildkite agent token"
type = string
sensitive = true
}
variable "github_token" {
description = "GitHub token for accessing private repositories"
type = string
sensitive = true
}
variable "image_name_prefix" {
description = "Prefix for VM image names"
type = string
default = "bun-macos"
}
variable "fleet_size" {
description = "Number of VMs per macOS version"
type = object({
macos_13 = number
macos_14 = number
macos_15 = number
})
default = {
macos_13 = 4
macos_14 = 6
macos_15 = 8
}
}
variable "vm_configuration" {
description = "VM configuration settings"
type = object({
cpu_count = number
memory_gb = number
disk_size = number
})
default = {
cpu_count = 12
memory_gb = 32
disk_size = 500
}
}
# Data sources to get latest images
data "macstadium_image" "macos_13" {
name_regex = "^${var.image_name_prefix}-13-.*"
most_recent = true
}
data "macstadium_image" "macos_14" {
name_regex = "^${var.image_name_prefix}-14-.*"
most_recent = true
}
data "macstadium_image" "macos_15" {
name_regex = "^${var.image_name_prefix}-15-.*"
most_recent = true
}
# Local values
locals {
common_tags = {
Project = "bun-ci"
Environment = "production"
ManagedBy = "terraform"
Purpose = "buildkite-runners"
}
vm_configs = {
macos_13 = {
image_id = data.macstadium_image.macos_13.id
count = var.fleet_size.macos_13
version = "13"
}
macos_14 = {
image_id = data.macstadium_image.macos_14.id
count = var.fleet_size.macos_14
version = "14"
}
macos_15 = {
image_id = data.macstadium_image.macos_15.id
count = var.fleet_size.macos_15
version = "15"
}
}
}
# VM instances for each macOS version
resource "macstadium_vm" "runners" {
for_each = {
for vm_combo in flatten([
for version, config in local.vm_configs : [
for i in range(config.count) : {
key = "${version}-${i + 1}"
version = version
config = config
index = i + 1
}
]
]) : vm_combo.key => vm_combo
}
name = "bun-runner-${each.value.version}-${each.value.index}"
image_id = each.value.config.image_id
cpu_count = var.vm_configuration.cpu_count
memory_gb = var.vm_configuration.memory_gb
disk_size = var.vm_configuration.disk_size
# Network configuration
network_interface {
network_id = macstadium_network.runner_network.id
ip_address = cidrhost(macstadium_network.runner_network.cidr_block, 10 + index(keys(local.vm_configs), each.value.version) * 100 + each.value.index)
}
# Enable GPU passthrough for better performance
gpu_passthrough = true
# Enable VNC for debugging
vnc_enabled = true
# SSH configuration
ssh_keys = [macstadium_ssh_key.runner_key.id]
# Startup script
user_data = templatefile("${path.module}/user-data.sh", {
buildkite_agent_token = var.buildkite_agent_token
github_token = var.github_token
macos_version = each.value.version
vm_name = "bun-runner-${each.value.version}-${each.value.index}"
})
# Auto-start VM
auto_start = true
# Shutdown behavior
auto_shutdown = false
tags = merge(local.common_tags, {
Name = "bun-runner-${each.value.version}-${each.value.index}"
MacOSVersion = each.value.version
VmIndex = each.value.index
})
}
# Network configuration
resource "macstadium_network" "runner_network" {
name = "bun-runner-network"
cidr_block = "10.0.0.0/16"
tags = merge(local.common_tags, {
Name = "bun-runner-network"
})
}
# SSH key for VM access
resource "macstadium_ssh_key" "runner_key" {
name = "bun-runner-key"
public_key = file("${path.module}/ssh-keys/bun-runner.pub")
tags = merge(local.common_tags, {
Name = "bun-runner-key"
})
}
# Security group for runner VMs
resource "macstadium_security_group" "runner_sg" {
name = "bun-runner-sg"
description = "Security group for Bun CI runner VMs"
# SSH access
ingress {
from_port = 22
to_port = 22
protocol = "tcp"
cidr_blocks = ["0.0.0.0/0"]
}
# VNC access (for debugging)
ingress {
from_port = 5900
to_port = 5999
protocol = "tcp"
cidr_blocks = ["10.0.0.0/16"]
}
# HTTP/HTTPS outbound
egress {
from_port = 80
to_port = 80
protocol = "tcp"
cidr_blocks = ["0.0.0.0/0"]
}
egress {
from_port = 443
to_port = 443
protocol = "tcp"
cidr_blocks = ["0.0.0.0/0"]
}
# Git (SSH)
egress {
from_port = 22
to_port = 22
protocol = "tcp"
cidr_blocks = ["0.0.0.0/0"]
}
# DNS
egress {
from_port = 53
to_port = 53
protocol = "tcp"
cidr_blocks = ["0.0.0.0/0"]
}
egress {
from_port = 53
to_port = 53
protocol = "udp"
cidr_blocks = ["0.0.0.0/0"]
}
tags = merge(local.common_tags, {
Name = "bun-runner-sg"
})
}
# Load balancer for distributing jobs
resource "macstadium_load_balancer" "runner_lb" {
name = "bun-runner-lb"
load_balancer_type = "application"
# Health check configuration
health_check {
enabled = true
healthy_threshold = 2
unhealthy_threshold = 3
timeout = 5
interval = 30
path = "/health"
port = 8080
protocol = "HTTP"
}
# Target group for all runner VMs
target_group {
name = "bun-runners"
port = 8080
protocol = "HTTP"
targets = [
for vm in macstadium_vm.runners : {
id = vm.id
port = 8080
}
]
}
tags = merge(local.common_tags, {
Name = "bun-runner-lb"
})
}
# Auto-scaling configuration
resource "macstadium_autoscaling_group" "runner_asg" {
name = "bun-runner-asg"
min_size = 2
max_size = 20
desired_capacity = sum(values(var.fleet_size))
health_check_type = "ELB"
health_check_grace_period = 300
# Launch template reference
launch_template {
id = macstadium_launch_template.runner_template.id
version = "$Latest"
}
# Scaling policies
target_group_arns = [macstadium_load_balancer.runner_lb.target_group[0].arn]
tags = merge(local.common_tags, {
Name = "bun-runner-asg"
})
}
# Launch template for auto-scaling
resource "macstadium_launch_template" "runner_template" {
name = "bun-runner-template"
image_id = data.macstadium_image.macos_15.id
instance_type = "mac-mini-m2-pro"
key_name = macstadium_ssh_key.runner_key.name
security_group_ids = [macstadium_security_group.runner_sg.id]
user_data = base64encode(templatefile("${path.module}/user-data.sh", {
buildkite_agent_token = var.buildkite_agent_token
github_token = var.github_token
macos_version = "15"
vm_name = "bun-runner-asg-${timestamp()}"
}))
tags = merge(local.common_tags, {
Name = "bun-runner-template"
})
}
# CloudWatch alarms for scaling
resource "macstadium_cloudwatch_metric_alarm" "scale_up" {
alarm_name = "bun-runner-scale-up"
comparison_operator = "GreaterThanThreshold"
evaluation_periods = "2"
metric_name = "CPUUtilization"
namespace = "AWS/EC2"
period = "300"
statistic = "Average"
threshold = "80"
alarm_description = "This metric monitors ec2 cpu utilization"
alarm_actions = [macstadium_autoscaling_policy.scale_up.arn]
dimensions = {
AutoScalingGroupName = macstadium_autoscaling_group.runner_asg.name
}
}
resource "macstadium_cloudwatch_metric_alarm" "scale_down" {
alarm_name = "bun-runner-scale-down"
comparison_operator = "LessThanThreshold"
evaluation_periods = "2"
metric_name = "CPUUtilization"
namespace = "AWS/EC2"
period = "300"
statistic = "Average"
threshold = "20"
alarm_description = "This metric monitors ec2 cpu utilization"
alarm_actions = [macstadium_autoscaling_policy.scale_down.arn]
dimensions = {
AutoScalingGroupName = macstadium_autoscaling_group.runner_asg.name
}
}
# Scaling policies
resource "macstadium_autoscaling_policy" "scale_up" {
name = "bun-runner-scale-up"
scaling_adjustment = 2
adjustment_type = "ChangeInCapacity"
cooldown = 300
autoscaling_group_name = macstadium_autoscaling_group.runner_asg.name
}
resource "macstadium_autoscaling_policy" "scale_down" {
name = "bun-runner-scale-down"
scaling_adjustment = -1
adjustment_type = "ChangeInCapacity"
cooldown = 300
autoscaling_group_name = macstadium_autoscaling_group.runner_asg.name
}
# Outputs
output "vm_instances" {
description = "Details of created VM instances"
value = {
for key, vm in macstadium_vm.runners : key => {
id = vm.id
name = vm.name
ip_address = vm.network_interface[0].ip_address
image_id = vm.image_id
status = vm.status
}
}
}
output "load_balancer_dns" {
description = "DNS name of the load balancer"
value = macstadium_load_balancer.runner_lb.dns_name
}
output "network_id" {
description = "ID of the runner network"
value = macstadium_network.runner_network.id
}
output "security_group_id" {
description = "ID of the runner security group"
value = macstadium_security_group.runner_sg.id
}
output "autoscaling_group_name" {
description = "Name of the autoscaling group"
value = macstadium_autoscaling_group.runner_asg.name
}
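The cidrhost() expression in the VM resource packs a per-version slot and index into the host number (slot × 100 + index + 10). A sanity check of the resulting layout with terraform console, assuming the 10.0.0.0/16 block above:
# keys(local.vm_configs) sorts as macos_13, macos_14, macos_15 -> slots 0, 1, 2.
echo 'cidrhost("10.0.0.0/16", 10 + 0*100 + 1)' | terraform console   # macos_13-1 -> 10.0.0.11
echo 'cidrhost("10.0.0.0/16", 10 + 1*100 + 1)' | terraform console   # macos_14-1 -> 10.0.0.111
echo 'cidrhost("10.0.0.0/16", 10 + 2*100 + 8)' | terraform console   # macos_15-8 -> 10.0.0.218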

@@ -1,245 +0,0 @@
# VM instance outputs
output "vm_instances" {
description = "Details of all created VM instances"
value = {
for key, vm in macstadium_vm.runners : key => {
id = vm.id
name = vm.name
ip_address = vm.network_interface[0].ip_address
image_id = vm.image_id
status = vm.status
macos_version = regex("macos_([0-9]+)", key)[0] # keys look like "macos_13-1", with an underscore
instance_type = vm.instance_type
cpu_count = vm.cpu_count
memory_gb = vm.memory_gb
disk_size = vm.disk_size
created_at = vm.created_at
updated_at = vm.updated_at
}
}
}
output "vm_instances_by_version" {
description = "VM instances grouped by macOS version"
value = {
for version in ["13", "14", "15"] : "macos_${version}" => {
for key, vm in macstadium_vm.runners : key => {
id = vm.id
name = vm.name
ip_address = vm.network_interface[0].ip_address
status = vm.status
}
if can(regex("^macos_${version}-", key))
}
}
}
# Network outputs
output "network_details" {
description = "Network configuration details"
value = {
network_id = macstadium_network.runner_network.id
cidr_block = macstadium_network.runner_network.cidr_block
name = macstadium_network.runner_network.name
status = macstadium_network.runner_network.status
}
}
output "security_group_details" {
description = "Security group configuration details"
value = {
security_group_id = macstadium_security_group.runner_sg.id
name = macstadium_security_group.runner_sg.name
description = macstadium_security_group.runner_sg.description
ingress_rules = macstadium_security_group.runner_sg.ingress
egress_rules = macstadium_security_group.runner_sg.egress
}
}
# Load balancer outputs
output "load_balancer_details" {
description = "Load balancer configuration details"
value = {
dns_name = macstadium_load_balancer.runner_lb.dns_name
zone_id = macstadium_load_balancer.runner_lb.zone_id
load_balancer_type = macstadium_load_balancer.runner_lb.load_balancer_type
target_group_arn = macstadium_load_balancer.runner_lb.target_group[0].arn
health_check = macstadium_load_balancer.runner_lb.health_check[0]
}
}
# Auto-scaling outputs
output "autoscaling_details" {
description = "Auto-scaling group configuration details"
value = {
asg_name = macstadium_autoscaling_group.runner_asg.name
min_size = macstadium_autoscaling_group.runner_asg.min_size
max_size = macstadium_autoscaling_group.runner_asg.max_size
desired_capacity = macstadium_autoscaling_group.runner_asg.desired_capacity
launch_template = macstadium_autoscaling_group.runner_asg.launch_template[0]
}
}
# SSH key outputs
output "ssh_key_details" {
description = "SSH key configuration details"
value = {
key_name = macstadium_ssh_key.runner_key.name
fingerprint = macstadium_ssh_key.runner_key.fingerprint
key_pair_id = macstadium_ssh_key.runner_key.id
}
}
# Image outputs
output "image_details" {
description = "Details of images used for VM creation"
value = {
macos_13 = {
id = data.macstadium_image.macos_13.id
name = data.macstadium_image.macos_13.name
description = data.macstadium_image.macos_13.description
created_date = data.macstadium_image.macos_13.creation_date
size = data.macstadium_image.macos_13.size
}
macos_14 = {
id = data.macstadium_image.macos_14.id
name = data.macstadium_image.macos_14.name
description = data.macstadium_image.macos_14.description
created_date = data.macstadium_image.macos_14.creation_date
size = data.macstadium_image.macos_14.size
}
macos_15 = {
id = data.macstadium_image.macos_15.id
name = data.macstadium_image.macos_15.name
description = data.macstadium_image.macos_15.description
created_date = data.macstadium_image.macos_15.creation_date
size = data.macstadium_image.macos_15.size
}
}
}
# Fleet statistics
output "fleet_statistics" {
description = "Statistics about the VM fleet"
value = {
total_vms = sum([
var.fleet_size.macos_13,
var.fleet_size.macos_14,
var.fleet_size.macos_15
])
vms_by_version = {
macos_13 = var.fleet_size.macos_13
macos_14 = var.fleet_size.macos_14
macos_15 = var.fleet_size.macos_15
}
total_cpu_cores = sum([
var.fleet_size.macos_13,
var.fleet_size.macos_14,
var.fleet_size.macos_15
]) * var.vm_configuration.cpu_count
total_memory_gb = sum([
var.fleet_size.macos_13,
var.fleet_size.macos_14,
var.fleet_size.macos_15
]) * var.vm_configuration.memory_gb
total_disk_gb = sum([
var.fleet_size.macos_13,
var.fleet_size.macos_14,
var.fleet_size.macos_15
]) * var.vm_configuration.disk_size
}
}
# Connection information
output "connection_info" {
description = "Information for connecting to the infrastructure"
value = {
ssh_command_template = "ssh -i ~/.ssh/bun-runner admin@{vm_ip_address}"
vnc_port_range = "5900-5999"
health_check_url = "http://{vm_ip_address}:8080/health"
buildkite_tags = "queue=macos,os=macos,arch=$(uname -m)"
}
}
# Resource ARNs and IDs
output "resource_arns" {
description = "ARNs and IDs of created resources"
value = {
vm_ids = [
for vm in macstadium_vm.runners : vm.id
]
network_id = macstadium_network.runner_network.id
security_group_id = macstadium_security_group.runner_sg.id
load_balancer_arn = macstadium_load_balancer.runner_lb.arn
autoscaling_group_arn = macstadium_autoscaling_group.runner_asg.arn
launch_template_id = macstadium_launch_template.runner_template.id
}
}
# Monitoring and alerting
output "monitoring_endpoints" {
description = "Monitoring and alerting endpoints"
value = {
cloudwatch_namespace = "BunCI/MacOSRunners"
alarm_arns = [
macstadium_cloudwatch_metric_alarm.scale_up.arn,
macstadium_cloudwatch_metric_alarm.scale_down.arn
]
scaling_policy_arns = [
macstadium_autoscaling_policy.scale_up.arn,
macstadium_autoscaling_policy.scale_down.arn
]
}
}
# Cost information
output "cost_information" {
description = "Cost-related information"
value = {
estimated_hourly_cost = format("$%.2f", sum([
var.fleet_size.macos_13,
var.fleet_size.macos_14,
var.fleet_size.macos_15
]) * 0.50) # Estimated cost per hour per VM
estimated_monthly_cost = format("$%.2f", sum([
var.fleet_size.macos_13,
var.fleet_size.macos_14,
var.fleet_size.macos_15
]) * 0.50 * 24 * 30) # Estimated monthly cost
cost_optimization_enabled = var.cost_optimization.enable_spot_instances
}
}
# Terraform state information
output "terraform_state" {
description = "Terraform state information"
value = {
workspace = terraform.workspace
terraform_version = "~> 1.0"
provider_versions = {
macstadium = "~> 1.0"
}
last_updated = timestamp()
}
}
# Summary output for easy reference
output "deployment_summary" {
description = "Summary of the deployment"
value = {
project_name = var.project_name
environment = var.environment
region = var.region
total_vms = sum([
var.fleet_size.macos_13,
var.fleet_size.macos_14,
var.fleet_size.macos_15
])
load_balancer_dns = macstadium_load_balancer.runner_lb.dns_name
autoscaling_enabled = var.autoscaling_enabled
backup_enabled = var.backup_config.enable_snapshots
monitoring_enabled = var.monitoring_config.enable_cloudwatch
deployment_time = timestamp()
status = "deployed"
}
}

View File

@@ -1,266 +0,0 @@
#!/bin/bash
# User data script for macOS VM initialization
# This script runs when the VM starts up
set -euo pipefail
# Variables passed from Terraform
BUILDKITE_AGENT_TOKEN="${buildkite_agent_token}"
GITHUB_TOKEN="${github_token}"
MACOS_VERSION="${macos_version}"
VM_NAME="${vm_name}"
# Logging
LOG_FILE="/var/log/vm-init.log"
exec 1> >(tee -a "$LOG_FILE")
exec 2> >(tee -a "$LOG_FILE" >&2)
print() {
echo "[$(date '+%Y-%m-%d %H:%M:%S')] $*"
}
print "Starting VM initialization for $VM_NAME (macOS $MACOS_VERSION)"
# Wait for system to be ready
print "Waiting for system to be ready..."
until ping -c1 google.com &>/dev/null; do
sleep 10
done
# Set timezone
print "Setting timezone to UTC..."
sudo systemsetup -settimezone UTC
# Configure hostname
print "Setting hostname to $VM_NAME..."
sudo scutil --set HostName "$VM_NAME"
sudo scutil --set LocalHostName "$VM_NAME"
sudo scutil --set ComputerName "$VM_NAME"
# Update system
print "Checking for system updates..."
sudo softwareupdate -i -a --no-scan || true
# Configure Buildkite agent
print "Configuring Buildkite agent..."
mkdir -p /usr/local/var/buildkite-agent
mkdir -p /usr/local/var/log/buildkite-agent
# Create Buildkite agent configuration
cat > /usr/local/var/buildkite-agent/buildkite-agent.cfg << EOF
token="$BUILDKITE_AGENT_TOKEN"
name="$VM_NAME"
tags="queue=macos,os=macos,arch=$(uname -m),version=$MACOS_VERSION,hostname=$VM_NAME"
build-path="/Users/buildkite/workspace"
hooks-path="/usr/local/bin/bun-ci/hooks"
plugins-path="/Users/buildkite/.buildkite-agent/plugins"
git-clean-flags="-fdq"
git-clone-flags="-v"
shell="/bin/bash -l"
spawn=1
priority=normal
disconnect-after-job=false
disconnect-after-idle-timeout=0
cancel-grace-period=10
enable-job-log-tmpfile=true
timestamp-lines=true
EOF
# Set up GitHub token for private repositories
print "Configuring GitHub access..."
if [[ -n "$GITHUB_TOKEN" ]]; then
# Configure git to use the token
git config --global url."https://oauth2:$GITHUB_TOKEN@github.com/".insteadOf "https://github.com/"
git config --global url."https://oauth2:$GITHUB_TOKEN@github.com/".insteadOf "git@github.com:"
# Configure npm to use the token
npm config set @oven-sh:registry https://npm.pkg.github.com/
echo "//npm.pkg.github.com/:_authToken=$GITHUB_TOKEN" >> ~/.npmrc
fi
# Set up SSH keys for GitHub (if available)
if [[ -f "/usr/local/etc/ssh/github_rsa" ]]; then
print "Configuring SSH keys for GitHub..."
mkdir -p ~/.ssh
cp /usr/local/etc/ssh/github_rsa ~/.ssh/
cp /usr/local/etc/ssh/github_rsa.pub ~/.ssh/
chmod 600 ~/.ssh/github_rsa
chmod 644 ~/.ssh/github_rsa.pub
# Configure SSH to use the key
cat > ~/.ssh/config << EOF
Host github.com
HostName github.com
User git
IdentityFile ~/.ssh/github_rsa
StrictHostKeyChecking no
EOF
fi
# Create health check endpoint
print "Setting up health check endpoint..."
cat > /usr/local/bin/health-check.sh << 'EOF'
#!/bin/bash
# Health check script for load balancer
set -euo pipefail
# Check if system is ready
if ! ping -c1 google.com &>/dev/null; then
echo "Network not ready"
exit 1
fi
# Check disk space
DISK_USAGE=$(df -h / | awk 'NR==2 {print $5}' | sed 's/%//')
if [[ $DISK_USAGE -gt 95 ]]; then
echo "Disk usage too high: ${DISK_USAGE}%"
exit 1
fi
# Check memory
MEMORY_PRESSURE=$(memory_pressure | grep "System-wide memory free percentage" | awk '{print $5}' | sed 's/%//')
if [[ $MEMORY_PRESSURE -lt 5 ]]; then
echo "Memory pressure too high: ${MEMORY_PRESSURE}% free"
exit 1
fi
# Check if required services are running
if ! pgrep -f "job-runner.sh" > /dev/null; then
echo "Job runner not running"
exit 1
fi
echo "OK"
exit 0
EOF
chmod +x /usr/local/bin/health-check.sh
# Start simple HTTP server for health checks
print "Starting health check server..."
cat > /usr/local/bin/health-server.sh << 'EOF'
#!/bin/bash
# Simple HTTP server for health checks
PORT=8080
while true; do
echo -e "HTTP/1.1 200 OK\r\nContent-Type: text/plain\r\n\r\n$(/usr/local/bin/health-check.sh)" | nc -l -p $PORT
done
EOF
chmod +x /usr/local/bin/health-server.sh
# Create LaunchDaemon for health check server
cat > /Library/LaunchDaemons/com.bun.health-server.plist << 'EOF'
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>com.bun.health-server</string>
<key>ProgramArguments</key>
<array>
<string>/usr/local/bin/health-server.sh</string>
</array>
<key>RunAtLoad</key>
<true/>
<key>KeepAlive</key>
<true/>
<key>StandardOutPath</key>
<string>/var/log/health-server.log</string>
<key>StandardErrorPath</key>
<string>/var/log/health-server.error.log</string>
</dict>
</plist>
EOF
# Load and start the health check server
sudo launchctl load /Library/LaunchDaemons/com.bun.health-server.plist
sudo launchctl start com.bun.health-server
# Configure log rotation
print "Configuring log rotation..."
cat > /etc/newsyslog.d/bun-ci.conf << 'EOF'
# Log rotation for Bun CI
/usr/local/var/log/buildkite-agent/*.log 644 5 1000 * GZ
/var/log/vm-init.log 644 5 1000 * GZ
/var/log/health-server.log 644 5 1000 * GZ
/var/log/health-server.error.log 644 5 1000 * GZ
EOF
# Restart syslog to pick up new configuration
sudo launchctl unload /System/Library/LaunchDaemons/com.apple.syslogd.plist
sudo launchctl load /System/Library/LaunchDaemons/com.apple.syslogd.plist
# Configure system monitoring
print "Setting up system monitoring..."
cat > /usr/local/bin/system-monitor.sh << 'EOF'
#!/bin/bash
# System monitoring script
LOG_FILE="/var/log/system-monitor.log"
while true; do
echo "[$(date '+%Y-%m-%d %H:%M:%S')] System Stats:" >> "$LOG_FILE"
echo " CPU: $(top -l 1 -n 0 | grep "CPU usage" | awk '{print $3}' | sed 's/%//')" >> "$LOG_FILE"
echo " Memory: $(memory_pressure | grep "System-wide memory free percentage" | awk '{print $5}')" >> "$LOG_FILE"
echo " Disk: $(df -h / | awk 'NR==2 {print $5}')" >> "$LOG_FILE"
echo " Load: $(uptime | awk -F'load averages:' '{print $2}')" >> "$LOG_FILE"
echo " Processes: $(ps aux | wc -l)" >> "$LOG_FILE"
echo "" >> "$LOG_FILE"
sleep 300 # 5 minutes
done
EOF
chmod +x /usr/local/bin/system-monitor.sh
# Create LaunchDaemon for system monitoring
cat > /Library/LaunchDaemons/com.bun.system-monitor.plist << 'EOF'
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>com.bun.system-monitor</string>
<key>ProgramArguments</key>
<array>
<string>/usr/local/bin/system-monitor.sh</string>
</array>
<key>RunAtLoad</key>
<true/>
<key>KeepAlive</key>
<true/>
</dict>
</plist>
EOF
# Load and start the system monitor
sudo launchctl load /Library/LaunchDaemons/com.bun.system-monitor.plist
sudo launchctl start com.bun.system-monitor
# Final configuration
print "Performing final configuration..."
# Ensure all services are running
sudo launchctl load /Library/LaunchDaemons/com.buildkite.buildkite-agent.plist
sudo launchctl start com.buildkite.buildkite-agent
# Create marker file to indicate initialization is complete
touch /var/tmp/vm-init-complete
echo "$(date '+%Y-%m-%d %H:%M:%S'): VM initialization completed" >> /var/tmp/vm-init-complete
print "VM initialization completed successfully!"
print "VM Name: $VM_NAME"
print "macOS Version: $MACOS_VERSION"
print "Status: Ready for Buildkite jobs"
# Log final system state
print "Final system state:"
print " Hostname: $(hostname)"
print " Uptime: $(uptime)"
print " Disk usage: $(df -h / | awk 'NR==2 {print $5}')"
print " Memory: $(memory_pressure | grep "System-wide memory free percentage" | awk '{print $5}')"
print "Health check available at: http://$(hostname):8080/health"

View File

@@ -1,302 +0,0 @@
# Core infrastructure variables
variable "project_name" {
description = "Name of the project"
type = string
default = "bun-ci"
}
variable "environment" {
description = "Environment name"
type = string
default = "production"
}
variable "region" {
description = "MacStadium region"
type = string
default = "us-west-1"
}
# MacStadium configuration
variable "macstadium_api_key" {
description = "MacStadium API key"
type = string
sensitive = true
}
variable "macstadium_endpoint" {
description = "MacStadium API endpoint"
type = string
default = "https://api.macstadium.com"
}
# Buildkite configuration
variable "buildkite_agent_token" {
description = "Buildkite agent token"
type = string
sensitive = true
}
variable "buildkite_org" {
description = "Buildkite organization slug"
type = string
default = "bun"
}
variable "buildkite_queues" {
description = "Buildkite queues to register agents with"
type = list(string)
default = ["macos", "macos-arm64", "macos-x86_64"]
}
# GitHub configuration
variable "github_token" {
description = "GitHub token for accessing private repositories"
type = string
sensitive = true
}
variable "github_org" {
description = "GitHub organization"
type = string
default = "oven-sh"
}
# VM fleet configuration
variable "fleet_size" {
description = "Number of VMs per macOS version"
type = object({
macos_13 = number
macos_14 = number
macos_15 = number
})
default = {
macos_13 = 4
macos_14 = 6
macos_15 = 8
}
validation {
condition = alltrue([
var.fleet_size.macos_13 >= 0,
var.fleet_size.macos_14 >= 0,
var.fleet_size.macos_15 >= 0,
var.fleet_size.macos_13 + var.fleet_size.macos_14 + var.fleet_size.macos_15 > 0
])
error_message = "Fleet sizes must be non-negative and at least one version must have VMs."
}
}
variable "vm_configuration" {
description = "VM configuration settings"
type = object({
cpu_count = number
memory_gb = number
disk_size = number
})
default = {
cpu_count = 12
memory_gb = 32
disk_size = 500
}
validation {
condition = alltrue([
var.vm_configuration.cpu_count >= 4,
var.vm_configuration.memory_gb >= 16,
var.vm_configuration.disk_size >= 100
])
error_message = "VM configuration must have at least 4 CPUs, 16GB memory, and 100GB disk."
}
}
# Auto-scaling configuration
variable "autoscaling_enabled" {
description = "Enable auto-scaling for VM fleet"
type = bool
default = true
}
variable "autoscaling_config" {
description = "Auto-scaling configuration"
type = object({
min_size = number
max_size = number
desired_capacity = number
scale_up_threshold = number
scale_down_threshold = number
scale_up_adjustment = number
scale_down_adjustment = number
cooldown_period = number
})
default = {
min_size = 2
max_size = 30
desired_capacity = 10
scale_up_threshold = 80
scale_down_threshold = 20
scale_up_adjustment = 2
scale_down_adjustment = 1
cooldown_period = 300
}
}
# Image configuration
variable "image_name_prefix" {
description = "Prefix for VM image names"
type = string
default = "bun-macos"
}
variable "image_rebuild_schedule" {
description = "Cron schedule for rebuilding images"
type = string
default = "0 2 * * *" # Daily at 2 AM
}
variable "image_retention_days" {
description = "Number of days to retain old images"
type = number
default = 7
}
# Network configuration
variable "network_config" {
description = "Network configuration"
type = object({
cidr_block = string
enable_nat = bool
enable_vpn = bool
allowed_cidrs = list(string)
})
default = {
cidr_block = "10.0.0.0/16"
enable_nat = true
enable_vpn = false
allowed_cidrs = ["0.0.0.0/0"]
}
}
# Security configuration
variable "security_config" {
description = "Security configuration"
type = object({
enable_ssh_access = bool
enable_vnc_access = bool
ssh_allowed_cidrs = list(string)
vnc_allowed_cidrs = list(string)
enable_disk_encryption = bool
})
default = {
enable_ssh_access = true
enable_vnc_access = true
ssh_allowed_cidrs = ["0.0.0.0/0"]
vnc_allowed_cidrs = ["10.0.0.0/16"]
enable_disk_encryption = true
}
}
# Monitoring configuration
variable "monitoring_config" {
description = "Monitoring configuration"
type = object({
enable_cloudwatch = bool
enable_custom_metrics = bool
log_retention_days = number
alert_email = string
})
default = {
enable_cloudwatch = true
enable_custom_metrics = true
log_retention_days = 30
alert_email = "devops@oven.sh"
}
}
# Backup configuration
variable "backup_config" {
description = "Backup configuration"
type = object({
enable_snapshots = bool
snapshot_schedule = string
snapshot_retention = number
enable_cross_region = bool
})
default = {
enable_snapshots = true
snapshot_schedule = "0 4 * * *" # Daily at 4 AM
snapshot_retention = 7
enable_cross_region = false
}
}
# Cost optimization
variable "cost_optimization" {
description = "Cost optimization settings"
type = object({
enable_spot_instances = bool
spot_price_max = number
enable_hibernation = bool
idle_shutdown_timeout = number
})
default = {
enable_spot_instances = false
spot_price_max = 0.0
enable_hibernation = false
idle_shutdown_timeout = 3600 # 1 hour
}
}
# Maintenance configuration
variable "maintenance_config" {
description = "Maintenance configuration"
type = object({
maintenance_window_start = string
maintenance_window_end = string
auto_update_enabled = bool
patch_schedule = string
})
default = {
maintenance_window_start = "02:00"
maintenance_window_end = "06:00"
auto_update_enabled = true
patch_schedule = "0 3 * * 0" # Weekly on Sunday at 3 AM
}
}
# Tagging
variable "tags" {
description = "Additional tags to apply to resources"
type = map(string)
default = {}
}
# SSH key configuration
variable "ssh_key_name" {
description = "Name of the SSH key pair"
type = string
default = "bun-runner-key"
}
variable "ssh_public_key_path" {
description = "Path to the SSH public key file"
type = string
default = "~/.ssh/id_rsa.pub"
}
# Feature flags
variable "feature_flags" {
description = "Feature flags for experimental features"
type = object({
enable_gpu_passthrough = bool
enable_nested_virt = bool
enable_secure_boot = bool
enable_tpm = bool
})
default = {
enable_gpu_passthrough = true
enable_nested_virt = false
enable_secure_boot = false
enable_tpm = false
}
}

View File

@@ -1,7 +0,0 @@
import { getCommit, getSecret } from "../../scripts/utils.mjs";
console.log("Submitting...");
const response = await fetch(getSecret("BENCHMARK_URL") + "?tag=_&commit=" + getCommit() + "&artifact_url=_", {
method: "POST",
});
console.log("Got status " + response.status);

View File

@@ -158,36 +158,25 @@ function upload_s3_file() {
run_command aws --endpoint-url="$AWS_ENDPOINT" s3 cp "$file" "s3://$AWS_BUCKET/$folder/$file"
}
function send_discord_announcement() {
local value=$(buildkite-agent secret get "BUN_ANNOUNCE_CANARY_WEBHOOK_URL")
if [ -z "$value" ]; then
echo "warn: BUN_ANNOUNCE_CANARY_WEBHOOK_URL not set, skipping Discord announcement"
function send_bench_webhook() {
if [ -z "$BENCHMARK_URL" ]; then
echo "error: \$BENCHMARK_URL is not set"
# exit 1 # TODO: this isn't live yet
return
fi
local version="$1"
local tag="$1"
local commit="$BUILDKITE_COMMIT"
local short_sha="${commit:0:7}"
local commit_url="https://github.com/oven-sh/bun/commit/$commit"
local artifact_path="${commit}"
if [ "$version" == "canary" ]; then
local json_payload=$(cat <<EOF
{
"embeds": [{
"title": "New Bun Canary now available",
"description": "A new canary build of Bun has been automatically uploaded ([${short_sha}](${commit_url})). To upgrade, run:\n\n\`\`\`shell\nbun upgrade --canary\n\`\`\`\nCommit: \`${commit}\`",
"color": 16023551,
"timestamp": "$(date -u +%Y-%m-%dT%H:%M:%SZ)"
}]
}
EOF
)
curl -H "Content-Type: application/json" \
-d "$json_payload" \
-sf \
"$value" >/dev/null
if [ "$tag" == "canary" ]; then
artifact_path="${commit}-canary"
fi
local artifact_url="https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/$artifact_path/bun-linux-x64.zip"
local webhook_url="$BENCHMARK_URL?tag=$tag&commit=$commit&artifact_url=$artifact_url"
curl -X POST "$webhook_url"
}
function create_release() {
@@ -201,8 +190,6 @@ function create_release() {
local artifacts=(
bun-darwin-aarch64.zip
bun-darwin-aarch64-profile.zip
bun-darwin-x64.zip
bun-darwin-x64-profile.zip
bun-linux-aarch64.zip
bun-linux-aarch64-profile.zip
bun-linux-x64.zip
@@ -240,7 +227,7 @@ function create_release() {
update_github_release "$tag"
create_sentry_release "$tag"
send_discord_announcement "$tag"
send_bench_webhook "$tag"
}
function assert_canary() {

View File

@@ -6,3 +6,6 @@ CompileFlags:
Diagnostics:
UnusedIncludes: None
HeaderInsertion:
IncludeBlocks: Preserve # Do not auto-include headers.

View File

@@ -1,92 +0,0 @@
# Upgrading Bun's Self-Reported Node.js Version
This guide explains how to upgrade the Node.js version that Bun reports for compatibility with Node.js packages and native addons.
## Overview
Bun reports a Node.js version for compatibility with the Node.js ecosystem. This affects the following (a quick way to print each is sketched after the list):
- `process.version` output
- Node-API (N-API) compatibility
- Native addon ABI compatibility
- V8 API compatibility for addons using V8 directly
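For a quick sanity check, all of these surfaces can be printed from a script run with your debug build (a minimal sketch; the file name is hypothetical, and the expected values depend on the version you are upgrading to):
```ts
// check-versions.ts (hypothetical file name) -- run with `bun bd check-versions.ts`
console.log(process.version); // self-reported Node.js version, e.g. "v24.3.0"
console.log(process.versions.v8); // V8 version reported for addon compatibility
console.log(process.config.variables.node_module_version); // ABI (NODE_MODULE_VERSION)
```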
## Files That Always Need Updates
### 1. Bootstrap Scripts
- `scripts/bootstrap.sh` - Update `NODEJS_VERSION=`
- `scripts/bootstrap.ps1` - Update `$NODEJS_VERSION =`
### 2. CMake Configuration
- `cmake/Options.cmake`
- `NODEJS_VERSION` - The Node.js version string (e.g., "24.3.0")
- `NODEJS_ABI_VERSION` - The ABI version number (find using command below)
### 3. Version Strings
- `src/bun.js/bindings/BunProcess.cpp`
- Update `Bun__versions_node` with the Node.js version
- Update `Bun__versions_v8` with the V8 version (find using command below)
### 4. N-API Version
- `src/napi/js_native_api.h`
- Update `NAPI_VERSION` define (check Node.js release notes)
## Files That May Need Updates
Only check these if the build fails or tests crash after updating version numbers:
- V8 compatibility files in `src/bun.js/bindings/v8/` (if V8 API changed)
- Test files (if Node.js requires newer C++ standard)
## Quick Commands to Find Version Info
```bash
# Get latest Node.js version info
curl -s https://nodejs.org/dist/index.json | jq '.[0]'
# Get V8 version for a specific Node.js version (replace v24.3.0)
curl -s https://nodejs.org/dist/v24.3.0/node-v24.3.0-headers.tar.gz | tar -xzO node-v24.3.0/include/node/node_version.h | grep V8_VERSION
# Get ABI version for a specific Node.js version
curl -s https://nodejs.org/dist/v24.3.0/node-v24.3.0-headers.tar.gz | tar -xzO node-v24.3.0/include/node/node_version.h | grep NODE_MODULE_VERSION
# Or use the ABI registry
curl -s https://raw.githubusercontent.com/nodejs/node/main/doc/abi_version_registry.json | jq '.NODE_MODULE_VERSION."<version>"'
```
## Update Process
1. **Gather version info** using the commands above
2. **Update the required files** listed in the sections above
3. **Build and test**:
```bash
bun bd
bun bd -e "console.log(process.version)"
bun bd -e "console.log(process.versions.v8)"
bun bd test test/v8/v8.test.ts
bun bd test test/napi/napi.test.ts
```
4. **Check for V8 API changes** only if build fails or tests crash:
- Compare v8-function-callback.h between versions
- Check v8-internal.h for Isolate size changes
- Look for new required APIs in build errors
## If Build Fails or Tests Crash
The V8 API rarely has breaking changes between minor Node.js versions. If you encounter issues:
1. Check build errors for missing symbols or type mismatches
2. Compare V8 headers between old and new Node.js versions
3. Most issues can be resolved by implementing missing functions or adjusting structures
## Testing Checklist
- [ ] `process.version` returns correct version
- [ ] `process.versions.v8` returns correct V8 version
- [ ] `process.config.variables.node_module_version` returns correct ABI
- [ ] V8 tests pass
- [ ] N-API tests pass
## Notes
- Most upgrades only require updating version numbers
- Major V8 version changes (rare) may require API updates
- The V8 shim implements only APIs used by common native addons

View File

@@ -1,23 +0,0 @@
Upgrade Bun's Webkit fork to the latest upstream version of Webkit.
To do that:
- cd vendor/WebKit
- git fetch upstream
- git merge upstream main
- Fix the merge conflicts
- cd ../../ (back to bun)
- make jsc-build (this will take about 7 minutes)
- While it compiles, in another task review the JSC commits between the last version of Webkit and the new version. Write up a summary of the webkit changes in a file called "webkit-changes.md"
- bun run build:local (build a build of Bun with the new Webkit, make sure it compiles)
- After making sure it compiles, run some code to make sure things work. something like ./build/debug-local/bun-debug --print '42' should be all you need
- cd vendor/WebKit
- git commit -am "Upgrade Webkit to the latest version"
- git push
- get the commit SHA in the vendor/WebKit directory of your new commit
- cd ../../ (back to bun)
- Update WEBKIT_VERSION in cmake/tools/SetupWebKit.cmake to the commit SHA of your new commit
- git checkout -b bun/webkit-upgrade-<commit-sha>
- commit + push (without adding the webkit-changes.md file)
- create PR titled "Upgrade Webkit to the <commit-sha>", paste your webkit-changes.md into the PR description
- delete the webkit-changes.md file

View File

@@ -1,10 +0,0 @@
{
"snapshot": "snapshot-20250706-71021aff-cc0d-4a7f-a468-d443b16c4bf1",
"install": "bun install",
"terminals": [
{
"name": "bun build",
"command": "bun run build"
}
]
}

View File

@@ -1,41 +0,0 @@
---
description:
globs: src/**/*.cpp,src/**/*.zig
alwaysApply: false
---
### Build Commands
- **Build debug version**: `bun bd` or `bun run build:debug`
- Creates a debug build at `./build/debug/bun-debug`
- Compilation takes ~2.5 minutes
- **Run tests with your debug build**: `bun bd test <test-file>`
- **CRITICAL**: Never use `bun test` directly - it won't include your changes
- **Run any command with debug build**: `bun bd <command>`
### Run a file
To run a file, use:
```sh
bun bd <file> <...args>
```
**CRITICAL**: Never use `bun <file>` directly. It will not have your changes.
### Logging
`BUN_DEBUG_$(SCOPE)=1` enables debug logs for a specific debug log scope.
Debug logs look like this:
```zig
const log = bun.Output.scoped(.${SCOPE}, false);
// ...later
log("MY DEBUG LOG", .{})
```
### Code Generation
Code generation happens automatically as part of the build process. There are no commands to run.

View File

@@ -1,139 +0,0 @@
---
description: Writing HMR/Dev Server tests
globs: test/bake/*
---
# Writing HMR/Dev Server tests
Dev server tests validate that hot-reloading is robust, correct, and reliable. Remember to write thorough, yet concise tests.
## File Structure
- `test/bake/bake-harness.ts` - shared utilities and test harness
- primary test functions `devTest` / `prodTest` / `devAndProductionTest`
- class `Dev` (controls subprocess for dev server)
- class `Client` (controls a happy-dom subprocess for having the page open)
- more helpers
- `test/bake/client-fixture.mjs` - subprocess for what `Client` controls. It loads a page and uses IPC to query parts of the page, run JavaScript, and much more.
- `test/bake/dev/*.test.ts` - these call `devTest` to test dev server and hot reloading
- `test/bake/dev-and-prod.ts` - these use `devAndProductionTest` to run the same test on dev and production mode. these tests cannot really test hot reloading for obvious reasons.
## Categories
bundle.test.ts - Bundle tests are tests concerning bundling bugs that only occur in DevServer.
css.test.ts - CSS tests concern bundling bugs with CSS files
plugins.test.ts - Plugin tests concern plugins in development mode.
ecosystem.test.ts - These tests involve ensuring certain libraries are correct. It is preferred to test more concrete bugs than testing entire packages.
esm.test.ts - ESM tests are about various esm features in development mode.
html.test.ts - HTML tests are tests relating to HTML files themselves.
react-spa.test.ts - Tests relating to React, our react-refresh transform, and basic server component transforms.
sourcemap.test.ts - Tests verifying source-maps are correct.
## `devTest` Basics
A test takes in two primary inputs: `files` and `async test(dev) {`
```ts
import { devTest, emptyHtmlFile } from "../bake-harness";
devTest("html file is watched", {
files: {
"index.html": emptyHtmlFile({
scripts: ["/script.ts"],
body: "<h1>Hello</h1>",
}),
"script.ts": `
console.log("hello");
`,
},
async test(dev) {
await dev.fetch("/").expect.toInclude("<h1>Hello</h1>");
await dev.fetch("/").expect.toInclude("<h1>Hello</h1>");
await dev.patch("index.html", {
find: "Hello",
replace: "World",
});
await dev.fetch("/").expect.toInclude("<h1>World</h1>");
// Works
await using c = await dev.client("/");
await c.expectMessage("hello");
// Editing HTML reloads
await c.expectReload(async () => {
await dev.patch("index.html", {
find: "World",
replace: "Hello",
});
await dev.fetch("/").expect.toInclude("<h1>Hello</h1>");
});
await c.expectMessage("hello");
await c.expectReload(async () => {
await dev.patch("index.html", {
find: "Hello",
replace: "Bar",
});
await dev.fetch("/").expect.toInclude("<h1>Bar</h1>");
});
await c.expectMessage("hello");
await c.expectReload(async () => {
await dev.patch("script.ts", {
find: "hello",
replace: "world",
});
});
await c.expectMessage("world");
},
});
```
`files` holds the initial state, and the callback runs with the server running. `dev.fetch()` runs HTTP requests, while `dev.client()` opens a browser instance to the code.
Functions `dev.write`, `dev.patch`, and `dev.delete` mutate the filesystem. Do not use `node:fs` APIs, as the dev server ones are hooked to wait for hot-reload and for all connected clients to receive changes.
When a change performs a hard reload, that must be explicitly annotated with `expectReload`. This tells `client-fixture.mjs` that the test is meant to reload the page once; all other hard reloads automatically fail the test.
Clients have `console.log` instrumented so that any unasserted logs fail the test. This makes it more obvious when an extra reload or re-evaluation happens. Messages are awaited via `c.expectMessage("log")`, or with multiple arguments if there are multiple logs, as in the sketch below.
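For instance, if one edit produces two client logs, both are asserted in a single call (a minimal sketch; the log strings are placeholders):
```ts
// Both logs emitted by the client must be asserted, in order;
// any log left unasserted fails the test.
await c.expectMessage("first log", "second log");
```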
## Testing for bundling errors
By default, a client opening a page to an error will fail the test. This makes testing errors explicit.
```ts
devTest("import then create", {
files: {
"index.html": `
<!DOCTYPE html>
<html>
<head></head>
<body>
<script type="module" src="/script.ts"></script>
</body>
</html>
`,
"script.ts": `
import data from "./data";
console.log(data);
`,
},
async test(dev) {
const c = await dev.client("/", {
errors: ['script.ts:1:18: error: Could not resolve: "./data"'],
});
await c.expectReload(async () => {
await dev.write("data.ts", "export default 'data';");
});
await c.expectMessage("data");
},
});
```
Many functions take an options value that lets you specify the errors they are expected to produce. For example, this delete is going to cause a resolution failure.
```ts
await dev.delete("other.ts", {
errors: ['index.ts:1:16: error: Could not resolve: "./other"'],
});
```

View File

@@ -1,413 +0,0 @@
---
description: JavaScript class implemented in C++
globs: *.cpp
alwaysApply: false
---
# Implementing JavaScript classes in C++
If there is a publicly accessible Constructor and Prototype, then there are 3 classes:
- If there are C++ class members, we need a destructor, so use `class Foo : public JSC::DestructibleObject`. If there are no C++ class fields (only JS properties), we usually don't need a class at all; we can instead use `JSC::constructEmptyObject(vm, structure)` and `putDirectOffset` like in [NodeFSStatBinding.cpp](mdc:src/bun.js/bindings/NodeFSStatBinding.cpp).
- class FooPrototype : public JSC::JSNonFinalObject
- class FooConstructor : public JSC::InternalFunction
If there is no publicly accessible Constructor, only the Prototype and the class are needed. In some cases, we can avoid the prototype entirely (but that's rare).
If there are C++ fields on the Foo class, the Foo class will need an iso subspace added to [DOMClientIsoSubspaces.h](mdc:src/bun.js/bindings/webcore/DOMClientIsoSubspaces.h) and [DOMIsoSubspaces.h](mdc:src/bun.js/bindings/webcore/DOMIsoSubspaces.h). Prototype and Constructor do not need subspaces.
Usually you'll need to #include "root.h" at the top of C++ files or you'll get lint errors.
Generally, defining the subspace looks like this:
```c++
class Foo : public JSC::DestructibleObject {
// ...
template<typename MyClassT, JSC::SubspaceAccess mode>
static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm)
{
if constexpr (mode == JSC::SubspaceAccess::Concurrently)
return nullptr;
return WebCore::subspaceForImpl<MyClassT, WebCore::UseCustomHeapCellType::No>(
vm,
[](auto& spaces) { return spaces.m_clientSubspaceFor${MyClassT}.get(); },
[](auto& spaces, auto&& space) { spaces.m_clientSubspaceFor${MyClassT} = std::forward<decltype(space)>(space); },
[](auto& spaces) { return spaces.m_subspaceFor${MyClassT}.get(); },
[](auto& spaces, auto&& space) { spaces.m_subspaceFor${MyClassT} = std::forward<decltype(space)>(space); });
}
```
It's better to put it in the .cpp file instead of the .h file, when possible.
## Defining properties
Define properties on the prototype. Use a const HashTableValues like this:
```C++
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckEmail);
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckHost);
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckIP);
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckIssued);
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckPrivateKey);
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncToJSON);
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncToLegacyObject);
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncToString);
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncVerify);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_ca);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_fingerprint);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_fingerprint256);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_fingerprint512);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_subject);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_subjectAltName);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_infoAccess);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_keyUsage);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_issuer);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_issuerCertificate);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_publicKey);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_raw);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_serialNumber);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_validFrom);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_validTo);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_validFromDate);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_validToDate);
static const HashTableValue JSX509CertificatePrototypeTableValues[] = {
{ "ca"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_ca, 0 } },
{ "checkEmail"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncCheckEmail, 2 } },
{ "checkHost"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncCheckHost, 2 } },
{ "checkIP"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncCheckIP, 1 } },
{ "checkIssued"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncCheckIssued, 1 } },
{ "checkPrivateKey"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncCheckPrivateKey, 1 } },
{ "fingerprint"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_fingerprint, 0 } },
{ "fingerprint256"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_fingerprint256, 0 } },
{ "fingerprint512"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_fingerprint512, 0 } },
{ "infoAccess"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_infoAccess, 0 } },
{ "issuer"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_issuer, 0 } },
{ "issuerCertificate"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_issuerCertificate, 0 } },
{ "keyUsage"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_keyUsage, 0 } },
{ "publicKey"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_publicKey, 0 } },
{ "raw"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_raw, 0 } },
{ "serialNumber"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_serialNumber, 0 } },
{ "subject"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_subject, 0 } },
{ "subjectAltName"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_subjectAltName, 0 } },
{ "toJSON"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncToJSON, 0 } },
{ "toLegacyObject"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncToLegacyObject, 0 } },
{ "toString"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncToString, 0 } },
{ "validFrom"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_validFrom, 0 } },
{ "validFromDate"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessorOrValue), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_validFromDate, 0 } },
{ "validTo"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_validTo, 0 } },
{ "validToDate"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessorOrValue), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_validToDate, 0 } },
{ "verify"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncVerify, 1 } },
};
```
### Creating a prototype class
Follow a pattern like this:
```c++
class JSX509CertificatePrototype final : public JSC::JSNonFinalObject {
public:
using Base = JSC::JSNonFinalObject;
static constexpr unsigned StructureFlags = Base::StructureFlags;
static JSX509CertificatePrototype* create(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::Structure* structure)
{
JSX509CertificatePrototype* prototype = new (NotNull, allocateCell<JSX509CertificatePrototype>(vm)) JSX509CertificatePrototype(vm, structure);
prototype->finishCreation(vm);
return prototype;
}
template<typename, JSC::SubspaceAccess>
static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm)
{
return &vm.plainObjectSpace();
}
DECLARE_INFO;
static JSC::Structure* createStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::JSValue prototype)
{
auto* structure = JSC::Structure::create(vm, globalObject, prototype, JSC::TypeInfo(JSC::ObjectType, StructureFlags), info());
structure->setMayBePrototype(true);
return structure;
}
private:
JSX509CertificatePrototype(JSC::VM& vm, JSC::Structure* structure)
: Base(vm, structure)
{
}
void finishCreation(JSC::VM& vm);
};
const ClassInfo JSX509CertificatePrototype::s_info = { "X509Certificate"_s, &Base::s_info, nullptr, nullptr, CREATE_METHOD_TABLE(JSX509CertificatePrototype) };
void JSX509CertificatePrototype::finishCreation(VM& vm)
{
Base::finishCreation(vm);
reifyStaticProperties(vm, JSX509Certificate::info(), JSX509CertificatePrototypeTableValues, *this);
JSC_TO_STRING_TAG_WITHOUT_TRANSITION();
}
} // namespace Bun
```
### Getter definition:
```C++
JSC_DEFINE_CUSTOM_GETTER(jsX509CertificateGetter_ca, (JSGlobalObject * globalObject, EncodedJSValue thisValue, PropertyName))
{
VM& vm = globalObject->vm();
auto scope = DECLARE_THROW_SCOPE(vm);
JSX509Certificate* thisObject = jsDynamicCast<JSX509Certificate*>(JSValue::decode(thisValue));
if (UNLIKELY(!thisObject)) {
Bun::throwThisTypeError(*globalObject, scope, "JSX509Certificate"_s, "ca"_s);
return {};
}
return JSValue::encode(jsBoolean(thisObject->view().isCA()));
}
```
### Setter definition
```C++
JSC_DEFINE_CUSTOM_SETTER(jsImportMetaObjectSetter_require, (JSGlobalObject * jsGlobalObject, JSC::EncodedJSValue thisValue, JSC::EncodedJSValue encodedValue, PropertyName propertyName))
{
ImportMetaObject* thisObject = jsDynamicCast<ImportMetaObject*>(JSValue::decode(thisValue));
if (UNLIKELY(!thisObject))
return false;
JSValue value = JSValue::decode(encodedValue);
if (!value.isCell()) {
// TODO:
return true;
}
thisObject->requireProperty.set(thisObject->vm(), thisObject, value.asCell());
return true;
}
```
### Function definition
```C++
JSC_DEFINE_HOST_FUNCTION(jsX509CertificateProtoFuncToJSON, (JSGlobalObject * globalObject, CallFrame* callFrame))
{
VM& vm = globalObject->vm();
auto scope = DECLARE_THROW_SCOPE(vm);
auto *thisObject = jsDynamicCast<MyClassT*>(callFrame->thisValue());
if (UNLIKELY(!thisObject)) {
Bun::throwThisTypeError(*globalObject, scope, "MyClass"_s, "myFunctionName"_s);
return {};
}
return JSValue::encode(functionThatReturnsJSValue(vm, globalObject, thisObject));
}
```
### Constructor definition
```C++
JSC_DECLARE_HOST_FUNCTION(callStats);
JSC_DECLARE_HOST_FUNCTION(constructStats);
class JSStatsConstructor final : public JSC::InternalFunction {
public:
using Base = JSC::InternalFunction;
static constexpr unsigned StructureFlags = Base::StructureFlags;
static JSStatsConstructor* create(JSC::VM& vm, JSC::Structure* structure, JSC::JSObject* prototype)
{
JSStatsConstructor* constructor = new (NotNull, JSC::allocateCell<JSStatsConstructor>(vm)) JSStatsConstructor(vm, structure);
constructor->finishCreation(vm, prototype);
return constructor;
}
DECLARE_INFO;
template<typename CellType, JSC::SubspaceAccess>
static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm)
{
return &vm.internalFunctionSpace();
}
static JSC::Structure* createStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::JSValue prototype)
{
return JSC::Structure::create(vm, globalObject, prototype, JSC::TypeInfo(JSC::InternalFunctionType, StructureFlags), info());
}
private:
JSStatsConstructor(JSC::VM& vm, JSC::Structure* structure)
: Base(vm, structure, callStats, constructStats)
{
}
void finishCreation(JSC::VM& vm, JSC::JSObject* prototype)
{
Base::finishCreation(vm, 0, "Stats"_s);
putDirectWithoutTransition(vm, vm.propertyNames->prototype, prototype, JSC::PropertyAttribute::DontEnum | JSC::PropertyAttribute::DontDelete | JSC::PropertyAttribute::ReadOnly);
}
};
```
### Structure caching
If there's a class, prototype, and constructor:
1. Add the `JSC::LazyClassStructure` to [ZigGlobalObject.h](mdc:src/bun.js/bindings/ZigGlobalObject.h)
2. Initialize the class structure in [ZigGlobalObject.cpp](mdc:src/bun.js/bindings/ZigGlobalObject.cpp) in `void GlobalObject::finishCreation(VM& vm)`
3. Visit the class structure in visitChildren in [ZigGlobalObject.cpp](mdc:src/bun.js/bindings/ZigGlobalObject.cpp) in `void GlobalObject::visitChildrenImpl`
```c++#ZigGlobalObject.cpp
void GlobalObject::finishCreation(VM& vm) {
// ...
m_JSStatsBigIntClassStructure.initLater(
[](LazyClassStructure::Initializer& init) {
// Call the function to initialize our class structure.
Bun::initJSBigIntStatsClassStructure(init);
});
```
Then, implement the function that creates the structure:
```c++
void setupX509CertificateClassStructure(LazyClassStructure::Initializer& init)
{
auto* prototypeStructure = JSX509CertificatePrototype::createStructure(init.vm, init.global, init.global->objectPrototype());
auto* prototype = JSX509CertificatePrototype::create(init.vm, init.global, prototypeStructure);
auto* constructorStructure = JSX509CertificateConstructor::createStructure(init.vm, init.global, init.global->functionPrototype());
auto* constructor = JSX509CertificateConstructor::create(init.vm, init.global, constructorStructure, prototype);
auto* structure = JSX509Certificate::createStructure(init.vm, init.global, prototype);
init.setPrototype(prototype);
init.setStructure(structure);
init.setConstructor(constructor);
}
```
If there's only a class, use `JSC::LazyProperty<JSGlobalObject, Structure>` instead of `JSC::LazyClassStructure`:
1. Add the `JSC::LazyProperty<JSGlobalObject, Structure>` to @ZigGlobalObject.h
2. Initialize the class structure in @ZigGlobalObject.cpp in `void GlobalObject::finishCreation(VM& vm)`
3. Visit the lazy property in visitChildren in @ZigGlobalObject.cpp in `void GlobalObject::visitChildrenImpl`
```c++
void GlobalObject::finishCreation(VM& vm) {
// ...
this->m_myLazyProperty.initLater([](const JSC::LazyProperty<JSC::JSGlobalObject, JSC::Structure>::Initializer& init) {
init.set(Bun::initMyStructure(init.vm, reinterpret_cast<Zig::GlobalObject*>(init.owner)));
});
```
Then, implement the function that creates the structure:
```c++
Structure* setupX509CertificateStructure(JSC::VM& vm, Zig::GlobalObject* globalObject)
{
// If there is a prototype:
auto* prototypeStructure = JSX509CertificatePrototype::createStructure(vm, globalObject, globalObject->objectPrototype());
auto* prototype = JSX509CertificatePrototype::create(vm, globalObject, prototypeStructure);
// If there is no prototype, pass the global's object prototype in its place.
auto* structure = JSX509Certificate::createStructure(vm, globalObject, prototype);
return structure;
}
```
Then, use the structure by calling `globalObject.m_myStructureName.get(globalObject)`
```C++
JSC_DEFINE_HOST_FUNCTION(x509CertificateConstructorConstruct, (JSGlobalObject * globalObject, CallFrame* callFrame))
{
VM& vm = globalObject->vm();
auto scope = DECLARE_THROW_SCOPE(vm);
if (!callFrame->argumentCount()) {
Bun::throwError(globalObject, scope, ErrorCode::ERR_MISSING_ARGS, "X509Certificate constructor requires at least one argument"_s);
return {};
}
JSValue arg = callFrame->uncheckedArgument(0);
if (!arg.isCell()) {
Bun::throwError(globalObject, scope, ErrorCode::ERR_INVALID_ARG_TYPE, "X509Certificate constructor argument must be a Buffer, TypedArray, or string"_s);
return {};
}
auto* zigGlobalObject = defaultGlobalObject(globalObject);
Structure* structure = zigGlobalObject->m_JSX509CertificateClassStructure.get(zigGlobalObject);
JSValue newTarget = callFrame->newTarget();
if (UNLIKELY(zigGlobalObject->m_JSX509CertificateClassStructure.constructor(zigGlobalObject) != newTarget)) {
auto scope = DECLARE_THROW_SCOPE(vm);
if (!newTarget) {
throwTypeError(globalObject, scope, "Class constructor X509Certificate cannot be invoked without 'new'"_s);
return {};
}
auto* functionGlobalObject = defaultGlobalObject(getFunctionRealm(globalObject, newTarget.getObject()));
RETURN_IF_EXCEPTION(scope, {});
structure = InternalFunction::createSubclassStructure(globalObject, newTarget.getObject(), functionGlobalObject->m_JSX509CertificateClassStructure.get(functionGlobalObject));
RETURN_IF_EXCEPTION(scope, {});
}
return JSValue::encode(createX509Certificate(vm, globalObject, structure, arg));
}
```
### Expose to Zig
To expose the constructor to zig:
```c++
extern "C" JSC::EncodedJSValue Bun__JSBigIntStatsObjectConstructor(Zig::GlobalObject* globalobject)
{
return JSValue::encode(globalobject->m_JSStatsBigIntClassStructure.constructor(globalobject));
}
```
Zig:
```zig
extern "c" fn Bun__JSBigIntStatsObjectConstructor(*JSC.JSGlobalObject) JSC.JSValue;
pub const getBigIntStatsConstructor = Bun__JSBigIntStatsObjectConstructor;
```
To create an object (instance) of a JS class defined in C++ from Zig, follow the `__toJS` convention like this:
```c++
// X509* is whatever we need to create the object
extern "C" EncodedJSValue Bun__X509__toJS(Zig::GlobalObject* globalObject, X509* cert)
{
// ... implementation details
auto* structure = globalObject->m_JSX509CertificateClassStructure.get(globalObject);
return JSValue::encode(JSX509Certificate::create(globalObject->vm(), structure, globalObject, WTFMove(cert)));
}
```
And from Zig:
```zig
const X509 = opaque {
// ... class
extern fn Bun__X509__toJS(*JSC.JSGlobalObject, *X509) JSC.JSValue;
pub fn toJS(this: *X509, globalObject: *JSC.JSGlobalObject) JSC.JSValue {
return Bun__X509__toJS(globalObject, this);
}
};
```

View File

@@ -1,203 +0,0 @@
# Registering Functions, Objects, and Modules in Bun
This guide documents the process of adding new functionality to the Bun global object and runtime.
## Overview
Bun's architecture exposes functionality to JavaScript through a set of carefully registered functions, objects, and modules. Most core functionality is implemented in Zig, with JavaScript bindings that make these features accessible to users.
There are several key ways to expose functionality in Bun (each is sketched from the JavaScript side after this list):
1. **Global Functions**: Direct methods on the `Bun` object (e.g., `Bun.serve()`)
2. **Getter Properties**: Lazily initialized properties on the `Bun` object (e.g., `Bun.sqlite`)
3. **Constructor Classes**: Classes available through the `Bun` object (e.g., `Bun.ValkeyClient`)
4. **Global Modules**: Modules that can be imported directly (e.g., `import {X} from "bun:*"`)
## The Registration Process
Adding new functionality to Bun involves several coordinated steps across multiple files:
### 1. Implement the Core Functionality in Zig
First, implement your feature in Zig, typically in its own directory in `src/`. Examples:
- `src/valkey/` for Redis/Valkey client
- `src/semver/` for SemVer functionality
- `src/smtp/` for SMTP client
### 2. Create JavaScript Bindings
Create bindings that expose your Zig functionality to JavaScript:
- Create a class definition file (e.g., `js_bindings.classes.ts`) to define the JavaScript interface
- Implement `JSYourFeature` struct in a file like `js_your_feature.zig`
Example from a class definition file:
```typescript
// Example from a .classes.ts file
import { define } from "../../codegen/class-definitions";
export default [
define({
name: "YourFeature",
construct: true,
finalize: true,
hasPendingActivity: true,
memoryCost: true,
klass: {},
JSType: "0b11101110",
proto: {
yourMethod: {
fn: "yourZigMethod",
length: 1,
},
property: {
getter: "getProperty",
},
},
values: ["cachedValues"],
}),
];
```
### 3. Register with BunObject in `src/bun.js/bindings/BunObject+exports.h`
Add an entry to the `FOR_EACH_GETTER` macro:
```c
// In BunObject+exports.h
#define FOR_EACH_GETTER(macro) \
macro(CSRF) \
macro(CryptoHasher) \
... \
macro(YourFeature) \
```
### 4. Create a Getter Function in `src/bun.js/api/BunObject.zig`
Implement a getter function in `BunObject.zig` that returns your feature:
```zig
// In BunObject.zig
pub const YourFeature = toJSGetter(Bun.getYourFeatureConstructor);
// In the exportAll() function:
@export(&BunObject.YourFeature, .{ .name = getterName("YourFeature") });
```
### 5. Implement the Getter Function in a Relevant Zig File
Implement the function that creates your object:
```zig
// In your main module file (e.g., src/your_feature/your_feature.zig)
pub fn getYourFeatureConstructor(globalThis: *JSC.JSGlobalObject, _: *JSC.JSObject) JSC.JSValue {
return JSC.API.YourFeature.getConstructor(globalThis);
}
```
### 6. Add to Build System
Ensure your files are included in the build system by adding them to the appropriate targets.
## Example: Adding a New Module
Here's a comprehensive example of adding a hypothetical SMTP module:
1. Create implementation files in `src/smtp/`:
- `index.zig`: Main entry point that exports everything
- `SmtpClient.zig`: Core SMTP client implementation
- `js_smtp.zig`: JavaScript bindings
- `js_bindings.classes.ts`: Class definition
2. Define your JS class in `js_bindings.classes.ts`:
```typescript
import { define } from "../../codegen/class-definitions";
export default [
define({
name: "EmailClient",
construct: true,
finalize: true,
hasPendingActivity: true,
configurable: false,
memoryCost: true,
klass: {},
JSType: "0b11101110",
proto: {
send: {
fn: "send",
length: 1,
},
verify: {
fn: "verify",
length: 0,
},
close: {
fn: "close",
length: 0,
},
},
values: ["connectionPromise"],
}),
];
```
3. Add getter to `BunObject+exports.h`:
```c
#define FOR_EACH_GETTER(macro) \
macro(CSRF) \
... \
macro(SMTP) \
```
4. Add getter function to `BunObject.zig`:
```zig
pub const SMTP = toJSGetter(Bun.getSmtpConstructor);
// In exportAll:
@export(&BunObject.SMTP, .{ .name = getterName("SMTP") });
```
5. Implement getter in your module:
```zig
pub fn getSmtpConstructor(globalThis: *JSC.JSGlobalObject, _: *JSC.JSObject) JSC.JSValue {
return JSC.API.JSEmailClient.getConstructor(globalThis);
}
```
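Once all five steps are in place, the hypothetical SMTP module surfaces to JavaScript roughly like this (a sketch; the option and payload shapes are invented for illustration, not a real Bun API):
```ts
// `Bun.SMTP` is the getter registered above; it returns the EmailClient constructor.
const client = new Bun.SMTP({ host: "smtp.example.com", port: 587 }); // options are hypothetical
await client.verify(); // `verify` proto method from js_bindings.classes.ts
await client.send({ to: "dev@example.com", subject: "hello" }); // payload shape is hypothetical
client.close();
```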
## Best Practices
1. **Follow Naming Conventions**: Align your naming with existing patterns
2. **Reference Existing Modules**: Study similar modules like Valkey or S3Client for guidance
3. **Memory Management**: Be careful with memory management and reference counting
4. **Error Handling**: Use `bun.JSError!JSValue` for proper error propagation
5. **Documentation**: Add JSDoc comments to your JavaScript bindings
6. **Testing**: Add tests for your new functionality
## Common Gotchas
- Be sure to handle reference counting properly with `ref()`/`deref()`
- Always implement proper cleanup in `deinit()` and `finalize()`
- For network operations, manage socket lifetimes correctly
- Use `JSC.Codegen` correctly to generate necessary binding code
## Related Files
- `src/bun.js/bindings/BunObject+exports.h`: Registration of getters and functions
- `src/bun.js/api/BunObject.zig`: Implementation of getters and object creation
- `src/bun.js/api/BunObject.classes.ts`: Class definitions
- `.cursor/rules/zig-javascriptcore-classes.mdc`: More details on class bindings
## Additional Resources
For more detailed information on specific topics:
- See `zig-javascriptcore-classes.mdc` for details on creating JS class bindings
- Review existing modules like `valkey`, `sqlite`, or `s3` for real-world examples

View File

@@ -1,91 +0,0 @@
---
description: Writing tests for Bun
globs:
---
# Writing tests for Bun
## Where tests are found
You'll find all of Bun's tests in the `test/` directory.
* `test/`
* `cli/` - CLI command tests, like `bun install` or `bun init`
* `js/` - JavaScript & TypeScript tests
* `bun/` - `Bun` APIs tests, separated by category, for example: `glob/` for `Bun.Glob` tests
* `node/` - Node.js module tests, separated by module, for example: `assert/` for `node:assert` tests
* `test/` - Vendored Node.js tests, taken from the Node.js repository (does not conform to Bun's test style)
* `web/` - Web API tests, separated by category, for example: `fetch/` for `Request` and `Response` tests
* `third_party/` - npm package tests, to validate that basic usage works in Bun
* `napi/` - N-API tests
* `v8/` - V8 C++ API tests
* `bundler/` - Bundler, transpiler, CSS, and `bun build` tests
* `regression/issue/[number]` - Regression tests, always make one when fixing a particular issue
## How tests are written
Bun's tests are written as JavaScript and TypeScript files using Jest-style APIs, like `test`, `describe`, and `expect`. They are run with Bun's own test runner, `bun test`.
```js
import { describe, test, expect } from "bun:test";
import assert, { AssertionError } from "assert";
describe("assert(expr)", () => {
test.each([true, 1, "foo"])(`assert(%p) does not throw`, expr => {
expect(() => assert(expr)).not.toThrow();
});
test.each([false, 0, "", null, undefined])(`assert(%p) throws`, expr => {
expect(() => assert(expr)).toThrow(AssertionError);
});
});
```
## Testing conventions
* See `test/harness.ts` for common test utilities and helpers
* Be rigorous and test for edge-cases and unexpected inputs
* Use data-driven tests, e.g. `test.each`, to reduce boilerplate when possible
* When you need to test Bun as a CLI, use the following pattern:
```js
import { test, expect } from "bun:test";
import { spawn } from "bun";
import { bunExe, bunEnv } from "harness";
test("bun --version", async () => {
const { exited, stdout: stdoutStream, stderr: stderrStream } = spawn({
cmd: [bunExe(), "--version"],
env: bunEnv,
stdout: "pipe",
stderr: "pipe",
});
const [ exitCode, stdout, stderr ] = await Promise.all([
exited,
new Response(stdoutStream).text(),
new Response(stderrStream).text(),
]);
expect({ exitCode, stdout, stderr }).toMatchObject({
exitCode: 0,
stdout: expect.stringContaining(Bun.version),
stderr: "",
});
});
```
## Before writing a test
* If you are fixing a bug, write the test first and make sure it fails (as expected) with the canary version of Bun
* If you are fixing a Node.js compatibility bug, create a throw-away snippet of code and test that it works as you expect in Node.js, then that it fails (as expected) with the canary version of Bun
* When the expected behavior is ambiguous, defer to matching what happens in Node.js
* Always attempt to find related tests in an existing test file before creating a new test file

View File

@@ -1,509 +0,0 @@
---
description: How Zig works with JavaScriptCore bindings generator
globs:
alwaysApply: false
---
# Bun's JavaScriptCore Class Bindings Generator
This document explains how Bun's class bindings generator works to bridge Zig and JavaScript code through JavaScriptCore (JSC).
## Architecture Overview
Bun's binding system creates a seamless bridge between JavaScript and Zig, allowing Zig implementations to be exposed as JavaScript classes. The system has several key components:
1. **Zig Implementation** (.zig files)
2. **JavaScript Interface Definition** (.classes.ts files)
3. **Generated Code** (C++/Zig files that connect everything)
## Class Definition Files
### JavaScript Interface (.classes.ts)
The `.classes.ts` files define the JavaScript API using a declarative approach:
```typescript
// Example: encoding.classes.ts
define({
name: "TextDecoder",
constructor: true,
JSType: "object",
finalize: true,
proto: {
decode: {
// Function definition
args: 1,
},
encoding: {
// Getter with caching
getter: true,
cache: true,
},
fatal: {
// Read-only property
getter: true,
},
ignoreBOM: {
// Read-only property
getter: true,
},
},
});
```
Each class definition specifies:
- The class name
- Whether it has a constructor
- JavaScript type (object, function, etc.)
- Properties and methods in the `proto` field
- Caching strategy for properties
- Finalization requirements
### Zig Implementation (.zig)
The Zig files implement the native functionality:
```zig
// Example: TextDecoder.zig
pub const TextDecoder = struct {
// Expose generated bindings as `js` namespace with trait conversion methods
pub const js = JSC.Codegen.JSTextDecoder;
pub const toJS = js.toJS;
pub const fromJS = js.fromJS;
pub const fromJSDirect = js.fromJSDirect;
// Internal state
encoding: []const u8,
fatal: bool,
ignoreBOM: bool,
// Constructor implementation - note use of globalObject
pub fn constructor(
globalObject: *JSGlobalObject,
callFrame: *JSC.CallFrame,
) bun.JSError!*TextDecoder {
// Implementation
return bun.new(TextDecoder, .{
// Fields
});
}
// Prototype methods - note return type includes JSError
pub fn decode(
this: *TextDecoder,
globalObject: *JSGlobalObject,
callFrame: *JSC.CallFrame,
) bun.JSError!JSC.JSValue {
// Implementation
}
// Getters
pub fn getEncoding(this: *TextDecoder, globalObject: *JSGlobalObject) JSC.JSValue {
return JSC.JSValue.createStringFromUTF8(globalObject, this.encoding);
}
pub fn getFatal(this: *TextDecoder, globalObject: *JSGlobalObject) JSC.JSValue {
return JSC.JSValue.jsBoolean(this.fatal);
}
// Cleanup - note standard pattern of using deinit/deref
fn deinit(this: *TextDecoder) void {
// Release any retained resources
// Free the pointer at the end.
bun.destroy(this);
}
// Finalize - called by JS garbage collector. This should call deinit, or deref if reference counted.
pub fn finalize(this: *TextDecoder) void {
this.deinit();
}
};
```
Key components in the Zig file:
- The struct containing native state
- `pub const js = JSC.Codegen.JS<ClassName>` to include generated code
- Constructor and methods using `bun.JSError!JSValue` return type for proper error handling
- Consistent use of `globalObject` parameter name instead of `ctx`
- Methods matching the JavaScript interface
- Getters/setters for properties
- Proper resource cleanup pattern with `deinit()` and `finalize()`
- Update `src/bun.js/bindings/generated_classes_list.zig` to include the new class
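That last registration is usually a single line. A sketch, assuming the file keeps roughly this shape (the import path shown is hypothetical):
```zig
// src/bun.js/bindings/generated_classes_list.zig (sketch)
pub const Classes = struct {
    // ...existing entries...
    pub const TextDecoder = @import("../webcore/encoding.zig").TextDecoder;
};
```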
## Code Generation System
The binding generator produces C++ code that connects JavaScript and Zig:
1. **JSC Class Structure**: Creates C++ classes for the JS object, prototype, and constructor
2. **Memory Management**: Handles GC integration through JSC's WriteBarrier
3. **Method Binding**: Connects JS function calls to Zig implementations
4. **Type Conversion**: Converts between JS values and Zig types
5. **Property Caching**: Implements the caching system for properties
The generated C++ code includes:
- A JSC wrapper class (`JSTextDecoder`)
- A prototype class (`JSTextDecoderPrototype`)
- A constructor function (`JSTextDecoderConstructor`)
- Function bindings (`TextDecoderPrototype__decodeCallback`)
- Property getters/setters (`TextDecoderPrototype__encodingGetterWrap`)
## CallFrame Access
The `CallFrame` object provides access to JavaScript execution context:
```zig
pub fn decode(
this: *TextDecoder,
globalObject: *JSGlobalObject,
callFrame: *JSC.CallFrame
) bun.JSError!JSC.JSValue {
// Get arguments
const input = callFrame.argument(0);
const options = callFrame.argument(1);
// Get this value
const thisValue = callFrame.thisValue();
// Implementation with error handling
if (input.isUndefinedOrNull()) {
return globalObject.throw("Input cannot be null or undefined", .{});
}
// Return value or throw error
return JSC.JSValue.jsString(globalObject, "result");
}
```
CallFrame methods include:
- `argument(i)`: Get the i-th argument
- `argumentCount()`: Get the number of arguments
- `thisValue()`: Get the `this` value
- `callee()`: Get the function being called
## Property Caching and GC-Owned Values
The `cache: true` option in property definitions enables JSC's WriteBarrier to efficiently store values:
```typescript
encoding: {
getter: true,
cache: true, // Enable caching
}
```
### C++ Implementation
In the generated C++ code, caching uses JSC's WriteBarrier:
```cpp
JSC_DEFINE_CUSTOM_GETTER(TextDecoderPrototype__encodingGetterWrap, (...)) {
auto& vm = JSC::getVM(lexicalGlobalObject);
Zig::GlobalObject *globalObject = reinterpret_cast<Zig::GlobalObject*>(lexicalGlobalObject);
auto throwScope = DECLARE_THROW_SCOPE(vm);
JSTextDecoder* thisObject = jsCast<JSTextDecoder*>(JSValue::decode(encodedThisValue));
JSC::EnsureStillAliveScope thisArg = JSC::EnsureStillAliveScope(thisObject);
// Check for cached value and return if present
if (JSValue cachedValue = thisObject->m_encoding.get())
return JSValue::encode(cachedValue);
// Get value from Zig implementation
JSC::JSValue result = JSC::JSValue::decode(
TextDecoderPrototype__getEncoding(thisObject->wrapped(), globalObject)
);
RETURN_IF_EXCEPTION(throwScope, {});
// Store in cache for future access
thisObject->m_encoding.set(vm, thisObject, result);
RELEASE_AND_RETURN(throwScope, JSValue::encode(result));
}
```
### Zig Accessor Functions
For each cached property, the generator creates Zig accessor functions that allow Zig code to work with these GC-owned values:
```zig
// External function declarations
extern fn TextDecoderPrototype__encodingSetCachedValue(JSC.JSValue, *JSC.JSGlobalObject, JSC.JSValue) callconv(JSC.conv) void;
extern fn TextDecoderPrototype__encodingGetCachedValue(JSC.JSValue) callconv(JSC.conv) JSC.JSValue;
/// `TextDecoder.encoding` setter
/// This value will be visited by the garbage collector.
pub fn encodingSetCached(thisValue: JSC.JSValue, globalObject: *JSC.JSGlobalObject, value: JSC.JSValue) void {
JSC.markBinding(@src());
TextDecoderPrototype__encodingSetCachedValue(thisValue, globalObject, value);
}
/// `TextDecoder.encoding` getter
/// This value will be visited by the garbage collector.
pub fn encodingGetCached(thisValue: JSC.JSValue) ?JSC.JSValue {
JSC.markBinding(@src());
const result = TextDecoderPrototype__encodingGetCachedValue(thisValue);
if (result == .zero)
return null;
return result;
}
```
### Benefits of GC-Owned Values
This system provides several key benefits:
1. **Automatic Memory Management**: The JavaScriptCore GC tracks and manages these values
2. **Proper Garbage Collection**: The WriteBarrier ensures values are properly visited during GC
3. **Consistent Access**: Zig code can easily get/set these cached JS values
4. **Performance**: Cached values avoid repeated computation or serialization
### Use Cases
GC-owned cached values are particularly useful for:
1. **Computed Properties**: Store expensive computation results
2. **Lazily Created Objects**: Create objects only when needed, then cache them
3. **References to Other Objects**: Store references to other JS objects that need GC tracking
4. **Memoization**: Cache results based on input parameters
The WriteBarrier mechanism ensures that any JS values stored in this way are properly tracked by the garbage collector.
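As a concrete use from the Zig side, code can consult and populate the cached slot itself, mirroring what the generated C++ getter wrapper does. A minimal sketch, assuming the generated accessors above and a getter signature that receives `thisValue` (the generator passes it for cached properties; exact signatures may vary):
```zig
pub fn getEncodingCached(this: *TextDecoder, thisValue: JSC.JSValue, globalObject: *JSC.JSGlobalObject) JSC.JSValue {
    // Fast path: the WriteBarrier already holds a JS string for this property.
    if (encodingGetCached(thisValue)) |cached| {
        return cached;
    }
    // Slow path: build the value once, store it in the GC-owned slot, return it.
    const value = JSC.JSValue.createStringFromUTF8(globalObject, this.encoding);
    encodingSetCached(thisValue, globalObject, value);
    return value;
}
```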
## Memory Management and Finalization
The binding system handles memory management across the JavaScript/Zig boundary:
1. **Object Creation**: JavaScript `new TextDecoder()` creates both a JS wrapper and a Zig struct
2. **Reference Tracking**: JSC's GC tracks all JS references to the object
3. **Finalization**: When the JS object is collected, the finalizer releases Zig resources
Bun uses a consistent pattern for resource cleanup:
```zig
// Resource cleanup method - separate from finalization
pub fn deinit(this: *TextDecoder) void {
// Release resources like strings
this._encoding.deref(); // String deref pattern
// Free any buffers
if (this.buffer) |buffer| {
bun.default_allocator.free(buffer);
}
}
// Called by the GC when object is collected
pub fn finalize(this: *TextDecoder) void {
JSC.markBinding(@src()); // For debugging
this.deinit(); // Clean up resources
bun.default_allocator.destroy(this); // Free the object itself
}
```
Some objects that hold references to other JS objects use `.deref()` instead:
```zig
pub fn finalize(this: *SocketAddress) void {
JSC.markBinding(@src());
this._presentation.deref(); // Release references
this.destroy();
}
```
## Error Handling with JSError
Bun uses `bun.JSError!JSValue` return type for proper error handling:
```zig
pub fn decode(
this: *TextDecoder,
globalObject: *JSGlobalObject,
callFrame: *JSC.CallFrame
) bun.JSError!JSC.JSValue {
// Throwing an error
if (callFrame.argumentCount() < 1) {
return globalObject.throw("Missing required argument", .{});
}
// Or returning a success value
return JSC.JSValue.jsString(globalObject, "Success!");
}
```
This pattern allows Zig functions to:
1. Return JavaScript values on success
2. Throw JavaScript exceptions on error
3. Propagate errors automatically through the call stack
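Since these are ordinary Zig error unions, intermediate functions propagate a pending JavaScript exception with plain `try`. A sketch (`validateInput` and `decodeSlice` are hypothetical helpers):
```zig
fn validateInput(globalObject: *JSGlobalObject, input: JSC.JSValue) bun.JSError!void {
    if (input.isUndefinedOrNull()) {
        return globalObject.throw("Input cannot be null or undefined", .{});
    }
}

pub fn decode(
    this: *TextDecoder,
    globalObject: *JSGlobalObject,
    callFrame: *JSC.CallFrame,
) bun.JSError!JSC.JSValue {
    const input = callFrame.argument(0);
    // `try` rethrows: if validateInput threw a JS exception, decode returns
    // the same error and the binding layer surfaces it to JavaScript.
    try validateInput(globalObject, input);
    return this.decodeSlice(globalObject, input); // hypothetical helper
}
```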
## Type Safety and Error Handling
The binding system includes robust error handling:
```cpp
// Example of type checking in generated code
JSTextDecoder* thisObject = jsDynamicCast<JSTextDecoder*>(callFrame->thisValue());
if (UNLIKELY(!thisObject)) {
scope.throwException(lexicalGlobalObject,
Bun::createInvalidThisError(lexicalGlobalObject, callFrame->thisValue(), "TextDecoder"_s));
return {};
}
```
## Prototypal Inheritance
The binding system creates proper JavaScript prototype chains:
1. **Constructor**: JSTextDecoderConstructor with standard .prototype property
2. **Prototype**: JSTextDecoderPrototype with methods and properties
3. **Instances**: Each `JSTextDecoder` instance, with `__proto__` pointing to the prototype
This ensures JavaScript inheritance works as expected:
```cpp
// From generated code
void JSTextDecoderConstructor::finishCreation(VM& vm, JSC::JSGlobalObject* globalObject, JSTextDecoderPrototype* prototype)
{
Base::finishCreation(vm, 0, "TextDecoder"_s, PropertyAdditionMode::WithoutStructureTransition);
// Set up the prototype chain
putDirectWithoutTransition(vm, vm.propertyNames->prototype, prototype, PropertyAttribute::DontEnum | PropertyAttribute::DontDelete | PropertyAttribute::ReadOnly);
ASSERT(inherits(info()));
}
```
## Performance Considerations
The binding system is optimized for performance:
1. **Direct Pointer Access**: JavaScript objects maintain a direct pointer to Zig objects
2. **Property Caching**: WriteBarrier caching avoids repeated native calls for stable properties
3. **Memory Management**: JSC garbage collection integrated with Zig memory management
4. **Type Conversion**: Fast paths for common JavaScript/Zig type conversions
## Creating a New Class Binding
To create a new class binding in Bun:
1. **Define the class interface** in a `.classes.ts` file:
```typescript
define({
name: "MyClass",
constructor: true,
finalize: true,
proto: {
myMethod: {
args: 1,
},
myProperty: {
getter: true,
cache: true,
},
},
});
```
2. **Implement the native functionality** in a `.zig` file:
```zig
pub const MyClass = struct {
// Generated bindings
pub const js = JSC.Codegen.JSMyClass;
pub const toJS = js.toJS;
pub const fromJS = js.fromJS;
pub const fromJSDirect = js.fromJSDirect;
// State
value: []const u8,
pub const new = bun.TrivialNew(@This());
// Constructor
pub fn constructor(
globalObject: *JSGlobalObject,
callFrame: *JSC.CallFrame,
) bun.JSError!*MyClass {
const arg = callFrame.argument(0);
// Implementation
}
// Method
pub fn myMethod(
this: *MyClass,
globalObject: *JSGlobalObject,
callFrame: *JSC.CallFrame,
) bun.JSError!JSC.JSValue {
// Implementation
}
// Getter
pub fn getMyProperty(this: *MyClass, globalObject: *JSGlobalObject) JSC.JSValue {
return JSC.JSValue.jsString(globalObject, this.value);
}
// Resource cleanup
pub fn deinit(this: *MyClass) void {
// Clean up resources
}
pub fn finalize(this: *MyClass) void {
this.deinit();
bun.destroy(this);
}
};
```
3. **The binding generator** creates all necessary C++ and Zig glue code to connect JavaScript and Zig, including:
- C++ class definitions
- Method and property bindings
- Memory management utilities
- GC integration code
## Generated Code Structure
The binding generator produces several components:
### 1. C++ Classes
For each Zig class, the system generates:
- **JS<Class>**: Main wrapper that holds a pointer to the Zig object (`JSTextDecoder`)
- **JS<Class>Prototype**: Contains methods and properties (`JSTextDecoderPrototype`)
- **JS<Class>Constructor**: Implementation of the JavaScript constructor (`JSTextDecoderConstructor`)
### 2. C++ Methods and Properties
- **Method Callbacks**: `TextDecoderPrototype__decodeCallback`
- **Property Getters/Setters**: `TextDecoderPrototype__encodingGetterWrap`
- **Initialization Functions**: `finishCreation` methods for setting up the class
### 3. Zig Bindings
- **External Function Declarations**:
```zig
extern fn TextDecoderPrototype__decode(*TextDecoder, *JSC.JSGlobalObject, *JSC.CallFrame) callconv(JSC.conv) JSC.EncodedJSValue;
```
- **Cached Value Accessors**:
```zig
pub fn encodingGetCached(thisValue: JSC.JSValue) ?JSC.JSValue { ... }
pub fn encodingSetCached(thisValue: JSC.JSValue, globalObject: *JSC.JSGlobalObject, value: JSC.JSValue) void { ... }
```
- **Constructor Helpers**:
```zig
pub fn create(globalObject: *JSC.JSGlobalObject) bun.JSError!JSC.JSValue { ... }
```
### 4. GC Integration
- **Memory Cost Calculation**: `estimatedSize` method
- **Child Visitor Methods**: `visitChildrenImpl` and `visitAdditionalChildren`
- **Heap Analysis**: `analyzeHeap` for debugging memory issues
This architecture makes it possible to implement high-performance native functionality in Zig while exposing a clean, idiomatic JavaScript API to users.

View File

@@ -1,10 +0,0 @@
# Add commits to ignore in `git blame`. This allows large stylistic refactors to
# avoid mucking up blames.
#
# To configure git to use this, run:
#
# git config blame.ignoreRevsFile .git-blame-ignore-revs
#
4ec410e0d7c5f6a712c323444edbf56b48d432d8 # make @import("bun") work in zig (#19096)
dedd433cbf2e2fe38e51bc166e08fbcc601ad42b # JSValue.undefined -> .jsUndefined()
6b4662ff55f58247cc2fd22e85b4f9805b0950a5 # JSValue.jsUndefined() -> .js_undefined

4
.gitattributes vendored
View File

@@ -15,7 +15,6 @@
*.lock text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.map text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.md text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.mdc text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.mjs text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.mts text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
@@ -52,6 +51,3 @@ test/js/node/test/fixtures linguist-vendored
test/js/node/test/common linguist-vendored
test/js/bun/css/files linguist-vendored
.vscode/*.json linguist-language=JSON-with-Comments
src/cli/init/tsconfig.default.json linguist-language=JSON-with-Comments

5
.github/CODEOWNERS vendored
View File

@@ -1,5 +0,0 @@
# Project
/.github/CODEOWNERS @Jarred-Sumner
# Tests
/test/expectations.txt @Jarred-Sumner

View File

@@ -12,7 +12,7 @@ body:
If you need help or support using Bun, and are not reporting a bug, please
join our [Discord](https://discord.gg/CXdq2DP29u) server, where you can ask questions in the [`#help`](https://discord.gg/32EtH6p7HN) forum.
Make sure you are running the [latest](https://bun.com/docs/installation#upgrading) version of Bun.
Make sure you are running the [latest](https://bun.sh/docs/installation#upgrading) version of Bun.
The bug you are experiencing may already have been fixed.
Please try to include as much information as possible.

View File

@@ -2,44 +2,44 @@ name: 🇹 TypeScript Type Bug Report
description: Report an issue with TypeScript types
labels: [bug, types]
body:
- type: markdown
attributes:
value: |
Thank you for submitting a bug report. It helps make Bun better.
- type: markdown
attributes:
value: |
Thank you for submitting a bug report. It helps make Bun better.
If you need help or support using Bun, and are not reporting a bug, please
join our [Discord](https://discord.gg/CXdq2DP29u) server, where you can ask questions in the [`#help`](https://discord.gg/32EtH6p7HN) forum.
If you need help or support using Bun, and are not reporting a bug, please
join our [Discord](https://discord.gg/CXdq2DP29u) server, where you can ask questions in the [`#help`](https://discord.gg/32EtH6p7HN) forum.
Make sure you are running the [latest](https://bun.com/docs/installation#upgrading) version of Bun.
The bug you are experiencing may already have been fixed.
Make sure you are running the [latest](https://bun.sh/docs/installation#upgrading) version of Bun.
The bug you are experiencing may already have been fixed.
Please try to include as much information as possible.
Please try to include as much information as possible.
- type: input
attributes:
label: What version of Bun is running?
description: Copy the output of `bun --revision`
- type: input
attributes:
label: What platform is your computer?
description: |
For MacOS and Linux: copy the output of `uname -mprs`
For Windows: copy the output of `"$([Environment]::OSVersion | ForEach-Object VersionString) $(if ([Environment]::Is64BitOperatingSystem) { "x64" } else { "x86" })"` in the PowerShell console
- type: textarea
attributes:
label: What steps can reproduce the bug?
description: Explain the bug and provide a code snippet that can reproduce it.
validations:
required: true
- type: textarea
attributes:
label: What is the expected behavior?
description: If possible, please provide text instead of a screenshot.
- type: textarea
attributes:
label: What do you see instead?
description: If possible, please provide text instead of a screenshot.
- type: textarea
attributes:
label: Additional information
description: Is there anything else you think we should know?
- type: input
attributes:
label: What version of Bun is running?
description: Copy the output of `bun --revision`
- type: input
attributes:
label: What platform is your computer?
description: |
For MacOS and Linux: copy the output of `uname -mprs`
For Windows: copy the output of `"$([Environment]::OSVersion | ForEach-Object VersionString) $(if ([Environment]::Is64BitOperatingSystem) { "x64" } else { "x86" })"` in the PowerShell console
- type: textarea
attributes:
label: What steps can reproduce the bug?
description: Explain the bug and provide a code snippet that can reproduce it.
validations:
required: true
- type: textarea
attributes:
label: What is the expected behavior?
description: If possible, please provide text instead of a screenshot.
- type: textarea
attributes:
label: What do you see instead?
description: If possible, please provide text instead of a screenshot.
- type: textarea
attributes:
label: Additional information
description: Is there anything else you think we should know?

View File

@@ -2,7 +2,6 @@ name: Prefilled crash report
description: Report a crash in Bun
labels:
- crash
- needs triage
body:
- type: markdown
attributes:

View File

@@ -4,7 +4,7 @@ description: An internal version of the 'oven-sh/setup-bun' action.
inputs:
bun-version:
type: string
description: "The version of bun to install: 'latest', 'canary', 'bun-v1.2.0', etc."
description: "The version of bun to install: 'latest', 'canary', 'bun-v1.0.0', etc."
default: latest
required: false
baseline:

View File

@@ -28,7 +28,7 @@ This adds a new flag --bail to bun test. When set, it will stop running tests af
- [ ] I checked the lifetime of memory allocated to verify it's (1) freed and (2) only freed when it should be
- [ ] I included a test for the new code, or an existing test covers it
- [ ] JSValue used outside of the stack is either wrapped in a JSC.Strong or is JSValueProtect'ed
- [ ] JSValue used outside outside of the stack is either wrapped in a JSC.Strong or is JSValueProtect'ed
- [ ] I wrote TypeScript/JavaScript tests and they pass locally (`bun-debug test test-file-name.test`)
-->

41
.github/workflows/clang-format.yml vendored Normal file
View File

@@ -0,0 +1,41 @@
name: clang-format
permissions:
contents: write
on:
workflow_call:
workflow_dispatch:
pull_request:
merge_group:
env:
BUN_VERSION: "1.1.44"
LLVM_VERSION: "18.1.8"
LLVM_VERSION_MAJOR: "18"
jobs:
clang-format:
name: clang-format
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: ${{ env.BUN_VERSION }}
- name: Install LLVM
run: |
curl -fsSL https://apt.llvm.org/llvm.sh | sudo bash -s -- ${{ env.LLVM_VERSION_MAJOR }} all
- name: Clang Format
env:
LLVM_VERSION: ${{ env.LLVM_VERSION }}
run: |
bun run clang-format
- name: Commit
uses: stefanzweifel/git-auto-commit-action@v5
with:
commit_message: "`bun run clang-format`"

41
.github/workflows/clang-tidy.yml vendored Normal file
View File

@@ -0,0 +1,41 @@
name: clang-tidy
permissions:
contents: write
on:
workflow_call:
workflow_dispatch:
pull_request:
merge_group:
env:
BUN_VERSION: "1.1.44"
LLVM_VERSION: "18.1.8"
LLVM_VERSION_MAJOR: "18"
jobs:
clang-tidy:
name: clang-tidy
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: ${{ env.BUN_VERSION }}
- name: Install LLVM
run: |
curl -fsSL https://apt.llvm.org/llvm.sh | sudo bash -s -- ${{ env.LLVM_VERSION_MAJOR }} all
- name: Clang Tidy
env:
LLVM_VERSION: ${{ env.LLVM_VERSION }}
run: |
bun run clang-tidy:diff
- name: Commit
uses: stefanzweifel/git-auto-commit-action@v5
with:
commit_message: "`bun run clang-tidy`"

View File

@@ -1,35 +0,0 @@
name: Claude Code
on:
issue_comment:
types: [created]
pull_request_review_comment:
types: [created]
issues:
types: [opened, assigned]
pull_request_review:
types: [submitted]
jobs:
claude:
if: |
(github.event_name == 'issue_comment' && contains(github.event.comment.body, '@claude')) ||
(github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude')) ||
(github.event_name == 'pull_request_review' && contains(github.event.review.body, '@claude')) ||
(github.event_name == 'issues' && (contains(github.event.issue.body, '@claude') || contains(github.event.issue.title, '@claude')))
runs-on: ubuntu-latest
permissions:
contents: read
id-token: write
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Run Claude Code
id: claude
uses: anthropics/claude-code-action@beta
with:
anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}

View File

@@ -1,58 +0,0 @@
name: Codex Test Sync
on:
pull_request:
types: [labeled, opened]
env:
BUN_VERSION: "1.2.15"
jobs:
sync-node-tests:
runs-on: ubuntu-latest
if: |
(github.event.action == 'labeled' && github.event.label.name == 'codex') ||
(github.event.action == 'opened' && contains(github.event.pull_request.labels.*.name, 'codex')) ||
contains(github.head_ref, 'codex')
permissions:
contents: write
pull-requests: write
steps:
- name: Checkout
uses: actions/checkout@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
fetch-depth: 0
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: ${{ env.BUN_VERSION }}
- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@v44
with:
files: |
test/js/node/test/parallel/**/*.{js,mjs,ts}
test/js/node/test/sequential/**/*.{js,mjs,ts}
- name: Sync tests
if: steps.changed-files.outputs.any_changed == 'true'
shell: bash
run: |
echo "Changed test files:"
echo "${{ steps.changed-files.outputs.all_changed_files }}"
# Process each changed test file
for file in ${{ steps.changed-files.outputs.all_changed_files }}; do
# Extract test name from file path
test_name=$(basename "$file" | sed 's/\.[^.]*$//')
echo "Syncing test: $test_name"
bun node:test:cp "$test_name"
done
- name: Commit changes
uses: stefanzweifel/git-auto-commit-action@v5
with:
commit_message: "Sync Node.js tests with upstream"

View File

@@ -4,7 +4,6 @@ on:
push:
paths:
- "docs/**"
- "packages/bun-types/**.d.ts"
- "CONTRIBUTING.md"
branches:
- main

View File

@@ -1,59 +0,0 @@
name: autofix.ci
permissions:
contents: read
on:
workflow_call:
workflow_dispatch:
pull_request:
merge_group:
push:
branches: ["main"]
env:
BUN_VERSION: "1.2.11"
LLVM_VERSION: "19.1.7"
LLVM_VERSION_MAJOR: "19"
jobs:
autofix:
name: Format
runs-on: ubuntu-latest
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Configure Git
run: |
git config --global core.autocrlf true
git config --global core.ignorecase true
git config --global core.precomposeUnicode true
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: ${{ env.BUN_VERSION }}
- name: Setup Dependencies
run: |
bun install
- name: Install LLVM
run: |
curl -fsSL https://apt.llvm.org/llvm.sh | sudo bash -s -- ${{ env.LLVM_VERSION_MAJOR }} all
- name: Setup Zig
uses: mlugg/setup-zig@v1
with:
version: 0.14.0
- name: Zig Format
run: |
bun scripts/zig-remove-unreferenced-top-level-decls.ts src/
zig fmt src
bun scripts/sortImports src
zig fmt src
- name: Prettier Format
run: |
bun run prettier
- name: Clang Format
run: |
bun run clang-format
- uses: autofix-ci/action@635ffb0c9798bd160680f18fd73371e355b85f27

View File

@@ -1,41 +0,0 @@
name: Glob Sources
permissions:
contents: write
on:
workflow_call:
workflow_dispatch:
pull_request:
env:
BUN_VERSION: "1.2.11"
jobs:
glob-sources:
name: Glob Sources
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Configure Git
run: |
git config --global core.autocrlf true
git config --global core.ignorecase true
git config --global core.precomposeUnicode true
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: ${{ env.BUN_VERSION }}
- name: Setup Dependencies
run: |
bun install
- name: Glob sources
run: bun scripts/glob-sources.mjs
- name: Commit
uses: stefanzweifel/git-auto-commit-action@v5
with:
commit_message: "`bun scripts/glob-sources.mjs`"

View File

@@ -69,6 +69,7 @@ jobs:
shell: bash
run: |
LABELS=$(bun scripts/read-issue.ts)
echo "labels=$LABELS" >> $GITHUB_OUTPUT
bun scripts/is-outdated.ts
if [[ -f "is-outdated.txt" ]]; then
@@ -76,19 +77,12 @@ jobs:
fi
if [[ -f "outdated.txt" ]]; then
echo "outdated=$(cat outdated.txt)" >> $GITHUB_OUTPUT
fi
if [[ -f "is-very-outdated.txt" ]]; then
echo "is-very-outdated=true" >> $GITHUB_OUTPUT
LABELS="$LABELS,old-version"
else
echo "is-very-outdated=false" >> $GITHUB_OUTPUT
echo "oudated=$(cat outdated.txt)" >> $GITHUB_OUTPUT
fi
echo "latest=$(cat LATEST)" >> $GITHUB_OUTPUT
echo "labels=$LABELS" >> $GITHUB_OUTPUT
rm -rf is-outdated.txt outdated.txt latest.txt is-very-outdated.txt
rm -rf is-outdated.txt outdated.txt latest.txt
- name: Generate comment text with Sentry Link
if: github.event.label.name == 'crash'
# ignore if fail
@@ -108,14 +102,7 @@ jobs:
if [[ -f "sentry-id.txt" ]]; then
echo "sentry-id=$(cat sentry-id.txt)" >> $GITHUB_OUTPUT
fi
- name: Remove old labels
uses: actions-cool/issues-helper@v3
if: github.event.label.name == 'crash' && steps.add-labels.outputs.is-very-outdated == 'false'
with:
actions: "remove-labels"
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.issue.number }}
labels: old-version
- name: Add labels
uses: actions-cool/issues-helper@v3
if: github.event.label.name == 'crash'
@@ -132,7 +119,7 @@ jobs:
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.issue.number }}
body: |
@${{ github.event.issue.user.login }}, the latest version of Bun is v${{ steps.add-labels.outputs.latest }}, but this crash was reported on Bun v${{ steps.add-labels.outputs.outdated }}.
@${{ github.event.issue.user.login }}, the latest version of Bun is v${{ steps.add-labels.outputs.latest }}, but this crash was reported on Bun v${{ steps.add-labels.outputs.oudated }}.
Are you able to reproduce this crash on the latest version of Bun?
@@ -147,7 +134,7 @@ jobs:
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.issue.number }}
body: |
@${{ github.event.issue.user.login }}, thank you for reporting this crash. The latest version of Bun is v${{ steps.add-labels.outputs.latest }}, but this crash was reported on Bun v${{ steps.add-labels.outputs.outdated }}.
@${{ github.event.issue.user.login }}, thank you for reporting this crash. The latest version of Bun is v${{ steps.add-labels.outputs.latest }}, but this crash was reported on Bun v${{ steps.add-labels.outputs.oudated }}.
Are you able to reproduce this crash on the latest version of Bun?

View File

@@ -5,7 +5,8 @@ on:
workflow_dispatch:
env:
BUN_VERSION: "1.2.10"
BUN_VERSION: "1.1.44"
OXLINT_VERSION: "0.15.0"
jobs:
lint-js:
@@ -18,4 +19,4 @@ jobs:
with:
bun-version: ${{ env.BUN_VERSION }}
- name: Lint
run: bun lint
run: bunx oxlint --config oxlint.json --quiet --format github

View File

@@ -1,55 +0,0 @@
name: Packages CI
on:
push:
branches:
- main
paths:
- "packages/**"
- .prettierrc
- .prettierignore
- tsconfig.json
- oxlint.json
- "!**/*.md"
pull_request:
branches:
- main
paths:
- "packages/**"
- .prettierrc
- .prettierignore
- tsconfig.json
- oxlint.json
- "!**/*.md"
env:
BUN_VERSION: "canary"
jobs:
bun-plugin-svelte:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: ${{ env.BUN_VERSION }}
- name: Install dependencies
run: |
bun install
pushd ./packages/bun-plugin-svelte && bun install
- name: Lint
run: |
bunx oxlint@0.15 --format github --deny-warnings
bunx prettier --config ../../.prettierrc --check .
working-directory: ./packages/bun-plugin-svelte
- name: Check types
run: bun check:types
working-directory: ./packages/bun-plugin-svelte
- name: Test
run: bun test
working-directory: ./packages/bun-plugin-svelte

37
.github/workflows/prettier-format.yml vendored Normal file
View File

@@ -0,0 +1,37 @@
name: prettier-format
permissions:
contents: write
on:
workflow_call:
workflow_dispatch:
pull_request:
merge_group:
env:
BUN_VERSION: "1.1.44"
jobs:
prettier-format:
name: prettier-format
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: ${{ env.BUN_VERSION }}
- name: Setup Dependencies
run: |
bun install
- name: Prettier Format
run: |
bun run prettier:diff
- name: Commit
uses: stefanzweifel/git-auto-commit-action@v5
with:
commit_message: "`bun run prettier:extra`"

View File

@@ -44,10 +44,6 @@ on:
description: Should types be released to npm?
type: boolean
default: false
use-definitelytyped:
description: "Should types be PR'd to DefinitelyTyped?"
type: boolean
default: false
jobs:
sign:
@@ -70,7 +66,7 @@ jobs:
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: "1.2.3"
bun-version: "1.1.44"
- name: Install Dependencies
run: bun install
- name: Sign Release
@@ -98,7 +94,7 @@ jobs:
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: "1.2.3"
bun-version: "1.1.44"
- name: Install Dependencies
run: bun install
- name: Release
@@ -127,7 +123,7 @@ jobs:
if: ${{ env.BUN_VERSION != 'canary' }}
uses: ./.github/actions/setup-bun
with:
bun-version: "1.2.3"
bun-version: "1.1.44"
- name: Setup Bun
if: ${{ env.BUN_VERSION == 'canary' }}
uses: ./.github/actions/setup-bun
@@ -159,52 +155,6 @@ jobs:
with:
package: packages/bun-types/package.json
token: ${{ secrets.NPM_TOKEN }}
definitelytyped:
name: Make pr to DefinitelyTyped to update `bun-types` version
runs-on: ubuntu-latest
needs: npm-types
if: ${{ github.event_name == 'release' || github.event.inputs.use-definitelytyped == 'true' }}
permissions:
contents: read
steps:
- name: Checkout (DefinitelyTyped)
uses: actions/checkout@v4
with:
repository: DefinitelyTyped/DefinitelyTyped
- name: Checkout (bun)
uses: actions/checkout@v4
with:
path: bun
- name: Setup Bun
uses: ./bun/.github/actions/setup-bun
with:
bun-version: "1.2.0"
- id: bun-version
run: echo "BUN_VERSION=${BUN_VERSION#bun-v}" >> "$GITHUB_OUTPUT"
- name: Update bun-types version in package.json
run: |
bun -e '
const file = Bun.file("./types/bun/package.json");
const json = await file.json();
const version = "${{ steps.bun-version.outputs.BUN_VERSION }}";
json.dependencies["bun-types"] = version;
json.version = version.slice(0, version.lastIndexOf(".")) + ".9999";
await file.write(JSON.stringify(json, null, 4) + "\n");
'
- name: Create Pull Request
uses: peter-evans/create-pull-request@v7
if: ${{ env.BUN_LATEST == 'true' && env.BUN_VERSION != 'canary'}}
with:
token: ${{ secrets.ROBOBUN_TOKEN }}
add-paths: ./types/bun/package.json
title: "[bun] update to ${{ steps.bun-version.outputs.BUN_VERSION }}"
commit-message: "[bun] update to ${{ steps.bun-version.outputs.BUN_VERSION }}"
body: |
Update `bun-types` version to ${{ steps.bun-version.outputs.BUN_VERSION }}
https://bun.com/blog/${{ env.BUN_VERSION }}
push-to-fork: oven-sh/DefinitelyTyped
branch: ${{env.BUN_VERSION}}
docker:
name: Release to Dockerhub
runs-on: ubuntu-latest
@@ -231,7 +181,7 @@ jobs:
- name: Checkout
uses: actions/checkout@v4
- name: Setup Docker emulator
uses: docker/setup-qemu-action@v3
uses: docker/setup-qemu-action@v2
- id: buildx
name: Setup Docker buildx
uses: docker/setup-buildx-action@v3
@@ -239,7 +189,7 @@ jobs:
platforms: linux/amd64,linux/arm64
- id: metadata
name: Setup Docker metadata
uses: docker/metadata-action@v5
uses: docker/metadata-action@v4
with:
images: oven/bun
flavor: |
@@ -256,7 +206,7 @@ jobs:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Push to Docker
uses: docker/build-push-action@v6
uses: docker/build-push-action@v5
with:
context: ./dockerhub/${{ matrix.dir || matrix.variant }}
platforms: linux/amd64,linux/arm64
@@ -315,7 +265,7 @@ jobs:
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: "1.2.0"
bun-version: "1.1.44"
- name: Install Dependencies
run: bun install
- name: Release
@@ -359,7 +309,7 @@ jobs:
uses: ./.github/actions/setup-bun
if: ${{ env.BUN_LATEST == 'true' }}
with:
bun-version: "1.2.0"
bun-version: "1.1.44"
- name: Bump version
uses: ./.github/actions/bump
if: ${{ env.BUN_LATEST == 'true' }}

33
.github/workflows/run-lint.yml vendored Normal file
View File

@@ -0,0 +1,33 @@
name: Lint
permissions:
contents: read
env:
LLVM_VERSION: 16
BUN_VERSION: "1.1.44"
on:
workflow_call:
jobs:
lint:
name: Lint
runs-on: ubuntu-latest
outputs:
text_output: ${{ steps.lint.outputs.text_output }}
json_output: ${{ steps.lint.outputs.json_output }}
count: ${{ steps.lint.outputs.count }}
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: ${{ env.BUN_VERSION }}
- name: Install Dependencies
run: |
bun --cwd=packages/bun-internal-test install
- name: Lint
id: lint
run: |
bun packages/bun-internal-test/src/linter.ts

View File

@@ -9,7 +9,7 @@ on:
required: true
env:
BUN_VERSION: "1.2.0"
BUN_VERSION: "1.1.44"
jobs:
bump:

View File

@@ -50,14 +50,9 @@ jobs:
exit 1
fi
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/c-ares/c-ares/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1
fi
LATEST_SHA=$(curl -sL "https://api.github.com/repos/c-ares/c-ares/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
LATEST_SHA=$(curl -sL "https://api.github.com/repos/c-ares/c-ares/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1
fi
@@ -94,6 +89,4 @@ jobs:
Updates c-ares to version ${{ steps.check-version.outputs.tag }}
Compare: https://github.com/c-ares/c-ares/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-cares.yml)

View File

@@ -50,14 +50,9 @@ jobs:
exit 1
fi
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/libarchive/libarchive/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1
fi
LATEST_SHA=$(curl -sL "https://api.github.com/repos/libarchive/libarchive/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
LATEST_SHA=$(curl -sL "https://api.github.com/repos/libarchive/libarchive/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1
fi
@@ -94,6 +89,4 @@ jobs:
Updates libarchive to version ${{ steps.check-version.outputs.tag }}
Compare: https://github.com/libarchive/libarchive/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-libarchive.yml)

View File

@@ -50,14 +50,9 @@ jobs:
exit 1
fi
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/ebiggers/libdeflate/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1
fi
LATEST_SHA=$(curl -sL "https://api.github.com/repos/ebiggers/libdeflate/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
LATEST_SHA=$(curl -sL "https://api.github.com/repos/ebiggers/libdeflate/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1
fi
@@ -94,6 +89,4 @@ jobs:
Updates libdeflate to version ${{ steps.check-version.outputs.tag }}
Compare: https://github.com/ebiggers/libdeflate/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-libdeflate.yml)

View File

@@ -50,14 +50,9 @@ jobs:
exit 1
fi
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1
fi
LATEST_SHA=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
LATEST_SHA=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1
fi
@@ -94,6 +89,4 @@ jobs:
Updates lolhtml to version ${{ steps.check-version.outputs.tag }}
Compare: https://github.com/cloudflare/lol-html/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-lolhtml.yml)

View File

@@ -50,14 +50,9 @@ jobs:
exit 1
fi
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1
fi
LATEST_SHA=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
LATEST_SHA=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1
fi
@@ -94,6 +89,4 @@ jobs:
Updates lshpack to version ${{ steps.check-version.outputs.tag }}
Compare: https://github.com/litespeedtech/ls-hpack/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-lshpack.yml)

View File

@@ -1,82 +0,0 @@
name: Daily Root Certs Update Check
on:
schedule:
- cron: "0 0 * * *" # Runs at 00:00 UTC every day
workflow_dispatch: # Allows manual trigger
jobs:
check-and-update:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: latest
- name: Generate root certs and capture output
id: generate-certs
run: |
cd packages/bun-usockets/
OUTPUT=$(bun generate-root-certs.mjs -v)
echo "cert_output<<EOF" >> $GITHUB_ENV
echo "$OUTPUT" >> $GITHUB_ENV
echo "EOF" >> $GITHUB_ENV
- name: Check for changes and stage files
id: check-changes
run: |
if [[ -n "$(git status --porcelain)" ]]; then
echo "Found changes, staging modified files..."
git config --global user.name "github-actions[bot]"
git config --global user.email "github-actions[bot]@users.noreply.github.com"
# Get list of modified files and add them
git status --porcelain | while read -r status file; do
# Remove leading status and whitespace
file=$(echo "$file" | sed 's/^.* //')
echo "Adding changed file: $file"
git add "$file"
done
echo "changes=true" >> $GITHUB_OUTPUT
# Store the list of changed files
CHANGED_FILES=$(git status --porcelain)
echo "changed_files<<EOF" >> $GITHUB_ENV
echo "$CHANGED_FILES" >> $GITHUB_ENV
echo "EOF" >> $GITHUB_ENV
else
echo "No changes detected"
echo "changes=false" >> $GITHUB_OUTPUT
fi
- name: Create Pull Request
if: steps.check-changes.outputs.changes == 'true'
uses: peter-evans/create-pull-request@v5
with:
token: ${{ secrets.GITHUB_TOKEN }}
commit-message: "update(root_certs): Update root certificates $(date +'%Y-%m-%d')"
title: "update(root_certs) $(date +'%Y-%m-%d')"
body: |
Automated root certificates update
${{ env.cert_output }}
## Changed Files:
```
${{ env.changed_files }}
```
branch: certs/update-root-certs-${{ github.run_number }}
base: main
delete-branch: true
labels:
- "automation"
- "root-certs"

View File

@@ -55,7 +55,7 @@ jobs:
# Convert numeric version to semantic version for display
LATEST_MAJOR=$((10#$LATEST_VERSION_NUM / 1000000))
LATEST_MINOR=$((($LATEST_VERSION_NUM / 10000) % 100))
LATEST_MINOR=$((($LATEST_VERSION_NUM / 1000) % 1000))
LATEST_PATCH=$((10#$LATEST_VERSION_NUM % 1000))
LATEST_VERSION="$LATEST_MAJOR.$LATEST_MINOR.$LATEST_PATCH"
@@ -106,6 +106,4 @@ jobs:
Updates SQLite to version ${{ steps.check-version.outputs.latest }}
Compare: https://sqlite.org/src/vdiff?from=${{ steps.check-version.outputs.current }}&to=${{ steps.check-version.outputs.latest }}
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-sqlite3.yml)

View File

@@ -1,99 +0,0 @@
name: Update zstd
on:
schedule:
- cron: "0 1 * * 0"
workflow_dispatch:
jobs:
check-update:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- uses: actions/checkout@v4
- name: Check zstd version
id: check-version
run: |
set -euo pipefail
# Extract the commit hash from the line after COMMIT
CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/CloneZstd.cmake)
if [ -z "$CURRENT_VERSION" ]; then
echo "Error: Could not find COMMIT line in CloneZstd.cmake"
exit 1
fi
# Validate that it looks like a git hash
if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
echo "Error: Invalid git hash format in CloneZstd.cmake"
echo "Found: $CURRENT_VERSION"
echo "Expected: 40 character hexadecimal string"
exit 1
fi
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/facebook/zstd/releases/latest)
if [ -z "$LATEST_RELEASE" ]; then
echo "Error: Failed to fetch latest release from GitHub API"
exit 1
fi
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
echo "Error: Could not extract tag name from GitHub API response"
exit 1
fi
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/facebook/zstd/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1
fi
LATEST_SHA=$(curl -sL "https://api.github.com/repos/facebook/zstd/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
exit 1
fi
if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
echo "Error: Invalid SHA format received from GitHub"
echo "Found: $LATEST_SHA"
echo "Expected: 40 character hexadecimal string"
exit 1
fi
echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT
- name: Update version if needed
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
run: |
set -euo pipefail
# Handle multi-line format where COMMIT and its value are on separate lines
sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/CloneZstd.cmake
- name: Create Pull Request
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
uses: peter-evans/create-pull-request@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
add-paths: |
cmake/targets/CloneZstd.cmake
commit-message: "deps: update zstd to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update zstd to ${{ steps.check-version.outputs.tag }}"
delete-branch: true
branch: deps/update-zstd-${{ github.run_number }}
body: |
## What does this PR do?
Updates zstd to version ${{ steps.check-version.outputs.tag }}
Compare: https://github.com/facebook/zstd/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-zstd.yml)

34
.github/workflows/zig-format.yml vendored Normal file
View File

@@ -0,0 +1,34 @@
name: zig-format
permissions:
contents: write
on:
workflow_call:
workflow_dispatch:
pull_request:
merge_group:
env:
BUN_VERSION: "1.1.44"
jobs:
zig-format:
name: zig-format
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: ${{ env.BUN_VERSION }}
- name: Zig Format
run: |
bun run zig-format:diff
- name: Commit
uses: stefanzweifel/git-auto-commit-action@v5
with:
commit_message: "`bun run zig-format`"

8
.gitignore vendored
View File

@@ -16,7 +16,6 @@
.vscode/clang*
.vscode/cpp*
.zig-cache
.bake-debug
*.a
*.bc
*.big
@@ -36,7 +35,6 @@
*.out.refresh.js
*.pdb
*.sqlite
*.swp
*.tmp
*.trace
*.wat
@@ -153,8 +151,6 @@ src/bake/generated.ts
test/cli/install/registry/packages/publish-pkg-*
test/cli/install/registry/packages/@secret/publish-pkg-8
test/js/third_party/prisma/prisma/sqlite/dev.db-journal
tmp
codegen-for-zig-team.tar.gz
# Dependencies
/vendor
@@ -182,6 +178,4 @@ codegen-for-zig-team.tar.gz
.buildkite/ci.yml
*.sock
scratch*.{js,ts,tsx,cjs,mjs}
*.bun-build
scratch*.{js,ts,tsx,cjs,mjs}

View File

@@ -1 +1,4 @@
command source -C -s true -e true misctools/lldb/init.lldb
# command script import vendor/zig/tools/lldb_pretty_printers.py
command script import vendor/WebKit/Tools/lldb/lldb_webkit.py
# type summary add --summary-string "${var} | inner=${var[0-30]}, source=${var[33-64]}, tag=${var[31-32]}" "unsigned long"

View File

@@ -7,4 +7,3 @@ src/react-refresh.js
*.min.js
test/snippets
test/js/node/test
bun.lock

968
.vscode/launch.json generated vendored

File diff suppressed because it is too large Load Diff

16
.vscode/settings.json vendored
View File

@@ -30,22 +30,18 @@
"zig.initialSetupDone": true,
"zig.buildOption": "build",
"zig.zls.zigLibPath": "${workspaceFolder}/vendor/zig/lib",
"zig.buildArgs": ["-Dgenerated-code=./build/debug/codegen", "--watch", "-fincremental"],
"zig.buildArgs": ["-Dgenerated-code=./build/debug/codegen"],
"zig.zls.buildOnSaveStep": "check",
// "zig.zls.enableBuildOnSave": true,
// "zig.buildOnSave": true,
"zig.buildFilePath": "${workspaceFolder}/build.zig",
"zig.path": "${workspaceFolder}/vendor/zig/zig.exe",
"zig.zls.path": "${workspaceFolder}/vendor/zig/zls.exe",
"zig.formattingProvider": "zls",
"zig.zls.enableInlayHints": false,
"[zig]": {
"editor.tabSize": 4,
"editor.useTabStops": false,
"editor.defaultFormatter": "ziglang.vscode-zig",
"editor.codeActionsOnSave": {
"source.organizeImports": "never",
},
},
// lldb
@@ -67,7 +63,6 @@
"editor.tabSize": 4,
"editor.defaultFormatter": "xaver.clang-format",
},
"clangd.arguments": ["--header-insertion=never"],
// JavaScript
"prettier.enable": true,
@@ -139,15 +134,13 @@
"**/*.xcscheme": true,
"**/*.xcodeproj": true,
"**/*.i": true,
// uws WebSocket.cpp conflicts with webcore WebSocket.cpp
"packages/bun-uws/fuzzing": true,
},
"files.associations": {
"*.css": "tailwindcss",
"*.idl": "cpp",
"*.mdc": "markdown",
"array": "cpp",
"ios": "cpp",
"oxlint.json": "jsonc",
"bun.lock": "jsonc",
},
"C_Cpp.files.exclude": {
"**/.vscode": true,
@@ -168,5 +161,4 @@
"WebKit/WebInspectorUI": true,
},
"git.detectSubmodules": false,
"bun.test.customScript": "bun-debug test"
}

View File

@@ -1 +0,0 @@
CLAUDE.md

245
CLAUDE.md
View File

@@ -1,245 +0,0 @@
This is the Bun repository - an all-in-one JavaScript runtime & toolkit designed for speed, with a bundler, test runner, and Node.js-compatible package manager. It's written primarily in Zig with C++ for JavaScriptCore integration, powered by WebKit's JavaScriptCore engine.
## Building and Running Bun
### Build Commands
- **Build debug version**: `bun bd` or `bun run build:debug`
- Creates a debug build at `./build/debug/bun-debug`
- Compilation takes ~2.5 minutes
- **Run tests with your debug build**: `bun bd test <test-file>`
- **CRITICAL**: Never use `bun test` directly - it won't include your changes
- **Run any command with debug build**: `bun bd <command>`
### Other Build Variants
- `bun run build:release` - Release build
Address sanitizer is enabled by default in debug builds of Bun.
## Testing
### Running Tests
- **Single test file**: `bun bd test test/js/bun/http/serve.test.ts`
- **Fuzzy match test file**: `bun bd test http/serve.test.ts`
- **With filter**: `bun bd test test/js/bun/http/serve.test.ts -t "should handle"`
### Test Organization
- `test/js/bun/` - Bun-specific API tests (http, crypto, ffi, shell, etc.)
- `test/js/node/` - Node.js compatibility tests
- `test/js/web/` - Web API tests (fetch, WebSocket, streams, etc.)
- `test/cli/` - CLI command tests (install, run, test, etc.)
- `test/regression/issue/` - Regression tests (create one per bug fix)
- `test/bundler/` - Bundler and transpiler tests
- `test/integration/` - End-to-end integration tests
- `test/napi/` - N-API compatibility tests
- `test/v8/` - V8 C++ API compatibility tests
### Writing Tests
Tests use Bun's Jest-compatible test runner with proper test fixtures:
```typescript
import { test, expect } from "bun:test";
import { bunEnv, bunExe, tempDirWithFiles } from "harness";
test("my feature", async () => {
// Create temp directory with test files
const dir = tempDirWithFiles("test-prefix", {
"index.js": `console.log("hello");`,
});
// Spawn Bun process
await using proc = Bun.spawn({
cmd: [bunExe(), "index.js"],
env: bunEnv,
cwd: dir,
});
const [stdout, stderr, exitCode] = await Promise.all([
new Response(proc.stdout).text(),
new Response(proc.stderr).text(),
proc.exited,
]);
expect(exitCode).toBe(0);
expect(stdout).toBe("hello\n");
});
```
## Code Architecture
### Language Structure
- **Zig code** (`src/*.zig`): Core runtime, JavaScript bindings, package manager
- **C++ code** (`src/bun.js/bindings/*.cpp`): JavaScriptCore bindings, Web APIs
- **TypeScript** (`src/js/`): Built-in JavaScript modules with special syntax (see JavaScript Modules section)
- **Generated code**: Many files are auto-generated from `.classes.ts` and other sources
### Core Source Organization
#### Runtime Core (`src/`)
- `bun.zig` - Main entry point
- `cli.zig` - CLI command orchestration
- `js_parser.zig`, `js_lexer.zig`, `js_printer.zig` - JavaScript parsing/printing
- `transpiler.zig` - Wrapper around js_parser with sourcemap support
- `resolver/` - Module resolution system
- `allocators/` - Custom memory allocators for performance
#### JavaScript Runtime (`src/bun.js/`)
- `bindings/` - C++ JavaScriptCore bindings
- Generated classes from `.classes.ts` files
- Manual bindings for complex APIs
- `api/` - Bun-specific APIs
- `server.zig` - HTTP server implementation
- `FFI.zig` - Foreign Function Interface
- `crypto.zig` - Cryptographic operations
- `glob.zig` - File pattern matching
- `node/` - Node.js compatibility layer
- Module implementations (fs, path, crypto, etc.)
- Process and Buffer APIs
- `webcore/` - Web API implementations
- `fetch.zig` - Fetch API
- `streams.zig` - Web Streams
- `Blob.zig`, `Response.zig`, `Request.zig`
- `event_loop/` - Event loop and task management
#### Build Tools & Package Manager
- `src/bundler/` - JavaScript bundler
- Advanced tree-shaking
- CSS processing
- HTML handling
- `src/install/` - Package manager
- `lockfile/` - Lockfile handling
- `npm.zig` - npm registry client
- `lifecycle_script_runner.zig` - Package scripts
#### Other Key Components
- `src/shell/` - Cross-platform shell implementation
- `src/css/` - CSS parser and processor
- `src/http/` - HTTP client implementation
- `websocket_client/` - WebSocket client (including deflate support)
- `src/sql/` - SQL database integrations
- `src/bake/` - Server-side rendering framework
### JavaScript Class Implementation (C++)
When implementing JavaScript classes in C++:
1. Create three classes if there's a public constructor:
- `class Foo : public JSC::JSDestructibleObject` (if has C++ fields)
- `class FooPrototype : public JSC::JSNonFinalObject`
- `class FooConstructor : public JSC::InternalFunction`
2. Define properties using HashTableValue arrays
3. Add iso subspaces for classes with C++ fields
4. Cache structures in ZigGlobalObject
## Development Workflow
### Code Formatting
- `bun run prettier` - Format JS/TS files
- `bun run zig-format` - Format Zig files
- `bun run clang-format` - Format C++ files
### Watching for Changes
- `bun run watch` - Incremental Zig compilation with error checking
- `bun run watch-windows` - Windows-specific watch mode
### Code Generation
Code generation happens automatically as part of the build process. The main scripts are:
- `src/codegen/generate-classes.ts` - Generates Zig & C++ bindings from `*.classes.ts` files (a sketch follows below)
- `src/codegen/generate-jssink.ts` - Generates stream-related classes
- `src/codegen/bundle-modules.ts` - Bundles built-in modules like `node:fs`
- `src/codegen/bundle-functions.ts` - Bundles global functions like `ReadableStream`
In development, bundled modules can be reloaded without rebuilding Zig by running `bun run build`.
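As a rough illustration, a `*.classes.ts` definition describes the class name, constructor, and prototype members that the generator turns into Zig & C++ glue. This is a hedged sketch (the `MyThing` class and its fields are hypothetical; see `src/codegen/generate-classes.ts` and existing `*.classes.ts` files for the real schema):
```ts
// Hypothetical *.classes.ts sketch -- not a file from the repo.
import { define } from "../../codegen/class-definitions";

export default [
  define({
    name: "MyThing", // JS-visible class name
    construct: true, // expose a public constructor
    finalize: true, // run a native cleanup hook on GC
    klass: {}, // static properties
    proto: {
      // prototype method backed by a native `doWork` implementation
      doWork: { fn: "doWork", length: 1 },
    },
  }),
];
```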
## JavaScript Modules (`src/js/`)
Built-in JavaScript modules use special syntax and are organized as:
- `node/` - Node.js compatibility modules (`node:fs`, `node:path`, etc.)
- `bun/` - Bun-specific modules (`bun:ffi`, `bun:sqlite`, etc.)
- `thirdparty/` - NPM modules we replace (like `ws`)
- `internal/` - Internal modules not exposed to users
- `builtins/` - Core JavaScript builtins (streams, console, etc.)
### Special Syntax in Built-in Modules
1. **`$` prefix** - Access to private properties and JSC intrinsics:
```js
const arr = $Array.from(...); // Private global
map.$set(...); // Private method
const arr2 = $newArrayWithSize(5); // JSC intrinsic
```
2. **`require()`** - Must use string literals, resolved at compile time:
```js
const fs = require("fs"); // Directly loads by numeric ID
```
3. **Debug helpers** (see the sketch after this list):
- `$debug()` - Like console.log but stripped in release builds
- `$assert()` - Assertions stripped in release builds
- `if($debug) {}` - Check if debug env var is set
4. **Platform detection**: `process.platform` and `process.arch` are inlined and dead-code eliminated
5. **Export syntax**: Use `export default` which gets converted to a return statement:
```js
export default {
readFile,
writeFile,
};
```
Note: These are NOT ES modules. The preprocessor converts `$` to `@` (JSC's actual syntax) and handles the special functions.
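A minimal combined sketch of the helpers above (hypothetical module code, not from the repo; `openFileSync` is illustrative):
```js
// Hypothetical builtin-module code illustrating items 3 and 4.
function openFileSync(path) {
  $debug("openFileSync:", path); // stripped from release builds
  $assert(typeof path === "string", "path must be a string"); // also stripped
  if (process.platform === "win32") {
    // `process.platform` is inlined, so this branch is dead-code
    // eliminated entirely from non-Windows builds
    path = path.replaceAll("/", "\\");
  }
  return require("fs").openSync(path); // string literal, resolved at compile time
}
```
Because `$debug` and `$assert` disappear from release builds, they are safe to use liberally, even in hot paths.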
## CI
Bun uses BuildKite for CI. To get the status of a PR, you can use the following command:
```bash
bun ci
```
## Important Development Notes
1. **Never use `bun test` or `bun <file>` directly** - always use `bun bd test` or `bun bd <command>`. `bun bd` compiles & runs the debug build.
2. **Use `await using`** for proper resource cleanup with Bun APIs (Bun.spawn, Bun.serve, Bun.connect, etc.)
3. **Follow existing code style** - check neighboring files for patterns
4. **Create regression tests** in `test/regression/issue/` when fixing bugs
5. **Use absolute paths** - Always use absolute paths in file operations
6. **Avoid shell commands** - Don't use `find` or `grep` in tests; use Bun's Glob and built-in tools
7. **Memory management** - In Zig code, be careful with allocators and use defer for cleanup
8. **Cross-platform** - Test on macOS, Linux, and Windows when making platform-specific changes
9. **Debug builds** - Use `BUN_DEBUG_QUIET_LOGS=1` to disable debug logging, or `BUN_DEBUG_<scope>=1` to enable specific scopes
10. **Transpiled source** - Find transpiled files in `/tmp/bun-debug-src/` for debugging
## Key APIs and Features
### Bun-Specific APIs
- **Bun.serve()** - High-performance HTTP server (see the sketch after this list)
- **Bun.spawn()** - Process spawning with better performance than Node.js
- **Bun.file()** - Fast file I/O operations
- **Bun.write()** - Unified API for writing to files, stdout, etc.
- **Bun.$ (Shell)** - Cross-platform shell scripting
- **Bun.SQLite** - Native SQLite integration
- **Bun.FFI** - Call native libraries from JavaScript
- **Bun.Glob** - Fast file pattern matching
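A minimal sketch (illustrative only, not code from the repo) combining a few of these APIs:
```ts
// Illustrative sketch: Bun.serve + Bun.file + Bun.spawn together.
const server = Bun.serve({
  port: 3000,
  async fetch(req) {
    const { pathname } = new URL(req.url);
    if (pathname === "/readme") {
      // Bun.file is lazy; the response streams the file from disk
      return new Response(Bun.file("README.md"));
    }
    if (pathname === "/version") {
      const proc = Bun.spawn({ cmd: ["bun", "--version"] });
      return new Response(await new Response(proc.stdout).text());
    }
    return new Response("hello from Bun.serve");
  },
});
console.log(`listening on port ${server.port}`);
```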

View File

@@ -12,12 +12,6 @@ list(APPEND CMAKE_MODULE_PATH
include(Policies)
include(Globals)
if (CMAKE_HOST_WIN32)
# Workaround for TLS certificate verification issue on Windows when downloading from GitHub
# Remove this once we've bumped the CI machines' build image
set(CMAKE_TLS_VERIFY 0)
endif()
# --- Compilers ---
if(CMAKE_HOST_APPLE)

View File

@@ -1,6 +1,6 @@
Configuring a development environment for Bun can take 10-30 minutes depending on your internet connection and computer speed. You will need ~10GB of free disk space for the repository and build artifacts.
If you are using Windows, please refer to [this guide](https://bun.com/docs/project/building-windows)
If you are using Windows, please refer to [this guide](https://bun.sh/docs/project/building-windows)
## Install Dependencies
@@ -37,7 +37,7 @@ Before starting, you will need to already have a release build of Bun installed,
{% codetabs %}
```bash#Native
$ curl -fsSL https://bun.com/install | bash
$ curl -fsSL https://bun.sh/install | bash
```
```bash#npm
@@ -53,17 +53,17 @@ $ brew install bun
## Install LLVM
Bun requires LLVM 19 (`clang` is part of LLVM). This version requirement is to match WebKit (precompiled), as mismatching versions will cause memory allocation failures at runtime. In most cases, you can install LLVM through your system package manager:
Bun requires LLVM 18 (`clang` is part of LLVM). This version requirement is to match WebKit (precompiled), as mismatching versions will cause memory allocation failures at runtime. In most cases, you can install LLVM through your system package manager:
{% codetabs group="os" %}
```bash#macOS (Homebrew)
$ brew install llvm@19
$ brew install llvm@18
```
```bash#Ubuntu/Debian
$ # LLVM has an automatic installation script that is compatible with all versions of Ubuntu
$ wget https://apt.llvm.org/llvm.sh -O - | sudo bash -s -- 19 all
$ wget https://apt.llvm.org/llvm.sh -O - | sudo bash -s -- 18 all
```
```bash#Arch
@@ -71,21 +71,21 @@ $ sudo pacman -S llvm clang lld
```
```bash#Fedora
$ sudo dnf install llvm clang lld-devel
$ sudo dnf install llvm18 clang18 lld18-devel
```
```bash#openSUSE Tumbleweed
$ sudo zypper install clang19 lld19 llvm19
$ sudo zypper install clang18 lld18 llvm18
```
{% /codetabs %}
If none of the above solutions apply, you will have to install it [manually](https://github.com/llvm/llvm-project/releases/tag/llvmorg-19.1.7).
If none of the above solutions apply, you will have to install it [manually](https://github.com/llvm/llvm-project/releases/tag/llvmorg-18.1.8).
Make sure Clang/LLVM 19 is in your path:
Make sure Clang/LLVM 18 is in your path:
```bash
$ which clang-19
$ which clang-18
```
If not, run this to manually add it:
@@ -94,13 +94,13 @@ If not, run this to manually add it:
```bash#macOS (Homebrew)
# use fish_add_path if you're using fish
# use path+="$(brew --prefix llvm@19)/bin" if you are using zsh
$ export PATH="$(brew --prefix llvm@19)/bin:$PATH"
# use path+="$(brew --prefix llvm@18)/bin" if you are using zsh
$ export PATH="$(brew --prefix llvm@18)/bin:$PATH"
```
```bash#Arch
# use fish_add_path if you're using fish
$ export PATH="$PATH:/usr/lib/llvm19/bin"
$ export PATH="$PATH:/usr/lib/llvm18/bin"
```
{% /codetabs %}
@@ -134,24 +134,6 @@ We recommend adding `./build/debug` to your `$PATH` so that you can run `bun-deb
$ bun-debug
```
## Running debug builds
The `bd` package.json script compiles and runs a debug build of Bun, only printing the output of the build process if it fails.
```sh
$ bun bd <args>
$ bun bd test foo.test.ts
$ bun bd ./foo.ts
```
Bun generally takes about 2.5 minutes to compile a debug build when there are Zig changes. If your development workflow is "change one line, save, rebuild", you will spend too much time waiting for the build to finish. Instead:
- Batch up your changes
- Ensure zls is running with incremental watching for LSP errors (if you use VS Code, install the Zig extension, and run `bun run build` once to download Zig, this should just work)
- Prefer using the debugger ("CodeLLDB" in VS Code) to step through the code.
- Use debug logs. `BUN_DEBUG_<scope>=1` will enable debug logging for the corresponding `Output.scoped(.<scope>, false)` logs. You can also set `BUN_DEBUG_QUIET_LOGS=1` to disable all debug logging that isn't explicitly enabled. To dump debug logs into a file, set `BUN_DEBUG=<path-to-file>.log`. Debug logs are aggressively removed in release builds.
- src/js/\*\*.ts changes are pretty much instant to rebuild. C++ changes are a bit slower, but still much faster than the Zig code (Zig is one compilation unit, C++ is many).
## Code generation scripts
Several code generation scripts are used during Bun's build process. These are run automatically when changes are made to certain files.
@@ -187,7 +169,6 @@ To run a release build from a pull request, you can use the `bun-pr` npm package
bunx bun-pr <pr-number>
bunx bun-pr <branch-name>
bunx bun-pr "https://github.com/oven-sh/bun/pull/1234566"
bunx bun-pr --asan <pr-number> # Linux x64 only
```
This will download the release build from the pull request and add it to `$PATH` as `bun-${pr-number}`. You can then run the build with `bun-${pr-number}`.
@@ -198,18 +179,24 @@ bun-1234566 --version
This works by downloading the release build from the GitHub Actions artifacts on the linked pull request. You may need the `gh` CLI installed to authenticate with GitHub.
## AddressSanitizer
## Valgrind
[AddressSanitizer](https://en.wikipedia.org/wiki/AddressSanitizer) helps find memory issues, and is enabled by default in debug builds of Bun on Linux and macOS. This includes the Zig code and all dependencies. It makes the Zig code take about 2x longer to build; if that's stopping you from being productive, you can disable it by changing `-Denable_asan=$<IF:$<BOOL:${ENABLE_ASAN}>,true,false>` to `-Denable_asan=false` in the `cmake/targets/BuildBun.cmake` file, but generally we recommend batching up your changes between builds.
On Linux, valgrind can help find memory issues.
To build a release build with Address Sanitizer, run:
Keep in mind:
- JavaScriptCore doesn't support valgrind. It will report spurious errors.
- Valgrind is slow
- Mimalloc will sometimes cause spurious errors when a debug build is enabled
You'll need a very recent version of Valgrind due to DWARF 5 debug symbols. You may need to manually compile Valgrind instead of using it from your Linux package manager.
`--fair-sched=try` is necessary if running multithreaded code in Bun (such as the bundler). Otherwise it will hang.
```bash
$ bun run build:release:asan
$ valgrind --fair-sched=try --track-origins=yes bun-debug <args>
```
In CI, we run our test suite with at least one target that is built with Address Sanitizer.
## Building WebKit locally + Debug mode of JSC
WebKit is not cloned by default (to save time and disk space). To clone and build WebKit locally, run:
@@ -218,33 +205,18 @@ WebKit is not cloned by default (to save time and disk space). To clone and buil
# Clone WebKit into ./vendor/WebKit
$ git clone https://github.com/oven-sh/WebKit vendor/WebKit
# Check out the commit hash specified in `set(WEBKIT_VERSION <commit_hash>)` in cmake/tools/SetupWebKit.cmake
$ git -C vendor/WebKit checkout <commit_hash>
# Make a debug build of JSC. This will output build artifacts in ./vendor/WebKit/WebKitBuild/Debug
# Optionally, you can use `make jsc` for a release build
$ make jsc-debug && rm vendor/WebKit/WebKitBuild/Debug/JavaScriptCore/DerivedSources/inspector/InspectorProtocolObjects.h
# After an initial run of `make jsc-debug`, you can rebuild JSC with:
$ cmake --build vendor/WebKit/WebKitBuild/Debug --target jsc && rm vendor/WebKit/WebKitBuild/Debug/JavaScriptCore/DerivedSources/inspector/InspectorProtocolObjects.h
$ make jsc-debug
# Build bun with the local JSC build
$ bun run build:local
```
Using `bun run build:local` will build Bun in the `./build/debug-local` directory (instead of `./build/debug`); you'll have to change a couple of places to use this new directory:
- The first line in [`src/js/builtins.d.ts`](/src/js/builtins.d.ts)
- The `CompilationDatabase` line in [`.clangd` config](/.clangd) should be `CompilationDatabase: build/debug-local`
- In [`build.zig`](/build.zig), the `codegen_path` option should be `build/debug-local/codegen` (instead of `build/debug/codegen`)
- In [`.vscode/launch.json`](/.vscode/launch.json), many configurations use `./build/debug/`, change them as you see fit
Note that the WebKit folder, including build artifacts, is 8GB+ in size.
If you are using a JSC debug build with VS Code, make sure to run the `C/C++: Select a Configuration` command so IntelliSense can find the debug headers.
Note that if you make changes to our [WebKit fork](https://github.com/oven-sh/WebKit), you will also have to update [`SetupWebKit.cmake`](/cmake/tools/SetupWebKit.cmake) to point to the new commit hash.
## Troubleshooting
### 'span' file not found on Ubuntu
@@ -266,7 +238,7 @@ The issue may manifest when initially running `bun setup` as Clang being unable
```
The C++ compiler
"/usr/bin/clang++-19"
"/usr/bin/clang++-18"
is not able to compile a simple test program.
```

2
LATEST
View File

@@ -1 +1 @@
1.2.18
1.1.44

View File

@@ -91,9 +91,9 @@ ZIG ?= $(shell which zig 2>/dev/null || echo -e "error: Missing zig. Please make
# This is easier to happen than you'd expect.
# Using realpath here causes issues because clang uses clang++ as a symlink
# so if that's resolved, it won't build for C++
REAL_CC = $(shell which clang-19 2>/dev/null || which clang 2>/dev/null)
REAL_CXX = $(shell which clang++-19 2>/dev/null || which clang++ 2>/dev/null)
CLANG_FORMAT = $(shell which clang-format-19 2>/dev/null || which clang-format 2>/dev/null)
REAL_CC = $(shell which clang-16 2>/dev/null || which clang 2>/dev/null)
REAL_CXX = $(shell which clang++-16 2>/dev/null || which clang++ 2>/dev/null)
CLANG_FORMAT = $(shell which clang-format-16 2>/dev/null || which clang-format 2>/dev/null)
CC = $(REAL_CC)
CXX = $(REAL_CXX)
@@ -117,14 +117,14 @@ CC_WITH_CCACHE = $(CCACHE_PATH) $(CC)
ifeq ($(OS_NAME),darwin)
# Find LLVM
ifeq ($(wildcard $(LLVM_PREFIX)),)
LLVM_PREFIX = $(shell brew --prefix llvm@19)
LLVM_PREFIX = $(shell brew --prefix llvm@16)
endif
ifeq ($(wildcard $(LLVM_PREFIX)),)
LLVM_PREFIX = $(shell brew --prefix llvm)
endif
ifeq ($(wildcard $(LLVM_PREFIX)),)
# This is kinda ugly, but I can't find a better way to error :(
LLVM_PREFIX = $(shell echo -e "error: Unable to find llvm. Please run 'brew install llvm@19' or set LLVM_PREFIX=/path/to/llvm")
LLVM_PREFIX = $(shell echo -e "error: Unable to find llvm. Please run 'brew install llvm@16' or set LLVM_PREFIX=/path/to/llvm")
endif
LDFLAGS += -L$(LLVM_PREFIX)/lib
@@ -164,7 +164,7 @@ CMAKE_FLAGS_WITHOUT_RELEASE = -DCMAKE_C_COMPILER=$(CC) \
-DCMAKE_OSX_DEPLOYMENT_TARGET=$(MIN_MACOS_VERSION) \
$(CMAKE_CXX_COMPILER_LAUNCHER_FLAG) \
-DCMAKE_AR=$(AR) \
-DCMAKE_RANLIB=$(which llvm-19-ranlib 2>/dev/null || which llvm-ranlib 2>/dev/null) \
-DCMAKE_RANLIB=$(which llvm-16-ranlib 2>/dev/null || which llvm-ranlib 2>/dev/null) \
-DCMAKE_CXX_STANDARD=20 \
-DCMAKE_C_STANDARD=17 \
-DCMAKE_CXX_STANDARD_REQUIRED=ON \
@@ -191,7 +191,7 @@ endif
ifeq ($(OS_NAME),linux)
LIBICONV_PATH =
AR = $(shell which llvm-ar-19 2>/dev/null || which llvm-ar 2>/dev/null || which ar 2>/dev/null)
AR = $(shell which llvm-ar-16 2>/dev/null || which llvm-ar 2>/dev/null || which ar 2>/dev/null)
endif
OPTIMIZATION_LEVEL=-O3 $(MARCH_NATIVE)
@@ -255,7 +255,7 @@ DEFAULT_LINKER_FLAGS= -pthread -ldl
endif
ifeq ($(OS_NAME),darwin)
_MIMALLOC_OBJECT_FILE = 0
JSC_BUILD_STEPS += jsc-build-mac
JSC_BUILD_STEPS += jsc-build-mac jsc-copy-headers
JSC_BUILD_STEPS_DEBUG += jsc-build-mac-debug
_MIMALLOC_FILE = libmimalloc.a
_MIMALLOC_INPUT_PATH = libmimalloc.a
@@ -286,7 +286,7 @@ STRIP=/usr/bin/strip
endif
ifeq ($(OS_NAME),linux)
STRIP=$(shell which llvm-strip 2>/dev/null || which llvm-strip-19 2>/dev/null || which strip 2>/dev/null || echo "Missing strip")
STRIP=$(shell which llvm-strip 2>/dev/null || which llvm-strip-16 2>/dev/null || which strip 2>/dev/null || echo "Missing strip")
endif
@@ -482,7 +482,7 @@ STATIC_MUSL_FLAG ?=
WRAP_SYMBOLS_ON_LINUX =
ifeq ($(OS_NAME), linux)
WRAP_SYMBOLS_ON_LINUX = -Wl,--wrap=fcntl -Wl,--wrap=fcntl64 -Wl,--wrap=stat64 -Wl,--wrap=pow -Wl,--wrap=exp -Wl,--wrap=exp2 -Wl,--wrap=log -Wl,--wrap=log2 \
WRAP_SYMBOLS_ON_LINUX = -Wl,--wrap=fcntl -Wl,--wrap=fcntl64 -Wl,--wrap=stat64 -Wl,--wrap=pow -Wl,--wrap=exp -Wl,--wrap=log -Wl,--wrap=log2 \
-Wl,--wrap=lstat \
-Wl,--wrap=stat \
-Wl,--wrap=fstat \
@@ -674,7 +674,7 @@ endif
.PHONY: assert-deps
assert-deps:
@echo "Checking if the required utilities are available..."
@if [ $(CLANG_VERSION) -lt "19" ]; then echo -e "ERROR: clang version >=19 required, found: $(CLANG_VERSION). Install with:\n\n $(POSIX_PKG_MANAGER) install llvm@19"; exit 1; fi
@if [ $(CLANG_VERSION) -lt "15" ]; then echo -e "ERROR: clang version >=15 required, found: $(CLANG_VERSION). Install with:\n\n $(POSIX_PKG_MANAGER) install llvm@16"; exit 1; fi
@cmake --version >/dev/null 2>&1 || (echo -e "ERROR: cmake is required."; exit 1)
@$(PYTHON) --version >/dev/null 2>&1 || (echo -e "ERROR: python is required."; exit 1)
@$(ESBUILD) --version >/dev/null 2>&1 || (echo -e "ERROR: esbuild is required."; exit 1)
@@ -924,7 +924,7 @@ bun-codesign-release-local-debug:
.PHONY: jsc
jsc: jsc-build
jsc: jsc-build jsc-copy-headers jsc-bindings
.PHONY: jsc-debug
jsc-debug: jsc-build-debug
.PHONY: jsc-build
@@ -980,7 +980,7 @@ release-create-auto-updater:
.PHONY: release-create
release-create:
gh release create --title "bun v$(PACKAGE_JSON_VERSION)" "$(BUN_BUILD_TAG)"
gh release create --repo=$(BUN_AUTO_UPDATER_REPO) --title "bun v$(PACKAGE_JSON_VERSION)" "$(BUN_BUILD_TAG)" -n "See https://github.com/oven-sh/bun/releases/tag/$(BUN_BUILD_TAG) for release notes. Using the install script or bun upgrade is the recommended way to install bun. Join bun's Discord to get access https://bun.com/discord"
gh release create --repo=$(BUN_AUTO_UPDATER_REPO) --title "bun v$(PACKAGE_JSON_VERSION)" "$(BUN_BUILD_TAG)" -n "See https://github.com/oven-sh/bun/releases/tag/$(BUN_BUILD_TAG) for release notes. Using the install script or bun upgrade is the recommended way to install bun. Join bun's Discord to get access https://bun.sh/discord"
release-bin-entitlements:
@@ -1154,7 +1154,7 @@ jsc-copy-headers:
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/StubInfoSummary.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/StubInfoSummary.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/CommonSlowPaths.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/CommonSlowPaths.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/DirectArguments.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/DirectArguments.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/GenericArgumentsImpl.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/GenericArgumentsImpl.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/GenericArguments.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/GenericArguments.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/SamplingProfiler.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/SamplingProfiler.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/ScopedArguments.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/ScopedArguments.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/JSLexicalEnvironment.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/JSLexicalEnvironment.h
@@ -1183,8 +1183,6 @@ jsc-copy-headers:
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/SymbolObject.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/SymbolObject.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/JSGenerator.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/JSGenerator.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/UnlinkedFunctionCodeBlock.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/UnlinkedFunctionCodeBlock.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/GlobalCodeBlock.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/GlobalCodeBlock.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/ProgramCodeBlock.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/ProgramCodeBlock.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/AggregateError.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/AggregateError.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/API/JSWeakValue.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/JSWeakValue.h
find $(WEBKIT_RELEASE_DIR)/JavaScriptCore/Headers/JavaScriptCore/ -name "*.h" -exec cp {} $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/ \;
@@ -1207,7 +1205,7 @@ jsc-copy-headers-debug:
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/StubInfoSummary.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/StubInfoSummary.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/CommonSlowPaths.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/CommonSlowPaths.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/DirectArguments.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/DirectArguments.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/GenericArgumentsImpl.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/GenericArgumentsImpl.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/GenericArguments.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/GenericArguments.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/SamplingProfiler.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/SamplingProfiler.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/ScopedArguments.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/ScopedArguments.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/JSLexicalEnvironment.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/JSLexicalEnvironment.h
@@ -1236,8 +1234,6 @@ jsc-copy-headers-debug:
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/SymbolObject.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/SymbolObject.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/JSGenerator.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/JSGenerator.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/UnlinkedFunctionCodeBlock.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/UnlinkedFunctionCodeBlock.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/GlobalCodeBlock.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/GlobalCodeBlock.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/ProgramCodeBlock.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/ProgramCodeBlock.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/AggregateError.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/AggregateError.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/API/JSWeakValue.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/JSWeakValue.h
find $(WEBKIT_DEBUG_DIR)/JavaScriptCore/Headers/JavaScriptCore/ -name "*.h" -exec cp {} $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/ \;
@@ -1265,7 +1261,6 @@ jsc-build-mac-compile:
-DBUN_FAST_TLS=ON \
-DENABLE_FTL_JIT=ON \
-DUSE_BUN_JSC_ADDITIONS=ON \
-DUSE_BUN_EVENT_LOOP=ON \
-G Ninja \
$(CMAKE_FLAGS_WITHOUT_RELEASE) \
-DPTHREAD_JIT_PERMISSIONS_API=1 \
@@ -1289,7 +1284,6 @@ jsc-build-mac-compile-lto:
-DUSE_THIN_ARCHIVES=OFF \
-DBUN_FAST_TLS=ON \
-DUSE_BUN_JSC_ADDITIONS=ON \
-DUSE_BUN_EVENT_LOOP=ON \
-DCMAKE_C_FLAGS="-flto=full" \
-DCMAKE_CXX_FLAGS="-flto=full" \
-DENABLE_FTL_JIT=ON \
@@ -1305,7 +1299,6 @@ jsc-build-mac-compile-lto:
.PHONY: jsc-build-mac-compile-debug
jsc-build-mac-compile-debug:
mkdir -p $(WEBKIT_DEBUG_DIR) $(WEBKIT_DIR);
# to disable asan, remove -DENABLE_SANITIZERS=address and add -DENABLE_MALLOC_HEAP_BREAKDOWN=ON
cd $(WEBKIT_DEBUG_DIR) && \
ICU_INCLUDE_DIRS="$(HOMEBREW_PREFIX)opt/icu4c/include" \
cmake \
@@ -1314,9 +1307,9 @@ jsc-build-mac-compile-debug:
-DCMAKE_BUILD_TYPE=Debug \
-DUSE_THIN_ARCHIVES=OFF \
-DENABLE_FTL_JIT=ON \
-DENABLE_MALLOC_HEAP_BREAKDOWN=ON \
-DCMAKE_EXPORT_COMPILE_COMMANDS=ON \
-DUSE_BUN_JSC_ADDITIONS=ON \
-DUSE_BUN_EVENT_LOOP=ON \
-DENABLE_BUN_SKIP_FAILING_ASSERTIONS=ON \
-DALLOW_LINE_AND_COLUMN_NUMBER_IN_BUILTINS=ON \
-G Ninja \
@@ -1325,7 +1318,6 @@ jsc-build-mac-compile-debug:
-DUSE_PTHREAD_JIT_PERMISSIONS_API=ON \
-DENABLE_REMOTE_INSPECTOR=ON \
-DUSE_VISIBILITY_ATTRIBUTE=1 \
-DENABLE_SANITIZERS=address \
$(WEBKIT_DIR) \
$(WEBKIT_DEBUG_DIR) && \
CFLAGS="$(CFLAGS) -ffat-lto-objects" CXXFLAGS="$(CXXFLAGS) -ffat-lto-objects" \
@@ -1342,7 +1334,6 @@ jsc-build-linux-compile-config:
-DENABLE_BUN_SKIP_FAILING_ASSERTIONS=ON \
-DUSE_THIN_ARCHIVES=OFF \
-DUSE_BUN_JSC_ADDITIONS=ON \
-DUSE_BUN_EVENT_LOOP=ON \
-DENABLE_FTL_JIT=ON \
-DENABLE_REMOTE_INSPECTOR=ON \
-DJSEXPORT_PRIVATE=WTF_EXPORT_DECLARATION \
@@ -1366,7 +1357,6 @@ jsc-build-linux-compile-config-debug:
-DENABLE_BUN_SKIP_FAILING_ASSERTIONS=ON \
-DUSE_THIN_ARCHIVES=OFF \
-DUSE_BUN_JSC_ADDITIONS=ON \
-DUSE_BUN_EVENT_LOOP=ON \
-DENABLE_FTL_JIT=ON \
-DENABLE_REMOTE_INSPECTOR=ON \
-DJSEXPORT_PRIVATE=WTF_EXPORT_DECLARATION \
@@ -1385,18 +1375,18 @@ jsc-build-linux-compile-config-debug:
jsc-build-linux-compile-build:
mkdir -p $(WEBKIT_RELEASE_DIR) && \
cd $(WEBKIT_RELEASE_DIR) && \
CFLAGS="$(CFLAGS) -Wl,--whole-archive -ffat-lto-objects" CXXFLAGS="$(CXXFLAGS) -Wl,--whole-archive -ffat-lto-objects -DUSE_BUN_JSC_ADDITIONS=ON -DUSE_BUN_EVENT_LOOP=ON" \
CFLAGS="$(CFLAGS) -Wl,--whole-archive -ffat-lto-objects" CXXFLAGS="$(CXXFLAGS) -Wl,--whole-archive -ffat-lto-objects -DUSE_BUN_JSC_ADDITIONS=ON" \
cmake --build $(WEBKIT_RELEASE_DIR) --config relwithdebuginfo --target jsc
.PHONY: jsc-build-linux-compile-build-debug
jsc-build-linux-compile-build-debug:
mkdir -p $(WEBKIT_DEBUG_DIR) && \
cd $(WEBKIT_DEBUG_DIR) && \
CFLAGS="$(CFLAGS) -Wl,--whole-archive -ffat-lto-objects" CXXFLAGS="$(CXXFLAGS) -Wl,--whole-archive -ffat-lto-objects -DUSE_BUN_JSC_ADDITIONS=ON -DUSE_BUN_EVENT_LOOP=ON" \
CFLAGS="$(CFLAGS) -Wl,--whole-archive -ffat-lto-objects" CXXFLAGS="$(CXXFLAGS) -Wl,--whole-archive -ffat-lto-objects -DUSE_BUN_JSC_ADDITIONS=ON" \
cmake --build $(WEBKIT_DEBUG_DIR) --config Debug --target jsc
jsc-build-mac: jsc-force-fastjit jsc-build-mac-compile
jsc-build-mac: jsc-force-fastjit jsc-build-mac-compile jsc-build-copy
jsc-build-mac-debug: jsc-force-fastjit jsc-build-mac-compile-debug
jsc-build-linux: jsc-build-linux-compile-config jsc-build-linux-compile-build jsc-build-copy
@@ -1977,7 +1967,7 @@ integration-test-dev: # to run integration tests
USE_EXISTING_PROCESS=true TEST_SERVER_URL=http://localhost:3000 node test/scripts/browser.js
copy-install:
cp src/cli/install.sh ../bun.com/docs/install.html
cp src/cli/install.sh ../bun.sh/docs/install.html
copy-to-bun-release-dir: copy-to-bun-release-dir-bin copy-to-bun-release-dir-dsym
@@ -2019,28 +2009,28 @@ vendor-dev: assert-deps submodule npm-install-dev vendor-without-npm
.PHONY: bun
bun:
@echo 'makefile is deprecated - use `cmake` / `bun run build`'
@echo 'See https://bun.com/docs/project/contributing for more details'
@echo 'See https://bun.sh/docs/project/contributing for more details'
cpp:
@echo 'makefile is deprecated - use `cmake` / `bun run build`'
@echo 'See https://bun.com/docs/project/contributing for more details'
@echo 'See https://bun.sh/docs/project/contributing for more details'
zig:
@echo 'makefile is deprecated - use `cmake` / `bun run build`'
@echo 'See https://bun.com/docs/project/contributing for more details'
@echo 'See https://bun.sh/docs/project/contributing for more details'
dev:
@echo 'makefile is deprecated - use `cmake` / `bun run build`'
@echo 'See https://bun.com/docs/project/contributing for more details'
@echo 'See https://bun.sh/docs/project/contributing for more details'
setup:
@echo 'makefile is deprecated - use `cmake` / `bun run build`'
@echo 'See https://bun.com/docs/project/contributing for more details'
@echo 'See https://bun.sh/docs/project/contributing for more details'
bindings:
@echo 'makefile is deprecated - use `cmake` / `bun run build`'
@echo 'See https://bun.com/docs/project/contributing for more details'
@echo 'See https://bun.sh/docs/project/contributing for more details'
help:
@echo 'makefile is deprecated - use `cmake` / `bun run build`'
@echo 'See https://bun.com/docs/project/contributing for more details'
@echo 'See https://bun.sh/docs/project/contributing for more details'

556
README.md
View File

@@ -1,16 +1,16 @@
<p align="center">
<a href="https://bun.com"><img src="https://github.com/user-attachments/assets/50282090-adfd-4ddb-9e27-c30753c6b161" alt="Logo" height=170></a>
<a href="https://bun.sh"><img src="https://github.com/user-attachments/assets/50282090-adfd-4ddb-9e27-c30753c6b161" alt="Logo" height=170></a>
</p>
<h1 align="center">Bun</h1>
<p align="center">
<a href="https://bun.com/discord" target="_blank"><img height=20 src="https://img.shields.io/discord/876711213126520882" /></a>
<a href="https://bun.sh/discord" target="_blank"><img height=20 src="https://img.shields.io/discord/876711213126520882" /></a>
<img src="https://img.shields.io/github/stars/oven-sh/bun" alt="stars">
<a href="https://twitter.com/jarredsumner/status/1542824445810642946"><img src="https://img.shields.io/static/v1?label=speed&message=fast&color=success" alt="Bun speed" /></a>
</p>
<div align="center">
<a href="https://bun.com/docs">Documentation</a>
<a href="https://bun.sh/docs">Documentation</a>
<span>&nbsp;&nbsp;•&nbsp;&nbsp;</span>
<a href="https://discord.com/invite/CXdq2DP29u">Discord</a>
<span>&nbsp;&nbsp;•&nbsp;&nbsp;</span>
@@ -20,7 +20,7 @@
<br />
</div>
### [Read the docs →](https://bun.com/docs)
### [Read the docs →](https://bun.sh/docs)
## What is Bun?
@@ -47,14 +47,12 @@ Bun supports Linux (x64 & arm64), macOS (x64 & Apple Silicon) and Windows (x64).
> **Linux users** — Kernel version 5.6 or higher is strongly recommended, but the minimum is 5.1.
> **x64 users** — if you see "illegal instruction" or similar errors, check our [CPU requirements](https://bun.com/docs/installation#cpu-requirements-and-baseline-builds)
```sh
# with install script (recommended)
curl -fsSL https://bun.com/install | bash
curl -fsSL https://bun.sh/install | bash
# on windows
powershell -c "irm bun.com/install.ps1 | iex"
powershell -c "irm bun.sh/install.ps1 | iex"
# with npm
npm install -g bun
@@ -87,329 +85,315 @@ bun upgrade --canary
## Quick links
- Intro
- [What is Bun?](https://bun.com/docs/index)
- [Installation](https://bun.com/docs/installation)
- [Quickstart](https://bun.com/docs/quickstart)
- [TypeScript](https://bun.com/docs/typescript)
- [What is Bun?](https://bun.sh/docs/index)
- [Installation](https://bun.sh/docs/installation)
- [Quickstart](https://bun.sh/docs/quickstart)
- [TypeScript](https://bun.sh/docs/typescript)
- Templating
- [`bun init`](https://bun.com/docs/cli/init)
- [`bun create`](https://bun.com/docs/cli/bun-create)
- CLI
- [`bun upgrade`](https://bun.com/docs/cli/bun-upgrade)
- [`bun init`](https://bun.sh/docs/cli/init)
- [`bun create`](https://bun.sh/docs/cli/bun-create)
- Runtime
- [`bun run`](https://bun.com/docs/cli/run)
- [File types (Loaders)](https://bun.com/docs/runtime/loaders)
- [TypeScript](https://bun.com/docs/runtime/typescript)
- [JSX](https://bun.com/docs/runtime/jsx)
- [Environment variables](https://bun.com/docs/runtime/env)
- [Bun APIs](https://bun.com/docs/runtime/bun-apis)
- [Web APIs](https://bun.com/docs/runtime/web-apis)
- [Node.js compatibility](https://bun.com/docs/runtime/nodejs-apis)
- [Single-file executable](https://bun.com/docs/bundler/executables)
- [Plugins](https://bun.com/docs/runtime/plugins)
- [Watch mode / Hot Reloading](https://bun.com/docs/runtime/hot)
- [Module resolution](https://bun.com/docs/runtime/modules)
- [Auto-install](https://bun.com/docs/runtime/autoimport)
- [bunfig.toml](https://bun.com/docs/runtime/bunfig)
- [Debugger](https://bun.com/docs/runtime/debugger)
- [$ Shell](https://bun.com/docs/runtime/shell)
- [`bun run`](https://bun.sh/docs/cli/run)
- [File types](https://bun.sh/docs/runtime/loaders)
- [TypeScript](https://bun.sh/docs/runtime/typescript)
- [JSX](https://bun.sh/docs/runtime/jsx)
- [Environment variables](https://bun.sh/docs/runtime/env)
- [Bun APIs](https://bun.sh/docs/runtime/bun-apis)
- [Web APIs](https://bun.sh/docs/runtime/web-apis)
- [Node.js compatibility](https://bun.sh/docs/runtime/nodejs-apis)
- [Single-file executable](https://bun.sh/docs/bundler/executables)
- [Plugins](https://bun.sh/docs/runtime/plugins)
- [Watch mode](https://bun.sh/docs/runtime/hot)
- [Module resolution](https://bun.sh/docs/runtime/modules)
- [Auto-install](https://bun.sh/docs/runtime/autoimport)
- [bunfig.toml](https://bun.sh/docs/runtime/bunfig)
- [Debugger](https://bun.sh/docs/runtime/debugger)
- [Framework API](https://bun.sh/docs/runtime/framework)
- Package manager
- [`bun install`](https://bun.com/docs/cli/install)
- [`bun add`](https://bun.com/docs/cli/add)
- [`bun remove`](https://bun.com/docs/cli/remove)
- [`bun update`](https://bun.com/docs/cli/update)
- [`bun link`](https://bun.com/docs/cli/link)
- [`bun unlink`](https://bun.com/docs/cli/unlink)
- [`bun pm`](https://bun.com/docs/cli/pm)
- [`bun outdated`](https://bun.com/docs/cli/outdated)
- [`bun publish`](https://bun.com/docs/cli/publish)
- [`bun patch`](https://bun.com/docs/install/patch)
- [`bun patch-commit`](https://bun.com/docs/cli/patch-commit)
- [Global cache](https://bun.com/docs/install/cache)
- [Workspaces](https://bun.com/docs/install/workspaces)
- [Lifecycle scripts](https://bun.com/docs/install/lifecycle)
- [Filter](https://bun.com/docs/cli/filter)
- [Lockfile](https://bun.com/docs/install/lockfile)
- [Scopes and registries](https://bun.com/docs/install/registries)
- [Overrides and resolutions](https://bun.com/docs/install/overrides)
- [`.npmrc`](https://bun.com/docs/install/npmrc)
- [`bun install`](https://bun.sh/docs/cli/install)
- [`bun add`](https://bun.sh/docs/cli/add)
- [`bun remove`](https://bun.sh/docs/cli/remove)
- [`bun update`](https://bun.sh/docs/cli/update)
- [`bun link`](https://bun.sh/docs/cli/link)
- [`bun pm`](https://bun.sh/docs/cli/pm)
- [Global cache](https://bun.sh/docs/install/cache)
- [Workspaces](https://bun.sh/docs/install/workspaces)
- [Lifecycle scripts](https://bun.sh/docs/install/lifecycle)
- [Filter](https://bun.sh/docs/cli/filter)
- [Lockfile](https://bun.sh/docs/install/lockfile)
- [Scopes and registries](https://bun.sh/docs/install/registries)
- [Overrides and resolutions](https://bun.sh/docs/install/overrides)
- Bundler
- [`Bun.build`](https://bun.com/docs/bundler)
- [Loaders](https://bun.com/docs/bundler/loaders)
- [Plugins](https://bun.com/docs/bundler/plugins)
- [Macros](https://bun.com/docs/bundler/macros)
- [vs esbuild](https://bun.com/docs/bundler/vs-esbuild)
- [Single-file executable](https://bun.com/docs/bundler/executables)
- [CSS](https://bun.com/docs/bundler/css)
- [HTML](https://bun.com/docs/bundler/html)
- [Hot Module Replacement (HMR)](https://bun.com/docs/bundler/hmr)
- [Full-stack with HTML imports](https://bun.com/docs/bundler/fullstack)
- [`Bun.build`](https://bun.sh/docs/bundler)
- [Loaders](https://bun.sh/docs/bundler/loaders)
- [Plugins](https://bun.sh/docs/bundler/plugins)
- [Macros](https://bun.sh/docs/bundler/macros)
- [vs esbuild](https://bun.sh/docs/bundler/vs-esbuild)
- Test runner
- [`bun test`](https://bun.com/docs/cli/test)
- [Writing tests](https://bun.com/docs/test/writing)
- [Watch mode](https://bun.com/docs/test/hot)
- [Lifecycle hooks](https://bun.com/docs/test/lifecycle)
- [Mocks](https://bun.com/docs/test/mocks)
- [Snapshots](https://bun.com/docs/test/snapshots)
- [Dates and times](https://bun.com/docs/test/time)
- [DOM testing](https://bun.com/docs/test/dom)
- [Code coverage](https://bun.com/docs/test/coverage)
- [Configuration](https://bun.com/docs/test/configuration)
- [Discovery](https://bun.com/docs/test/discovery)
- [Reporters](https://bun.com/docs/test/reporters)
- [Runtime Behavior](https://bun.com/docs/test/runtime-behavior)
- [`bun test`](https://bun.sh/docs/cli/test)
- [Writing tests](https://bun.sh/docs/test/writing)
- [Watch mode](https://bun.sh/docs/test/hot)
- [Lifecycle hooks](https://bun.sh/docs/test/lifecycle)
- [Mocks](https://bun.sh/docs/test/mocks)
- [Snapshots](https://bun.sh/docs/test/snapshots)
- [Dates and times](https://bun.sh/docs/test/time)
- [DOM testing](https://bun.sh/docs/test/dom)
- [Code coverage](https://bun.sh/docs/test/coverage)
- Package runner
- [`bunx`](https://bun.com/docs/cli/bunx)
- [`bunx`](https://bun.sh/docs/cli/bunx)
- API
- [HTTP server (`Bun.serve`)](https://bun.com/docs/api/http)
- [WebSockets](https://bun.com/docs/api/websockets)
- [Workers](https://bun.com/docs/api/workers)
- [Binary data](https://bun.com/docs/api/binary-data)
- [Streams](https://bun.com/docs/api/streams)
- [File I/O (`Bun.file`)](https://bun.com/docs/api/file-io)
- [import.meta](https://bun.com/docs/api/import-meta)
- [SQLite (`bun:sqlite`)](https://bun.com/docs/api/sqlite)
- [PostgreSQL (`Bun.sql`)](https://bun.com/docs/api/sql)
- [Redis (`Bun.redis`)](https://bun.com/docs/api/redis)
- [S3 Client (`Bun.s3`)](https://bun.com/docs/api/s3)
- [FileSystemRouter](https://bun.com/docs/api/file-system-router)
- [TCP sockets](https://bun.com/docs/api/tcp)
- [UDP sockets](https://bun.com/docs/api/udp)
- [Globals](https://bun.com/docs/api/globals)
- [$ Shell](https://bun.com/docs/runtime/shell)
- [Child processes (spawn)](https://bun.com/docs/api/spawn)
- [Transpiler (`Bun.Transpiler`)](https://bun.com/docs/api/transpiler)
- [Hashing](https://bun.com/docs/api/hashing)
- [Colors (`Bun.color`)](https://bun.com/docs/api/color)
- [Console](https://bun.com/docs/api/console)
- [FFI (`bun:ffi`)](https://bun.com/docs/api/ffi)
- [C Compiler (`bun:ffi` cc)](https://bun.com/docs/api/cc)
- [HTMLRewriter](https://bun.com/docs/api/html-rewriter)
- [Testing (`bun:test`)](https://bun.com/docs/api/test)
- [Cookies (`Bun.Cookie`)](https://bun.com/docs/api/cookie)
- [Utils](https://bun.com/docs/api/utils)
- [Node-API](https://bun.com/docs/api/node-api)
- [Glob (`Bun.Glob`)](https://bun.com/docs/api/glob)
- [Semver (`Bun.semver`)](https://bun.com/docs/api/semver)
- [DNS](https://bun.com/docs/api/dns)
- [fetch API extensions](https://bun.com/docs/api/fetch)
- [HTTP server](https://bun.sh/docs/api/http)
- [WebSockets](https://bun.sh/docs/api/websockets)
- [Workers](https://bun.sh/docs/api/workers)
- [Binary data](https://bun.sh/docs/api/binary-data)
- [Streams](https://bun.sh/docs/api/streams)
- [File I/O](https://bun.sh/docs/api/file-io)
- [import.meta](https://bun.sh/docs/api/import-meta)
- [SQLite](https://bun.sh/docs/api/sqlite)
- [FileSystemRouter](https://bun.sh/docs/api/file-system-router)
- [TCP sockets](https://bun.sh/docs/api/tcp)
- [UDP sockets](https://bun.sh/docs/api/udp)
- [Globals](https://bun.sh/docs/api/globals)
- [$ Shell](https://bun.sh/docs/runtime/shell)
- [Child processes](https://bun.sh/docs/api/spawn)
- [Transpiler](https://bun.sh/docs/api/transpiler)
- [Hashing](https://bun.sh/docs/api/hashing)
- [Console](https://bun.sh/docs/api/console)
- [FFI](https://bun.sh/docs/api/ffi)
- [HTMLRewriter](https://bun.sh/docs/api/html-rewriter)
- [Testing](https://bun.sh/docs/api/test)
- [Utils](https://bun.sh/docs/api/utils)
- [Node-API](https://bun.sh/docs/api/node-api)
- [Glob](https://bun.sh/docs/api/glob)
- [Semver](https://bun.sh/docs/api/semver)
- Project
- [Roadmap](https://bun.sh/docs/project/roadmap)
- [Benchmarking](https://bun.sh/docs/project/benchmarking)
- [Contributing](https://bun.sh/docs/project/contributing)
- [Building Windows](https://bun.sh/docs/project/building-windows)
- [License](https://bun.sh/docs/project/licensing)
## Guides
- Binary
- [Convert a Blob to a string](https://bun.com/guides/binary/blob-to-string)
- [Convert a Buffer to a blob](https://bun.com/guides/binary/buffer-to-blob)
- [Convert a Blob to a DataView](https://bun.com/guides/binary/blob-to-dataview)
- [Convert a Buffer to a string](https://bun.com/guides/binary/buffer-to-string)
- [Convert a Blob to a ReadableStream](https://bun.com/guides/binary/blob-to-stream)
- [Convert a Blob to a Uint8Array](https://bun.com/guides/binary/blob-to-typedarray)
- [Convert a DataView to a string](https://bun.com/guides/binary/dataview-to-string)
- [Convert a Uint8Array to a Blob](https://bun.com/guides/binary/typedarray-to-blob)
- [Convert a Blob to an ArrayBuffer](https://bun.com/guides/binary/blob-to-arraybuffer)
- [Convert an ArrayBuffer to a Blob](https://bun.com/guides/binary/arraybuffer-to-blob)
- [Convert a Buffer to a Uint8Array](https://bun.com/guides/binary/buffer-to-typedarray)
- [Convert a Uint8Array to a Buffer](https://bun.com/guides/binary/typedarray-to-buffer)
- [Convert a Uint8Array to a string](https://bun.com/guides/binary/typedarray-to-string)
- [Convert a Buffer to an ArrayBuffer](https://bun.com/guides/binary/buffer-to-arraybuffer)
- [Convert an ArrayBuffer to a Buffer](https://bun.com/guides/binary/arraybuffer-to-buffer)
- [Convert an ArrayBuffer to a string](https://bun.com/guides/binary/arraybuffer-to-string)
- [Convert a Uint8Array to a DataView](https://bun.com/guides/binary/typedarray-to-dataview)
- [Convert a Buffer to a ReadableStream](https://bun.com/guides/binary/buffer-to-readablestream)
- [Convert a Uint8Array to an ArrayBuffer](https://bun.com/guides/binary/typedarray-to-arraybuffer)
- [Convert an ArrayBuffer to a Uint8Array](https://bun.com/guides/binary/arraybuffer-to-typedarray)
- [Convert an ArrayBuffer to an array of numbers](https://bun.com/guides/binary/arraybuffer-to-array)
- [Convert a Uint8Array to a ReadableStream](https://bun.com/guides/binary/typedarray-to-readablestream)
- [Convert a Blob to a DataView](https://bun.sh/guides/binary/blob-to-dataview)
- [Convert a Blob to a ReadableStream](https://bun.sh/guides/binary/blob-to-stream)
- [Convert a Blob to a string](https://bun.sh/guides/binary/blob-to-string)
- [Convert a Blob to a Uint8Array](https://bun.sh/guides/binary/blob-to-typedarray)
- [Convert a Blob to an ArrayBuffer](https://bun.sh/guides/binary/blob-to-arraybuffer)
- [Convert a Buffer to a blob](https://bun.sh/guides/binary/buffer-to-blob)
- [Convert a Buffer to a ReadableStream](https://bun.sh/guides/binary/buffer-to-readablestream)
- [Convert a Buffer to a string](https://bun.sh/guides/binary/buffer-to-string)
- [Convert a Buffer to a Uint8Array](https://bun.sh/guides/binary/buffer-to-typedarray)
- [Convert a Buffer to an ArrayBuffer](https://bun.sh/guides/binary/buffer-to-arraybuffer)
- [Convert a DataView to a string](https://bun.sh/guides/binary/dataview-to-string)
- [Convert a Uint8Array to a Blob](https://bun.sh/guides/binary/typedarray-to-blob)
- [Convert a Uint8Array to a Buffer](https://bun.sh/guides/binary/typedarray-to-buffer)
- [Convert a Uint8Array to a DataView](https://bun.sh/guides/binary/typedarray-to-dataview)
- [Convert a Uint8Array to a ReadableStream](https://bun.sh/guides/binary/typedarray-to-readablestream)
- [Convert a Uint8Array to a string](https://bun.sh/guides/binary/typedarray-to-string)
- [Convert a Uint8Array to an ArrayBuffer](https://bun.sh/guides/binary/typedarray-to-arraybuffer)
- [Convert an ArrayBuffer to a Blob](https://bun.sh/guides/binary/arraybuffer-to-blob)
- [Convert an ArrayBuffer to a Buffer](https://bun.sh/guides/binary/arraybuffer-to-buffer)
- [Convert an ArrayBuffer to a string](https://bun.sh/guides/binary/arraybuffer-to-string)
- [Convert an ArrayBuffer to a Uint8Array](https://bun.sh/guides/binary/arraybuffer-to-typedarray)
- [Convert an ArrayBuffer to an array of numbers](https://bun.sh/guides/binary/arraybuffer-to-array)
- Ecosystem
- [Use React and JSX](https://bun.com/guides/ecosystem/react)
- [Use EdgeDB with Bun](https://bun.com/guides/ecosystem/edgedb)
- [Use Prisma with Bun](https://bun.com/guides/ecosystem/prisma)
- [Add Sentry to a Bun app](https://bun.com/guides/ecosystem/sentry)
- [Create a Discord bot](https://bun.com/guides/ecosystem/discordjs)
- [Run Bun as a daemon with PM2](https://bun.com/guides/ecosystem/pm2)
- [Use Drizzle ORM with Bun](https://bun.com/guides/ecosystem/drizzle)
- [Build an app with Nuxt and Bun](https://bun.com/guides/ecosystem/nuxt)
- [Build an app with Qwik and Bun](https://bun.com/guides/ecosystem/qwik)
- [Build an app with Astro and Bun](https://bun.com/guides/ecosystem/astro)
- [Build an app with Remix and Bun](https://bun.com/guides/ecosystem/remix)
- [Build a frontend using Vite and Bun](https://bun.com/guides/ecosystem/vite)
- [Build an app with Next.js and Bun](https://bun.com/guides/ecosystem/nextjs)
- [Run Bun as a daemon with systemd](https://bun.com/guides/ecosystem/systemd)
- [Deploy a Bun application on Render](https://bun.com/guides/ecosystem/render)
- [Build an HTTP server using Hono and Bun](https://bun.com/guides/ecosystem/hono)
- [Build an app with SvelteKit and Bun](https://bun.com/guides/ecosystem/sveltekit)
- [Build an app with SolidStart and Bun](https://bun.com/guides/ecosystem/solidstart)
- [Build an HTTP server using Elysia and Bun](https://bun.com/guides/ecosystem/elysia)
- [Build an HTTP server using StricJS and Bun](https://bun.com/guides/ecosystem/stric)
- [Containerize a Bun application with Docker](https://bun.com/guides/ecosystem/docker)
- [Build an HTTP server using Express and Bun](https://bun.com/guides/ecosystem/express)
- [Use Neon Postgres through Drizzle ORM](https://bun.com/guides/ecosystem/neon-drizzle)
- [Server-side render (SSR) a React component](https://bun.com/guides/ecosystem/ssr-react)
- [Read and write data to MongoDB using Mongoose and Bun](https://bun.com/guides/ecosystem/mongoose)
- [Use Neon's Serverless Postgres with Bun](https://bun.com/guides/ecosystem/neon-serverless-postgres)
- HTMLRewriter
- [Extract links from a webpage using HTMLRewriter](https://bun.com/guides/html-rewriter/extract-links)
- [Extract social share images and Open Graph tags](https://bun.com/guides/html-rewriter/extract-social-meta)
- [Build a frontend using Vite and Bun](https://bun.sh/guides/ecosystem/vite)
- [Build an app with Astro and Bun](https://bun.sh/guides/ecosystem/astro)
- [Build an app with Next.js and Bun](https://bun.sh/guides/ecosystem/nextjs)
- [Build an app with Nuxt and Bun](https://bun.sh/guides/ecosystem/nuxt)
- [Build an app with Qwik and Bun](https://bun.sh/guides/ecosystem/qwik)
- [Build an app with Remix and Bun](https://bun.sh/guides/ecosystem/remix)
- [Build an app with SolidStart and Bun](https://bun.sh/guides/ecosystem/solidstart)
- [Build an app with SvelteKit and Bun](https://bun.sh/guides/ecosystem/sveltekit)
- [Build an HTTP server using Elysia and Bun](https://bun.sh/guides/ecosystem/elysia)
- [Build an HTTP server using Express and Bun](https://bun.sh/guides/ecosystem/express)
- [Build an HTTP server using Hono and Bun](https://bun.sh/guides/ecosystem/hono)
- [Build an HTTP server using StricJS and Bun](https://bun.sh/guides/ecosystem/stric)
- [Containerize a Bun application with Docker](https://bun.sh/guides/ecosystem/docker)
- [Create a Discord bot](https://bun.sh/guides/ecosystem/discordjs)
- [Deploy a Bun application on Render](https://bun.sh/guides/ecosystem/render)
- [Read and write data to MongoDB using Mongoose and Bun](https://bun.sh/guides/ecosystem/mongoose)
- [Run Bun as a daemon with PM2](https://bun.sh/guides/ecosystem/pm2)
- [Run Bun as a daemon with systemd](https://bun.sh/guides/ecosystem/systemd)
- [Server-side render (SSR) a React component](https://bun.sh/guides/ecosystem/ssr-react)
- [Use Drizzle ORM with Bun](https://bun.sh/guides/ecosystem/drizzle)
- [Use EdgeDB with Bun](https://bun.sh/guides/ecosystem/edgedb)
- [Use Neon's Serverless Postgres with Bun](https://bun.sh/guides/ecosystem/neon-serverless-postgres)
- [Use Prisma with Bun](https://bun.sh/guides/ecosystem/prisma)
- [Use React and JSX](https://bun.sh/guides/ecosystem/react)
- [Add Sentry to a Bun app](https://bun.sh/guides/ecosystem/sentry)
- HTTP
- [Hot reload an HTTP server](https://bun.com/guides/http/hot)
- [Common HTTP server usage](https://bun.com/guides/http/server)
- [Write a simple HTTP server](https://bun.com/guides/http/simple)
- [Configure TLS on an HTTP server](https://bun.com/guides/http/tls)
- [Send an HTTP request using fetch](https://bun.com/guides/http/fetch)
- [Proxy HTTP requests using fetch()](https://bun.com/guides/http/proxy)
- [Start a cluster of HTTP servers](https://bun.com/guides/http/cluster)
- [Stream a file as an HTTP Response](https://bun.com/guides/http/stream-file)
- [fetch with unix domain sockets in Bun](https://bun.com/guides/http/fetch-unix)
- [Upload files via HTTP using FormData](https://bun.com/guides/http/file-uploads)
- [Streaming HTTP Server with Async Iterators](https://bun.com/guides/http/stream-iterator)
- [Streaming HTTP Server with Node.js Streams](https://bun.com/guides/http/stream-node-streams-in-bun)
- [Common HTTP server usage](https://bun.sh/guides/http/server)
- [Configure TLS on an HTTP server](https://bun.sh/guides/http/tls)
- [fetch with unix domain sockets in Bun](https://bun.sh/guides/http/fetch-unix)
- [Hot reload an HTTP server](https://bun.sh/guides/http/hot)
- [Proxy HTTP requests using fetch()](https://bun.sh/guides/http/proxy)
- [Send an HTTP request using fetch](https://bun.sh/guides/http/fetch)
- [Start a cluster of HTTP servers](https://bun.sh/guides/http/cluster)
- [Stream a file as an HTTP Response](https://bun.sh/guides/http/stream-file)
- [Streaming HTTP Server with Async Iterators](https://bun.sh/guides/http/stream-iterator)
- [Streaming HTTP Server with Node.js Streams](https://bun.sh/guides/http/stream-node-streams-in-bun)
- [Upload files via HTTP using FormData](https://bun.sh/guides/http/file-uploads)
- [Write a simple HTTP server](https://bun.sh/guides/http/simple)
- Install
- [Add a dependency](https://bun.com/guides/install/add)
- [Add a Git dependency](https://bun.com/guides/install/add-git)
- [Add a peer dependency](https://bun.com/guides/install/add-peer)
- [Add a trusted dependency](https://bun.com/guides/install/trusted)
- [Add a development dependency](https://bun.com/guides/install/add-dev)
- [Add a tarball dependency](https://bun.com/guides/install/add-tarball)
- [Add an optional dependency](https://bun.com/guides/install/add-optional)
- [Generate a yarn-compatible lockfile](https://bun.com/guides/install/yarnlock)
- [Configuring a monorepo using workspaces](https://bun.com/guides/install/workspaces)
- [Install a package under a different name](https://bun.com/guides/install/npm-alias)
- [Install dependencies with Bun in GitHub Actions](https://bun.com/guides/install/cicd)
- [Using bun install with Artifactory](https://bun.com/guides/install/jfrog-artifactory)
- [Configure git to diff Bun's lockb lockfile](https://bun.com/guides/install/git-diff-bun-lockfile)
- [Override the default npm registry for bun install](https://bun.com/guides/install/custom-registry)
- [Using bun install with an Azure Artifacts npm registry](https://bun.com/guides/install/azure-artifacts)
- [Migrate from npm install to bun install](https://bun.com/guides/install/from-npm-install-to-bun-install)
- [Configure a private registry for an organization scope with bun install](https://bun.com/guides/install/registry-scope)
- [Add a dependency](https://bun.sh/guides/install/add)
- [Add a development dependency](https://bun.sh/guides/install/add-dev)
- [Add a Git dependency](https://bun.sh/guides/install/add-git)
- [Add a peer dependency](https://bun.sh/guides/install/add-peer)
- [Add a tarball dependency](https://bun.sh/guides/install/add-tarball)
- [Add a trusted dependency](https://bun.sh/guides/install/trusted)
- [Add an optional dependency](https://bun.sh/guides/install/add-optional)
- [Configure a private registry for an organization scope with bun install](https://bun.sh/guides/install/registry-scope)
- [Configure git to diff Bun's lockb lockfile](https://bun.sh/guides/install/git-diff-bun-lockfile)
- [Configuring a monorepo using workspaces](https://bun.sh/guides/install/workspaces)
- [Generate a human-readable lockfile](https://bun.sh/guides/install/yarnlock)
- [Install a package under a different name](https://bun.sh/guides/install/npm-alias)
- [Install dependencies with Bun in GitHub Actions](https://bun.sh/guides/install/cicd)
- [Override the default npm registry for bun install](https://bun.sh/guides/install/custom-registry)
- [Using bun install with an Azure Artifacts npm registry](https://bun.sh/guides/install/azure-artifacts)
- [Using bun install with Artifactory](https://bun.sh/guides/install/jfrog-artifactory)
- Process
- [Read from stdin](https://bun.com/guides/process/stdin)
- [Listen for CTRL+C](https://bun.com/guides/process/ctrl-c)
- [Spawn a child process](https://bun.com/guides/process/spawn)
- [Listen to OS signals](https://bun.com/guides/process/os-signals)
- [Parse command-line arguments](https://bun.com/guides/process/argv)
- [Read stderr from a child process](https://bun.com/guides/process/spawn-stderr)
- [Read stdout from a child process](https://bun.com/guides/process/spawn-stdout)
- [Get the process uptime in nanoseconds](https://bun.com/guides/process/nanoseconds)
- [Spawn a child process and communicate using IPC](https://bun.com/guides/process/ipc)
- [Get the process uptime in nanoseconds](https://bun.sh/guides/process/nanoseconds)
- [Listen for CTRL+C](https://bun.sh/guides/process/ctrl-c)
- [Listen to OS signals](https://bun.sh/guides/process/os-signals)
- [Parse command-line arguments](https://bun.sh/guides/process/argv)
- [Read from stdin](https://bun.sh/guides/process/stdin)
- [Read stderr from a child process](https://bun.sh/guides/process/spawn-stderr)
- [Read stdout from a child process](https://bun.sh/guides/process/spawn-stdout)
- [Spawn a child process](https://bun.sh/guides/process/spawn)
- [Spawn a child process and communicate using IPC](https://bun.sh/guides/process/ipc)
- Read file
- [Read a JSON file](https://bun.com/guides/read-file/json)
- [Check if a file exists](https://bun.com/guides/read-file/exists)
- [Read a file as a string](https://bun.com/guides/read-file/string)
- [Read a file to a Buffer](https://bun.com/guides/read-file/buffer)
- [Get the MIME type of a file](https://bun.com/guides/read-file/mime)
- [Watch a directory for changes](https://bun.com/guides/read-file/watch)
- [Read a file as a ReadableStream](https://bun.com/guides/read-file/stream)
- [Read a file to a Uint8Array](https://bun.com/guides/read-file/uint8array)
- [Read a file to an ArrayBuffer](https://bun.com/guides/read-file/arraybuffer)
- [Check if a file exists](https://bun.sh/guides/read-file/exists)
- [Get the MIME type of a file](https://bun.sh/guides/read-file/mime)
- [Read a file as a ReadableStream](https://bun.sh/guides/read-file/stream)
- [Read a file as a string](https://bun.sh/guides/read-file/string)
- [Read a file to a Buffer](https://bun.sh/guides/read-file/buffer)
- [Read a file to a Uint8Array](https://bun.sh/guides/read-file/uint8array)
- [Read a file to an ArrayBuffer](https://bun.sh/guides/read-file/arraybuffer)
- [Read a JSON file](https://bun.sh/guides/read-file/json)
- [Watch a directory for changes](https://bun.sh/guides/read-file/watch)
- Runtime
- [Delete files](https://bun.com/guides/runtime/delete-file)
- [Run a Shell Command](https://bun.com/guides/runtime/shell)
- [Import a JSON file](https://bun.com/guides/runtime/import-json)
- [Import a TOML file](https://bun.com/guides/runtime/import-toml)
- [Set a time zone in Bun](https://bun.com/guides/runtime/timezone)
- [Set environment variables](https://bun.com/guides/runtime/set-env)
- [Re-map import paths](https://bun.com/guides/runtime/tsconfig-paths)
- [Delete directories](https://bun.com/guides/runtime/delete-directory)
- [Read environment variables](https://bun.com/guides/runtime/read-env)
  - [Import an HTML file as text](https://bun.com/guides/runtime/import-html)
- [Install and run Bun in GitHub Actions](https://bun.com/guides/runtime/cicd)
- [Debugging Bun with the web debugger](https://bun.com/guides/runtime/web-debugger)
- [Install TypeScript declarations for Bun](https://bun.com/guides/runtime/typescript)
- [Debugging Bun with the VS Code extension](https://bun.com/guides/runtime/vscode-debugger)
- [Inspect memory usage using V8 heap snapshots](https://bun.com/guides/runtime/heap-snapshot)
- [Define and replace static globals & constants](https://bun.com/guides/runtime/define-constant)
- [Codesign a single-file JavaScript executable on macOS](https://bun.com/guides/runtime/codesign-macos-executable)
- Streams
- [Convert a ReadableStream to JSON](https://bun.com/guides/streams/to-json)
- [Convert a ReadableStream to a Blob](https://bun.com/guides/streams/to-blob)
- [Convert a ReadableStream to a Buffer](https://bun.com/guides/streams/to-buffer)
- [Convert a ReadableStream to a string](https://bun.com/guides/streams/to-string)
- [Convert a ReadableStream to a Uint8Array](https://bun.com/guides/streams/to-typedarray)
- [Convert a ReadableStream to an array of chunks](https://bun.com/guides/streams/to-array)
- [Convert a Node.js Readable to JSON](https://bun.com/guides/streams/node-readable-to-json)
- [Convert a ReadableStream to an ArrayBuffer](https://bun.com/guides/streams/to-arraybuffer)
- [Convert a Node.js Readable to a Blob](https://bun.com/guides/streams/node-readable-to-blob)
- [Convert a Node.js Readable to a string](https://bun.com/guides/streams/node-readable-to-string)
- [Convert a Node.js Readable to an Uint8Array](https://bun.com/guides/streams/node-readable-to-uint8array)
- [Convert a Node.js Readable to an ArrayBuffer](https://bun.com/guides/streams/node-readable-to-arraybuffer)
- Test
- [Spy on methods in `bun test`](https://bun.com/guides/test/spy-on)
- [Bail early with the Bun test runner](https://bun.com/guides/test/bail)
- [Mock functions in `bun test`](https://bun.com/guides/test/mock-functions)
- [Run tests in watch mode with Bun](https://bun.com/guides/test/watch-mode)
- [Use snapshot testing in `bun test`](https://bun.com/guides/test/snapshot)
- [Skip tests with the Bun test runner](https://bun.com/guides/test/skip-tests)
- [Using Testing Library with Bun](https://bun.com/guides/test/testing-library)
- [Update snapshots in `bun test`](https://bun.com/guides/test/update-snapshots)
- [Run your tests with the Bun test runner](https://bun.com/guides/test/run-tests)
- [Set the system time in Bun's test runner](https://bun.com/guides/test/mock-clock)
- [Set a per-test timeout with the Bun test runner](https://bun.com/guides/test/timeout)
- [Migrate from Jest to Bun's test runner](https://bun.com/guides/test/migrate-from-jest)
- [Write browser DOM tests with Bun and happy-dom](https://bun.com/guides/test/happy-dom)
- [Mark a test as a "todo" with the Bun test runner](https://bun.com/guides/test/todo-tests)
- [Re-run tests multiple times with the Bun test runner](https://bun.com/guides/test/rerun-each)
- [Generate code coverage reports with the Bun test runner](https://bun.com/guides/test/coverage)
- [import, require, and test Svelte components with bun test](https://bun.com/guides/test/svelte-test)
- [Set a code coverage threshold with the Bun test runner](https://bun.com/guides/test/coverage-threshold)
- Util
- [Generate a UUID](https://bun.com/guides/util/javascript-uuid)
- [Hash a password](https://bun.com/guides/util/hash-a-password)
- [Escape an HTML string](https://bun.com/guides/util/escape-html)
- [Get the current Bun version](https://bun.com/guides/util/version)
- [Encode and decode base64 strings](https://bun.com/guides/util/base64)
- [Compress and decompress data with gzip](https://bun.com/guides/util/gzip)
- [Sleep for a fixed number of milliseconds](https://bun.com/guides/util/sleep)
- [Detect when code is executed with Bun](https://bun.com/guides/util/detect-bun)
- [Check if two objects are deeply equal](https://bun.com/guides/util/deep-equals)
- [Compress and decompress data with DEFLATE](https://bun.com/guides/util/deflate)
- [Get the absolute path to the current entrypoint](https://bun.com/guides/util/main)
- [Get the directory of the current file](https://bun.com/guides/util/import-meta-dir)
- [Check if the current file is the entrypoint](https://bun.com/guides/util/entrypoint)
- [Get the file name of the current file](https://bun.com/guides/util/import-meta-file)
- [Convert a file URL to an absolute path](https://bun.com/guides/util/file-url-to-path)
- [Convert an absolute path to a file URL](https://bun.com/guides/util/path-to-file-url)
- [Get the absolute path of the current file](https://bun.com/guides/util/import-meta-path)
- [Get the path to an executable bin file](https://bun.com/guides/util/which-path-to-executable-bin)
- WebSocket
- [Build a publish-subscribe WebSocket server](https://bun.com/guides/websocket/pubsub)
- [Build a simple WebSocket server](https://bun.com/guides/websocket/simple)
- [Enable compression for WebSocket messages](https://bun.com/guides/websocket/compression)
- [Set per-socket contextual data on a WebSocket](https://bun.com/guides/websocket/context)
- Write file
- [Delete a file](https://bun.com/guides/write-file/unlink)
- [Write to stdout](https://bun.com/guides/write-file/stdout)
- [Write a file to stdout](https://bun.com/guides/write-file/cat)
- [Write a Blob to a file](https://bun.com/guides/write-file/blob)
- [Write a string to a file](https://bun.com/guides/write-file/basic)
- [Append content to a file](https://bun.com/guides/write-file/append)
- [Write a file incrementally](https://bun.com/guides/write-file/filesink)
- [Write a Response to a file](https://bun.com/guides/write-file/response)
- [Copy a file to another location](https://bun.com/guides/write-file/file-cp)
- [Write a ReadableStream to a file](https://bun.com/guides/write-file/stream)
## Contributing
Refer to the [Project > Contributing](https://bun.com/docs/project/contributing) guide to start contributing to Bun.
## License
Refer to the [Project > License](https://bun.com/docs/project/licensing) page for information about Bun's licensing.

View File

@@ -8,4 +8,5 @@
## Reporting a Vulnerability
Report any discovered vulnerabilities to the Bun team by emailing `security@bun.com`. Your report will be acknowledged within 5 days, and a team member will be assigned as the primary handler. To the greatest extent possible, the security team will endeavor to keep you informed of the progress being made towards a fix and full announcement, and may ask for additional information or guidance surrounding the reported issue.

View File

@@ -1,5 +1,5 @@
{
"lockfileVersion": 1,
"lockfileVersion": 0,
"workspaces": {
"": {
"name": "bench",

View File

@@ -1,44 +0,0 @@
import { bench, run } from "../runner.mjs";
import crypto from "node:crypto";
import { Buffer } from "node:buffer";
const keylen = { "aes-128-gcm": 16, "aes-192-gcm": 24, "aes-256-gcm": 32 };
const sizes = [4 * 1024, 1024 * 1024];
const ciphers = ["aes-128-gcm", "aes-192-gcm", "aes-256-gcm"];
const messages = {};
sizes.forEach(size => {
messages[size] = Buffer.alloc(size, "b");
});
const keys = {};
ciphers.forEach(cipher => {
keys[cipher] = crypto.randomBytes(keylen[cipher]);
});
// Fixed IV and AAD
const iv = crypto.randomBytes(12);
const associate_data = Buffer.alloc(16, "z");
for (const cipher of ciphers) {
for (const size of sizes) {
const message = messages[size];
const key = keys[cipher];
bench(`${cipher} ${size / 1024}KB`, () => {
const alice = crypto.createCipheriv(cipher, key, iv);
alice.setAAD(associate_data);
const enc = alice.update(message);
alice.final();
const tag = alice.getAuthTag();
const bob = crypto.createDecipheriv(cipher, key, iv);
bob.setAuthTag(tag);
bob.setAAD(associate_data);
bob.update(enc);
bob.final();
});
}
}
await run();

View File

@@ -1,53 +0,0 @@
import crypto from "node:crypto";
import { bench, run } from "../runner.mjs";
// Pre-generate DH params to avoid including setup in benchmarks
const dhSize = 1024; // Reduced from 2048 for faster testing
const dh = crypto.createDiffieHellman(dhSize);
const dhPrime = dh.getPrime();
const dhGenerator = dh.getGenerator();
// Classical Diffie-Hellman
bench("DH - generateKeys", () => {
const alice = crypto.createDiffieHellman(dhPrime, dhGenerator);
return alice.generateKeys();
});
bench("DH - computeSecret", () => {
// Setup
const alice = crypto.createDiffieHellman(dhPrime, dhGenerator);
const aliceKey = alice.generateKeys();
const bob = crypto.createDiffieHellman(dhPrime, dhGenerator);
const bobKey = bob.generateKeys();
// Benchmark just the secret computation
return alice.computeSecret(bobKey);
});
// ECDH with prime256v1 (P-256)
bench("ECDH-P256 - generateKeys", () => {
const ecdh = crypto.createECDH("prime256v1");
return ecdh.generateKeys();
});
bench("ECDH-P256 - computeSecret", () => {
// Setup
const alice = crypto.createECDH("prime256v1");
const aliceKey = alice.generateKeys();
const bob = crypto.createECDH("prime256v1");
const bobKey = bob.generateKeys();
// Benchmark just the secret computation
return alice.computeSecret(bobKey);
});
// ECDH with secp384r1 (P-384)
bench("ECDH-P384 - computeSecret", () => {
const alice = crypto.createECDH("secp384r1");
const aliceKey = alice.generateKeys();
const bob = crypto.createECDH("secp384r1");
const bobKey = bob.generateKeys();
return alice.computeSecret(bobKey);
});
await run();

View File

@@ -1,44 +0,0 @@
import crypto from "node:crypto";
import { bench, run } from "../runner.mjs";
function generateTestKeyPairs() {
const curves = crypto.getCurves();
const keys = {};
for (const curve of curves) {
const ecdh = crypto.createECDH(curve);
ecdh.generateKeys();
keys[curve] = {
compressed: ecdh.getPublicKey("hex", "compressed"),
uncompressed: ecdh.getPublicKey("hex", "uncompressed"),
instance: ecdh,
};
}
return keys;
}
const testKeys = generateTestKeyPairs();
bench("ECDH key format - P256 compressed to uncompressed", () => {
const publicKey = testKeys["prime256v1"].compressed;
return crypto.ECDH.convertKey(publicKey, "prime256v1", "hex", "hex", "uncompressed");
});
bench("ECDH key format - P256 uncompressed to compressed", () => {
const publicKey = testKeys["prime256v1"].uncompressed;
return crypto.ECDH.convertKey(publicKey, "prime256v1", "hex", "hex", "compressed");
});
bench("ECDH key format - P384 compressed to uncompressed", () => {
const publicKey = testKeys["secp384r1"].compressed;
return crypto.ECDH.convertKey(publicKey, "secp384r1", "hex", "hex", "uncompressed");
});
bench("ECDH key format - P384 uncompressed to compressed", () => {
const publicKey = testKeys["secp384r1"].uncompressed;
return crypto.ECDH.convertKey(publicKey, "secp384r1", "hex", "hex", "compressed");
});
await run();

View File

@@ -1,50 +0,0 @@
import crypto from "node:crypto";
import { bench, run } from "../runner.mjs";
// Sample keys with different lengths
const keys = {
short: "secret",
long: "this-is-a-much-longer-secret-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
};
// Test parameters
const salts = ["", "salt"];
const infos = ["", "info"];
const hashes = ["sha256", "sha512"];
const sizes = [10, 1024];
// Benchmark sync HKDF
for (const hash of hashes) {
for (const keyName of Object.keys(keys)) {
const key = keys[keyName];
for (const size of sizes) {
bench(`hkdfSync ${hash} ${keyName}-key ${size} bytes`, () => {
return crypto.hkdfSync(hash, key, "salt", "info", size);
});
}
}
}
// Benchmark different combinations of salt and info
for (const salt of salts) {
for (const info of infos) {
bench(`hkdfSync sha256 with ${salt ? "salt" : "no-salt"} and ${info ? "info" : "no-info"}`, () => {
return crypto.hkdfSync("sha256", "secret", salt, info, 64);
});
}
}
// Benchmark async HKDF (using promises for cleaner benchmark)
// Note: async benchmarks in Mitata require returning a Promise
for (const hash of hashes) {
bench(`hkdf ${hash} async`, async () => {
return new Promise((resolve, reject) => {
crypto.hkdf(hash, "secret", "salt", "info", 64, (err, derivedKey) => {
if (err) reject(err);
else resolve(derivedKey);
});
});
});
}
await run();

View File

@@ -1,43 +0,0 @@
import { checkPrime, checkPrimeSync, generatePrime, generatePrimeSync } from "node:crypto";
import { bench, run } from "../runner.mjs";
const prime512 = generatePrimeSync(512);
const prime2048 = generatePrimeSync(2048);
bench("checkPrimeSync 512", () => {
return checkPrimeSync(prime512);
});
bench("checkPrimeSync 2048", () => {
return checkPrimeSync(prime2048);
});
bench("checkPrime 512", async () => {
const promises = Array.from({ length: 10 }, () => new Promise(resolve => checkPrime(prime512, resolve)));
await Promise.all(promises);
});
bench("checkPrime 2048", async () => {
const promises = Array.from({ length: 10 }, () => new Promise(resolve => checkPrime(prime2048, resolve)));
await Promise.all(promises);
});
bench("generatePrimeSync 512", () => {
return generatePrimeSync(512);
});
bench("generatePrimeSync 2048", () => {
return generatePrimeSync(2048);
});
bench("generatePrime 512", async () => {
const promises = Array.from({ length: 10 }, () => new Promise(resolve => generatePrime(512, resolve)));
await Promise.all(promises);
});
bench("generatePrime 2048", async () => {
const promises = Array.from({ length: 10 }, () => new Promise(resolve => generatePrime(2048, resolve)));
await Promise.all(promises);
});
await run();

View File

@@ -1,50 +0,0 @@
import crypto from "crypto";
import { bench, run } from "../runner.mjs";
bench("randomInt - sync", () => {
crypto.randomInt(1000);
});
bench("randomInt - async", async () => {
const { promise, resolve } = Promise.withResolvers();
crypto.randomInt(1000, () => {
resolve();
});
await promise;
});
bench("randonBytes - 32", () => {
crypto.randomBytes(32);
});
bench("randomBytes - 256", () => {
crypto.randomBytes(256);
});
const buf = Buffer.alloc(256);
bench("randomFill - 32", async () => {
const { promise, resolve } = Promise.withResolvers();
crypto.randomFill(buf, 0, 32, () => {
resolve();
});
await promise;
});
bench("randomFill - 256", async () => {
const { promise, resolve } = Promise.withResolvers();
crypto.randomFill(buf, 0, 256, () => {
resolve();
});
await promise;
});
bench("randomFillSync - 32", () => {
crypto.randomFillSync(buf, 0, 32);
});
bench("randomFillSync - 256", () => {
crypto.randomFillSync(buf, 0, 256);
});
await run();

View File

@@ -40,4 +40,4 @@ vitest (node v18.11.0)
> expect().toEqual() x 10000: 401.08ms
This project was created using `bun init` in bun v0.3.0. [Bun](https://bun.com) is a fast all-in-one JavaScript runtime.

View File

@@ -1,5 +1,5 @@
{
"lockfileVersion": 1,
"lockfileVersion": 0,
"workspaces": {
"": {
"name": "expect-to-equal",
@@ -136,7 +136,7 @@
"@jest/console": ["@jest/console@29.4.3", "", { "dependencies": { "@jest/types": "^29.4.3", "@types/node": "*", "chalk": "^4.0.0", "jest-message-util": "^29.4.3", "jest-util": "^29.4.3", "slash": "^3.0.0" } }, "sha512-W/o/34+wQuXlgqlPYTansOSiBnuxrTv61dEVkA6HNmpcgHLUjfaUbdqt6oVvOzaawwo9IdW9QOtMgQ1ScSZC4A=="],
"@jest/core": ["@jest/core@29.4.3", "", { "dependencies": { "@jest/console": "^29.4.3", "@jest/reporters": "^29.4.3", "@jest/test-result": "^29.4.3", "@jest/transform": "^29.4.3", "@jest/types": "^29.4.3", "@types/node": "*", "ansi-escapes": "^4.2.1", "chalk": "^4.0.0", "ci-info": "^3.2.0", "exit": "^0.1.2", "graceful-fs": "^4.2.9", "jest-changed-files": "^29.4.3", "jest-config": "^29.4.3", "jest-haste-map": "^29.4.3", "jest-message-util": "^29.4.3", "jest-regex-util": "^29.4.3", "jest-resolve": "^29.4.3", "jest-resolve-dependencies": "^29.4.3", "jest-runner": "^29.4.3", "jest-runtime": "^29.4.3", "jest-snapshot": "^29.4.3", "jest-util": "^29.4.3", "jest-validate": "^29.4.3", "jest-watcher": "^29.4.3", "micromatch": "^4.0.4", "pretty-format": "^29.4.3", "slash": "^3.0.0", "strip-ansi": "^6.0.0" }, "peerDependencies": { "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" }, "optionalPeers": ["node-notifier"] }, "sha512-56QvBq60fS4SPZCuM7T+7scNrkGIe7Mr6PVIXUpu48ouvRaWOFqRPV91eifvFM0ay2HmfswXiGf97NGUN5KofQ=="],
"@jest/core": ["@jest/core@29.4.3", "", { "dependencies": { "@jest/console": "^29.4.3", "@jest/reporters": "^29.4.3", "@jest/test-result": "^29.4.3", "@jest/transform": "^29.4.3", "@jest/types": "^29.4.3", "@types/node": "*", "ansi-escapes": "^4.2.1", "chalk": "^4.0.0", "ci-info": "^3.2.0", "exit": "^0.1.2", "graceful-fs": "^4.2.9", "jest-changed-files": "^29.4.3", "jest-config": "^29.4.3", "jest-haste-map": "^29.4.3", "jest-message-util": "^29.4.3", "jest-regex-util": "^29.4.3", "jest-resolve": "^29.4.3", "jest-resolve-dependencies": "^29.4.3", "jest-runner": "^29.4.3", "jest-runtime": "^29.4.3", "jest-snapshot": "^29.4.3", "jest-util": "^29.4.3", "jest-validate": "^29.4.3", "jest-watcher": "^29.4.3", "micromatch": "^4.0.4", "pretty-format": "^29.4.3", "slash": "^3.0.0", "strip-ansi": "^6.0.0" }, "peerDependencies": { "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" } }, "sha512-56QvBq60fS4SPZCuM7T+7scNrkGIe7Mr6PVIXUpu48ouvRaWOFqRPV91eifvFM0ay2HmfswXiGf97NGUN5KofQ=="],
"@jest/environment": ["@jest/environment@29.4.3", "", { "dependencies": { "@jest/fake-timers": "^29.4.3", "@jest/types": "^29.4.3", "@types/node": "*", "jest-mock": "^29.4.3" } }, "sha512-dq5S6408IxIa+lr54zeqce+QgI+CJT4nmmA+1yzFgtcsGK8c/EyiUb9XQOgz3BMKrRDfKseeOaxj2eO8LlD3lA=="],
@@ -148,7 +148,7 @@
"@jest/globals": ["@jest/globals@29.4.3", "", { "dependencies": { "@jest/environment": "^29.4.3", "@jest/expect": "^29.4.3", "@jest/types": "^29.4.3", "jest-mock": "^29.4.3" } }, "sha512-8BQ/5EzfOLG7AaMcDh7yFCbfRLtsc+09E1RQmRBI4D6QQk4m6NSK/MXo+3bJrBN0yU8A2/VIcqhvsOLFmziioA=="],
"@jest/reporters": ["@jest/reporters@29.4.3", "", { "dependencies": { "@bcoe/v8-coverage": "^0.2.3", "@jest/console": "^29.4.3", "@jest/test-result": "^29.4.3", "@jest/transform": "^29.4.3", "@jest/types": "^29.4.3", "@jridgewell/trace-mapping": "^0.3.15", "@types/node": "*", "chalk": "^4.0.0", "collect-v8-coverage": "^1.0.0", "exit": "^0.1.2", "glob": "^7.1.3", "graceful-fs": "^4.2.9", "istanbul-lib-coverage": "^3.0.0", "istanbul-lib-instrument": "^5.1.0", "istanbul-lib-report": "^3.0.0", "istanbul-lib-source-maps": "^4.0.0", "istanbul-reports": "^3.1.3", "jest-message-util": "^29.4.3", "jest-util": "^29.4.3", "jest-worker": "^29.4.3", "slash": "^3.0.0", "string-length": "^4.0.1", "strip-ansi": "^6.0.0", "v8-to-istanbul": "^9.0.1" }, "peerDependencies": { "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" }, "optionalPeers": ["node-notifier"] }, "sha512-sr2I7BmOjJhyqj9ANC6CTLsL4emMoka7HkQpcoMRlhCbQJjz2zsRzw0BDPiPyEFDXAbxKgGFYuQZiSJ1Y6YoTg=="],
"@jest/reporters": ["@jest/reporters@29.4.3", "", { "dependencies": { "@bcoe/v8-coverage": "^0.2.3", "@jest/console": "^29.4.3", "@jest/test-result": "^29.4.3", "@jest/transform": "^29.4.3", "@jest/types": "^29.4.3", "@jridgewell/trace-mapping": "^0.3.15", "@types/node": "*", "chalk": "^4.0.0", "collect-v8-coverage": "^1.0.0", "exit": "^0.1.2", "glob": "^7.1.3", "graceful-fs": "^4.2.9", "istanbul-lib-coverage": "^3.0.0", "istanbul-lib-instrument": "^5.1.0", "istanbul-lib-report": "^3.0.0", "istanbul-lib-source-maps": "^4.0.0", "istanbul-reports": "^3.1.3", "jest-message-util": "^29.4.3", "jest-util": "^29.4.3", "jest-worker": "^29.4.3", "slash": "^3.0.0", "string-length": "^4.0.1", "strip-ansi": "^6.0.0", "v8-to-istanbul": "^9.0.1" }, "peerDependencies": { "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" } }, "sha512-sr2I7BmOjJhyqj9ANC6CTLsL4emMoka7HkQpcoMRlhCbQJjz2zsRzw0BDPiPyEFDXAbxKgGFYuQZiSJ1Y6YoTg=="],
"@jest/schemas": ["@jest/schemas@29.4.3", "", { "dependencies": { "@sinclair/typebox": "^0.25.16" } }, "sha512-VLYKXQmtmuEz6IxJsrZwzG9NvtkQsWNnWMsKxqWNu3+CnfzJQhp0WDDKWLVV9hLKr0l3SLLFRqcYHjhtyuDVxg=="],
@@ -384,13 +384,13 @@
"istanbul-reports": ["istanbul-reports@3.1.5", "", { "dependencies": { "html-escaper": "^2.0.0", "istanbul-lib-report": "^3.0.0" } }, "sha512-nUsEMa9pBt/NOHqbcbeJEgqIlY/K7rVWUX6Lql2orY5e9roQOthbR3vtY4zzf2orPELg80fnxxk9zUyPlgwD1w=="],
"jest": ["jest@29.4.3", "", { "dependencies": { "@jest/core": "^29.4.3", "@jest/types": "^29.4.3", "import-local": "^3.0.2", "jest-cli": "^29.4.3" }, "peerDependencies": { "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" }, "optionalPeers": ["node-notifier"], "bin": { "jest": "bin/jest.js" } }, "sha512-XvK65feuEFGZT8OO0fB/QAQS+LGHvQpaadkH5p47/j3Ocqq3xf2pK9R+G0GzgfuhXVxEv76qCOOcMb5efLk6PA=="],
"jest": ["jest@29.4.3", "", { "dependencies": { "@jest/core": "^29.4.3", "@jest/types": "^29.4.3", "import-local": "^3.0.2", "jest-cli": "^29.4.3" }, "peerDependencies": { "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" }, "bin": { "jest": "bin/jest.js" } }, "sha512-XvK65feuEFGZT8OO0fB/QAQS+LGHvQpaadkH5p47/j3Ocqq3xf2pK9R+G0GzgfuhXVxEv76qCOOcMb5efLk6PA=="],
"jest-changed-files": ["jest-changed-files@29.4.3", "", { "dependencies": { "execa": "^5.0.0", "p-limit": "^3.1.0" } }, "sha512-Vn5cLuWuwmi2GNNbokPOEcvrXGSGrqVnPEZV7rC6P7ck07Dyw9RFnvWglnupSh+hGys0ajGtw/bc2ZgweljQoQ=="],
"jest-circus": ["jest-circus@29.4.3", "", { "dependencies": { "@jest/environment": "^29.4.3", "@jest/expect": "^29.4.3", "@jest/test-result": "^29.4.3", "@jest/types": "^29.4.3", "@types/node": "*", "chalk": "^4.0.0", "co": "^4.6.0", "dedent": "^0.7.0", "is-generator-fn": "^2.0.0", "jest-each": "^29.4.3", "jest-matcher-utils": "^29.4.3", "jest-message-util": "^29.4.3", "jest-runtime": "^29.4.3", "jest-snapshot": "^29.4.3", "jest-util": "^29.4.3", "p-limit": "^3.1.0", "pretty-format": "^29.4.3", "slash": "^3.0.0", "stack-utils": "^2.0.3" } }, "sha512-Vw/bVvcexmdJ7MLmgdT3ZjkJ3LKu8IlpefYokxiqoZy6OCQ2VAm6Vk3t/qHiAGUXbdbJKJWnc8gH3ypTbB/OBw=="],
"jest-cli": ["jest-cli@29.4.3", "", { "dependencies": { "@jest/core": "^29.4.3", "@jest/test-result": "^29.4.3", "@jest/types": "^29.4.3", "chalk": "^4.0.0", "exit": "^0.1.2", "graceful-fs": "^4.2.9", "import-local": "^3.0.2", "jest-config": "^29.4.3", "jest-util": "^29.4.3", "jest-validate": "^29.4.3", "prompts": "^2.0.1", "yargs": "^17.3.1" }, "peerDependencies": { "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" }, "optionalPeers": ["node-notifier"], "bin": { "jest": "bin/jest.js" } }, "sha512-PiiAPuFNfWWolCE6t3ZrDXQc6OsAuM3/tVW0u27UWc1KE+n/HSn5dSE6B2juqN7WP+PP0jAcnKtGmI4u8GMYCg=="],
"jest-cli": ["jest-cli@29.4.3", "", { "dependencies": { "@jest/core": "^29.4.3", "@jest/test-result": "^29.4.3", "@jest/types": "^29.4.3", "chalk": "^4.0.0", "exit": "^0.1.2", "graceful-fs": "^4.2.9", "import-local": "^3.0.2", "jest-config": "^29.4.3", "jest-util": "^29.4.3", "jest-validate": "^29.4.3", "prompts": "^2.0.1", "yargs": "^17.3.1" }, "peerDependencies": { "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" }, "bin": { "jest": "bin/jest.js" } }, "sha512-PiiAPuFNfWWolCE6t3ZrDXQc6OsAuM3/tVW0u27UWc1KE+n/HSn5dSE6B2juqN7WP+PP0jAcnKtGmI4u8GMYCg=="],
"jest-config": ["jest-config@29.4.3", "", { "dependencies": { "@babel/core": "^7.11.6", "@jest/test-sequencer": "^29.4.3", "@jest/types": "^29.4.3", "babel-jest": "^29.4.3", "chalk": "^4.0.0", "ci-info": "^3.2.0", "deepmerge": "^4.2.2", "glob": "^7.1.3", "graceful-fs": "^4.2.9", "jest-circus": "^29.4.3", "jest-environment-node": "^29.4.3", "jest-get-type": "^29.4.3", "jest-regex-util": "^29.4.3", "jest-resolve": "^29.4.3", "jest-runner": "^29.4.3", "jest-util": "^29.4.3", "jest-validate": "^29.4.3", "micromatch": "^4.0.4", "parse-json": "^5.2.0", "pretty-format": "^29.4.3", "slash": "^3.0.0", "strip-json-comments": "^3.1.1" }, "peerDependencies": { "@types/node": "*", "ts-node": ">=9.0.0" }, "optionalPeers": ["ts-node"] }, "sha512-eCIpqhGnIjdUCXGtLhz4gdDoxKSWXKjzNcc5r+0S1GKOp2fwOipx5mRcwa9GB/ArsxJ1jlj2lmlD9bZAsBxaWQ=="],
@@ -600,7 +600,7 @@
"vite": ["vite@4.1.2", "", { "dependencies": { "esbuild": "^0.16.14", "fsevents": "~2.3.2", "postcss": "^8.4.21", "resolve": "^1.22.1", "rollup": "^3.10.0" }, "peerDependencies": { "@types/node": ">= 14", "less": "*", "sass": "*", "stylus": "*", "sugarss": "*", "terser": "^5.4.0" }, "optionalPeers": ["less", "sass", "stylus", "sugarss", "terser"], "bin": { "vite": "bin/vite.js" } }, "sha512-MWDb9Rfy3DI8omDQySbMK93nQqStwbsQWejXRY2EBzEWKmLAXWb1mkI9Yw2IJrc+oCvPCI1Os5xSSIBYY6DEAw=="],
"vitest": ["vitest@0.25.8", "", { "dependencies": { "@types/chai": "^4.3.4", "@types/chai-subset": "^1.3.3", "@types/node": "*", "acorn": "^8.8.1", "acorn-walk": "^8.2.0", "chai": "^4.3.7", "debug": "^4.3.4", "local-pkg": "^0.4.2", "source-map": "^0.6.1", "strip-literal": "^1.0.0", "tinybench": "^2.3.1", "tinypool": "^0.3.0", "tinyspy": "^1.0.2", "vite": "^3.0.0 || ^4.0.0" }, "peerDependencies": { "@edge-runtime/vm": "*", "@vitest/browser": "*", "@vitest/ui": "*", "happy-dom": "*", "jsdom": "*" }, "optionalPeers": ["@edge-runtime/vm", "@vitest/browser", "@vitest/ui", "happy-dom", "jsdom"], "bin": { "vitest": "vitest.mjs" } }, "sha512-X75TApG2wZTJn299E/TIYevr4E9/nBo1sUtZzn0Ci5oK8qnpZAZyhwg0qCeMSakGIWtc6oRwcQFyFfW14aOFWg=="],
"vitest": ["vitest@0.25.8", "", { "dependencies": { "@types/chai": "^4.3.4", "@types/chai-subset": "^1.3.3", "@types/node": "*", "acorn": "^8.8.1", "acorn-walk": "^8.2.0", "chai": "^4.3.7", "debug": "^4.3.4", "local-pkg": "^0.4.2", "source-map": "^0.6.1", "strip-literal": "^1.0.0", "tinybench": "^2.3.1", "tinypool": "^0.3.0", "tinyspy": "^1.0.2", "vite": "^3.0.0 || ^4.0.0" }, "peerDependencies": { "@edge-runtime/vm": "*", "@vitest/browser": "*", "@vitest/ui": "*", "happy-dom": "*", "jsdom": "*" }, "optionalPeers": ["@vitest/browser", "@vitest/ui", "happy-dom", "jsdom"], "bin": { "vitest": "vitest.mjs" } }, "sha512-X75TApG2wZTJn299E/TIYevr4E9/nBo1sUtZzn0Ci5oK8qnpZAZyhwg0qCeMSakGIWtc6oRwcQFyFfW14aOFWg=="],
"walker": ["walker@1.0.8", "", { "dependencies": { "makeerror": "1.0.12" } }, "sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ=="],

View File

@@ -1,175 +0,0 @@
# Based on https://raw.githubusercontent.com/github/gitignore/main/Node.gitignore
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*
# Caches
.cache
# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
*.lcov
# nyc test coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
jspm_packages/
# Snowpack dependency directory (https://snowpack.dev/)
web_modules/
# TypeScript cache
*.tsbuildinfo
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional stylelint cache
.stylelintcache
# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variable files
.env
.env.development.local
.env.test.local
.env.production.local
.env.local
# parcel-bundler cache (https://parceljs.org/)
.parcel-cache
# Next.js build output
.next
out
# Nuxt.js build / generate output
.nuxt
dist
# Gatsby files
# Comment in the public line in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public
# vuepress build output
.vuepress/dist
# vuepress v2.x temp and cache directory
.temp
# Docusaurus cache and generated files
.docusaurus
# Serverless directories
.serverless/
# FuseBox cache
.fusebox/
# DynamoDB Local files
.dynamodb/
# TernJS port file
.tern-port
# Stores VSCode versions used for testing VSCode extensions
.vscode-test
# yarn v2
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*
# IntelliJ based IDEs
.idea
# Finder (MacOS) folder config
.DS_Store

View File

@@ -1,37 +0,0 @@
# express benchmark
This benchmarks a hello world express server.
To install dependencies:
```bash
bun install
```
To run in Bun:
```sh
bun ./express.mjs
```
To run in Node:
```sh
node ./express.mjs
```
To run in Deno:
```sh
deno run -A ./express.mjs
```
To benchmark each runtime:
```bash
oha http://localhost:3000 -n 500000 -H "Accept-Encoding: identity"
```
We recommend using `oha` or `bombardier` for benchmarking. We do not recommend using `ab`, as it uses HTTP/1.0 which stopped being used by web browsers in the early 2000s. We also do not recommend using autocannon, as the node:http client is not performant enough to measure the throughput of Bun's HTTP server.
Note that the `Accept-Encoding: identity` header exists to prevent Deno's HTTP server from compressing the response.
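Before pointing a load generator at the server, it can help to confirm the endpoint responds as expected. A throwaway sanity check (not a benchmark; assumes the default port of 3000):
```js
// Sanity check only: fetch is not suitable for measuring server throughput.
const res = await fetch("http://localhost:3000/", {
  headers: { "Accept-Encoding": "identity" },
});
console.log(res.status, await res.text());
```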

Binary file not shown.

View File

@@ -1,14 +0,0 @@
// See the README.md for more information
import express from "express";
const app = express();
const port = process.env.PORT || 3000;
let i = 0;
app.get("/", (req, res) => {
res.send("Hello World! (request number: " + i++ + ")");
});
app.listen(port, () => {
console.log(`Express server listening on port ${port}`);
});

View File

@@ -1,14 +0,0 @@
{
"name": "express",
"module": "index.ts",
"type": "module",
"devDependencies": {
"@types/bun": "latest"
},
"peerDependencies": {
"typescript": "^5.0.0"
},
"dependencies": {
"express": "5"
}
}

View File

@@ -1,27 +0,0 @@
{
"compilerOptions": {
// Enable latest features
"lib": ["ESNext"],
"target": "ESNext",
"module": "Preserve",
"moduleDetection": "force",
"jsx": "react-jsx",
"allowJs": true,
// Bundler mode
"moduleResolution": "bundler",
"allowImportingTsExtensions": true,
"verbatimModuleSyntax": true,
"noEmit": true,
// Best practices
"strict": true,
"skipLibCheck": true,
"noFallthroughCasesInSwitch": true,
// Some stricter flags (disabled by default)
"noUnusedLocals": false,
"noUnusedParameters": false,
"noPropertyAccessFromIndexSignature": false
}
}

View File

@@ -1,5 +1,5 @@
{
"lockfileVersion": 1,
"lockfileVersion": 0,
"workspaces": {
"": {
"name": "bench",

View File

@@ -1,27 +1,19 @@
import micromatch from "micromatch";
import { bench, run } from "../runner.mjs";
const Glob = typeof Bun !== "undefined" ? Bun.Glob : undefined;
const doMatch = typeof Bun === "undefined" ? micromatch.isMatch : (a, b) => new Glob(b).match(a);
// Benchmark one glob pattern against a representative path, using Bun.Glob
// under Bun and micromatch everywhere else.
function benchPattern(name, glob, pattern) {
  bench(name, () => {
    doMatch(pattern, glob);
  });
}
benchPattern("max-depth", "1{2,3{4,5{6,7{8,9{a,b{c,d{e,f{g,h{i,j{k,l}}}}}}}}}}m", "13579bdfhjlm");
benchPattern("non-ascii", "😎/¢£.{ts,tsx,js,jsx}", "😎/¢£.jsx");
benchPattern("utf8", "フォルダ/**/*", "フォルダ/aaa.js");
benchPattern("non-ascii+max-depth", "1{2,3{4,5{6,7{8,😎{a,b{c,d{e,f{g,h{i,j{k,l}}}}}}}}}}m", "1357😎bdfhjlm");
benchPattern("pretty-average", "test/{foo/**,bar}/baz", "test/bar/baz");
benchPattern("pretty-average-2", "a/**/c/*.md", "a/bb.bb/aa/b.b/aa/c/xyz.md");
benchPattern("pretty-average-3", "a/b/**/c{d,e}/**/xyz.md", "a/b/cd/xyz.md");
benchPattern("pretty-average-4", "foo/bar/**/one/**/*.*", "foo/bar/baz/one/two/three/image.png");
benchPattern("long-pretty-average", "some/**/needle.{js,tsx,mdx,ts,jsx,txt}", "some/a/bigger/path/to/the/crazy/needle.txt");
benchPattern("brackets-lots", "f[^eiu][^eiu][^eiu][^eiu][^eiu]r", "foo-bar");
bench((Glob ? "Bun.Glob - " : "micromatch - ") + "**/*.js", () => {
  doMatch("foo/bar.js", "**/*.js");
});
bench((Glob ? "Bun.Glob - " : "micromatch - ") + "*.js", () => {
  doMatch("bar.js", "*.js");
});
await run({
  avg: true,
  min_max: true,
  percentiles: true,
});

View File

@@ -1,19 +0,0 @@
import micromatch from "micromatch";
import { bench, run } from "../runner.mjs";
const Glob = typeof Bun !== "undefined" ? Bun.Glob : undefined;
const doMatch = typeof Bun === "undefined" ? micromatch.isMatch : (a, b) => new Glob(b).match(a);
bench((Glob ? "Bun.Glob - " : "micromatch - ") + "**/*.js", () => {
doMatch("foo/bar.js", "**/*.js");
});
bench((Glob ? "Bun.Glob - " : "micromatch - ") + "*.js", () => {
doMatch("bar.js", "*.js");
});
await run({
avg: true,
min_max: true,
percentiles: true,
});

View File

@@ -1,5 +1,5 @@
{
"lockfileVersion": 1,
"lockfileVersion": 0,
"workspaces": {
"": {
"name": "bench",

View File

@@ -0,0 +1,5 @@
bun
next
src/*.mov
src/*.blob

View File

@@ -0,0 +1,77 @@
SLEEP_INTERVAL ?= 32
SCREEN_WIDTH ?= $(shell system_profiler -json SPDisplaysDataType 2>/dev/null | jq -r '.. | objects | select(.spdisplays_main) | ._spdisplays_pixels | split(" ")[0]')
SCREEN_HEIGHT ?= $(shell system_profiler -json SPDisplaysDataType 2>/dev/null | jq -r '.. | objects | select(.spdisplays_main) | ._spdisplays_pixels | split(" ")[2]')
PROJECT ?= bun
PACKAGE_NAME ?= bun-cli
RUN_COUNT ?= 128
ENDPOINT ?= /
ifeq ($(PROJECT),bun)
PACKAGE_NAME := bun-cli
endif
ifeq ($(PROJECT),next)
PACKAGE_NAME := next
endif
generate:
@killall -9 bun next node || echo ""
PROJECT=$(PROJECT) SCREEN_WIDTH=$(SCREEN_WIDTH) SCREEN_HEIGHT=$(SCREEN_HEIGHT) ENDPOINT=$(ENDPOINT) node browser.js
generate-css-in-js:
@killall -9 bun next node || echo ""
PROJECT=$(PROJECT) SCREEN_WIDTH=$(SCREEN_WIDTH) SCREEN_HEIGHT=$(SCREEN_HEIGHT) ENDPOINT=/css-in-js node browser.js
loop:
cp src/colors.css.0 src/colors.css
sleep 3
osascript -e 'tell application "System Events" to tell process "Chromium"' \
-e 'set frontmost to true' \
-e 'if windows is not {} then perform action "AXRaise" of item 1 of windows' \
-e 'end tell'
sleep 0.5
cd src; zig run -Doptimize=ReleaseFast ../color-looper.zig -- ./colors.css:0 $(SLEEP_INTERVAL)
cp src/colors.css.blob $(PROJECT)/colors.css.blob
loop-emotion:
cp src/css-in-js-styles.0 src/css-in-js-styles.tsx
sleep 3
osascript -e 'tell application "System Events" to tell process "Chromium"' \
-e 'set frontmost to true' \
-e 'if windows is not {} then perform action "AXRaise" of item 1 of windows' \
-e 'end tell'
sleep 0.5
cd src; zig run -Doptimize=ReleaseFast ../color-looper.emotion.zig -- ./css-in-js-styles.tsx:0 $(SLEEP_INTERVAL)
cp src/css-in-js-styles.tsx.blob $(PROJECT)/css-in-js-styles.blob
process_video:
rm -rf $(FRAMES_DIR); mkdir -p $(FRAMES_DIR); ffmpeg -i src/colors.css.mov -vf fps=120,format=gray $(FRAMES_DIR)/%d.tif
FRAMES_DIR ?= $(shell mkdir -p ./$(PROJECT)/frames; realpath ./$(PROJECT)/frames)
TIF_FILES := $(wildcard $(FRAMES_DIR)/*.tif)
TXT_FILES := $(wildcard $(FRAMES_DIR)/*.txt)
OBJ_FILES := $(patsubst $(SRC_DIR)/%.tif,$(OBJ_DIR)/%.txt,$(TIF_FILES))
TRIM_FILES := $(patsubst $(SRC_DIR)/%.txt,$(OBJ_DIR)/%.trim,$(TXT_FILES))
frames: $(OBJ_FILES)
$(FRAMES_DIR)/%.txt: $(FRAMES_DIR)/%.tif
tesseract -l eng $< $@
trim: $(TRIM_FILES) cleanup print
$(FRAMES_DIR)/%.trim: $(FRAMES_DIR)/%.txt
(grep "Ran:" $< || echo "\n") >> $(PROJECT)/frames.all
cleanup:
sed 's/^Ran: *//' $(PROJECT)/frames.all | tr -d ' ' | sort | uniq > $(PROJECT)/frames.all.clean
print:
PACKAGE_NAME=$(PACKAGE_NAME) SLEEP_INTERVAL=$(SLEEP_INTERVAL) PROJECT=$(PROJECT) OUTFILE=timings/$(PACKAGE_NAME) node read-frames.js
print-emotion:
PACKAGE_NAME=$(PACKAGE_NAME) SLEEP_INTERVAL=$(SLEEP_INTERVAL) PROJECT=$(PROJECT) OUTFILE=timings/emotion_$(PACKAGE_NAME) node read-frames.js

View File

@@ -0,0 +1,62 @@
# CSS Stress Test
This benchmarks bundler performance for CSS hot reloading.
## Results
bun is 14x faster than Next.js at hot reloading CSS.
```
bun v0.0.34
Saving every 16ms
Frame time:
50th percentile: 22.2ms
75th percentile: 23.9ms
90th percentile: 25.3ms
95th percentile: 43.6ms
99th percentile: 49.1ms
Rendered frames: 922 / 1024 (90%)
```
```
Next.js v11.1.2
Saving every 16ms
Frame time:
50th percentile: 312ms
75th percentile: 337.6ms
90th percentile: 387.7ms
95th percentile: 446.9ms
99th percentile: 591.7ms
Rendered frames: 64 / 1024 (6%)
```
## How it works
It times pixels instead of builds. `color-looper.zig` writes color updates and a timestamp to a CSS file while simultaneously screen recording a non-headless Chromium instance. After it finishes, it OCRs the video frames and verifies the scanned timestamps against the actual data. This measures (1) how long each update took, from being saved to disk to the pixels appearing on screen, and (2) what % of frames were rendered.
The intent is to be as accurate as possible. Measuring times reported client-side is simpler but less accurate, since those times may not correspond to pixels on the screen and do not start from when the data was written to disk (at best, they measure when the filesystem watcher detected the update, and often not even that). `color-looper.zig` must run separately from `browser.js` or the results will be inaccurate.
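For illustration, here is a rough JavaScript sketch of the kind of update written on each tick; the real writer is `color-looper.zig` (see the steps below), and the exact stylesheet layout here is an assumption, not the actual format:
```js
// Hypothetical sketch: the real implementation is color-looper.zig.
// The documented contract is only that each save embeds a UTC timestamp
// (milliseconds plus one extra decimal) in a var(--timestamp) CSS variable,
// alongside a changed color.
import { writeFileSync } from "node:fs";

function writeUpdate(path, tick) {
  // UTC wall-clock time in ms with one extra decimal place.
  const timestamp = (performance.timeOrigin + performance.now()).toFixed(1);
  const css = [
    `:root { --timestamp: "${timestamp}"; }`,
    `body { background: hsl(${tick % 360}, 100%, 50%); }`,
    // The page is assumed to render the timestamp as text so the screen
    // recording can be OCR'd later.
    `#timestamp::after { content: var(--timestamp); }`,
  ].join("\n");
  writeFileSync(path, css);
}

writeUpdate("./src/colors.css", 0);
```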
It works like this:
1. `browser.js` loads either bun or Next.js and a Chromium instance opened to the correct webpage
2. `color-looper.zig` updates [`./src/colors.css`](./src/colors.css) in a loop up to `1024` times (1024 is arbitrary), sleeping every `16`ms or `32`ms (a CLI arg you can pass it). The `var(--timestamp)` CSS variable contains the UTC timestamp with precision of milliseconds and one extra decimal point
3. `color-looper.zig` automatically records the screen via `screencapture` (builtin on macOS) and saves it, along with a `BigUint64Array` containing all the expected timestamps. When it's done, it writes to a designated file on disk which `browser.js` picks up as the signal to close the browser.
4. `ffmpeg` converts each frame into a black and white `.tif` file, which `tesseract` then OCRs
5. Various cleanup scripts extract the timestamp from each of those OCR'd frames into a single file
6. Using the OCR'd data, `./read-frames.js` calculates the 50th, 75th, 90th, 95th, and 99th percentile frame time, along with how many frames were skipped. Frame time is the metric that matters here because it is how much time elapsed between each update. It includes the artificial sleep interval, so it will not be faster than the sleep interval.
The script `run.sh` runs all the commands necessary to do this work unattended. It takes a while, though; the slow part is OCR'ing all the frames.
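`read-frames.js` itself is not shown in this diff; a minimal sketch of the percentile math it is described as performing, over hypothetical per-frame times in milliseconds:
```js
// Nearest-rank percentile over a sorted array of frame times (ms).
function percentile(sortedMs, p) {
  const rank = Math.ceil((p / 100) * sortedMs.length);
  return sortedMs[Math.max(0, Math.min(sortedMs.length - 1, rank - 1))];
}

const frameTimes = [22.1, 22.3, 23.9, 25.1, 43.8, 49.2]; // hypothetical data
const sorted = [...frameTimes].sort((a, b) => a - b);
for (const p of [50, 75, 90, 95, 99]) {
  console.log(`${p}th percentile: ${percentile(sorted, p)}ms`);
}
```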
To run this, you need:
- `zig`
- `bun-cli`
- `node`
- `tesseract`
- `screencapture` (macOS builtin)
- `ffmpeg`
- `puppeteer` (from the package.json)
You will need to run `bun bun --use next` first, with `next@11.1.2`. It will only run on macOS due to the dependencies on `screencapture`, how it detects screen resolution (so that Chromium is maximized), and how it auto-focuses Chromium (AppleScript).

View File

@@ -0,0 +1,114 @@
const puppeteer = require("puppeteer");
const http = require("http");
const path = require("path");
const url = require("url");
const fs = require("fs");
const child_process = require("child_process");
const serverURL = process.env.TEST_SERVER_URL || "http://localhost:8080";
if (process.env.PROJECT === "bun") {
const bunFlags = [`--origin=${serverURL}`].filter(Boolean);
const bunExec = process.env.BUN_BIN || "bun";
const bunProcess = child_process.spawn(bunExec, bunFlags, {
cwd: process.cwd(),
stdio: "ignore",
env: {
...process.env,
DISABLE_BUN_ANALYTICS: "1",
},
shell: false,
});
console.log("$", bunExec, bunFlags.join(" "));
const isDebug = bunExec.endsWith("-debug");
// bunProcess.stderr.pipe(process.stderr);
// bunProcess.stdout.pipe(process.stdout);
bunProcess.once("error", err => {
console.error("❌ bun error", err);
process.exit(1);
});
process.on("beforeExit", () => {
bunProcess?.kill(0);
});
} else if (process.env.PROJECT === "next") {
const bunProcess = child_process.spawn("./node_modules/.bin/next", ["--port", "8080"], {
cwd: process.cwd(),
stdio: "ignore",
env: {
...process.env,
},
shell: false,
});
}
const delay = new Promise((resolve, reject) => {
const watcher = fs.watch(path.resolve(process.cwd(), "src/colors.css.blob"));
watcher.once("change", () => {
setTimeout(() => {
resolve();
}, 1000);
});
});
async function main() {
const browser = await puppeteer.launch({
headless: false,
waitForInitialPage: true,
args: [
`--window-size=${parseInt(process.env.SCREEN_WIDTH || "1024", 10) / 2},${
parseInt(process.env.SCREEN_HEIGHT || "1024", 10) / 2
}`,
],
defaultViewport: {
width: parseInt(process.env.SCREEN_WIDTH || "1024", 10) / 2,
height: parseInt(process.env.SCREEN_HEIGHT || "1024", 10) / 2,
},
});
const promises = [];
let allTestsPassed = true;
async function runPage(key) {
var page;
try {
console.log("Opening page");
page = await browser.newPage();
console.log(`Navigating to "http://localhost:8080/"`);
while (true) {
try {
await page.goto("http://localhost:8080/", { waitUntil: "load" });
break;
} catch (exception) {
if (!exception.toString().includes("ERR_CONNECTION_REFUSED")) break;
}
}
await page.bringToFront();
await delay;
// runner.stdout.pipe(process.stdout);
// runner.stderr.pipe(process.stderr);
var didResolve = false;
console.log(`Completed. Done.`);
} catch (error) {
console.error(error);
} finally {
await page.close();
await browser.close();
}
}
return runPage();
}
main().catch(error =>
setTimeout(() => {
throw error;
}),
);

View File

@@ -0,0 +1,11 @@
SLEEP_INTERVAL=16 PROJECT=bun node read-frames.js
bun
--------------------------------------------------
CSS HMR FRAME TIME
50th percentile: 22.2ms
75th percentile: 23.9ms
90th percentile: 25.3ms
95th percentile: 43.6ms
99th percentile: 49.1ms
Rendered frames: 922 / 1024 (90%)

View File

@@ -0,0 +1,11 @@
SLEEP_INTERVAL=24 PROJECT=bun node read-frames.js
bun
--------------------------------------------------
CSS HMR FRAME TIME
50th percentile: 33.4ms
75th percentile: 34.5ms
90th percentile: 35.8ms
95th percentile: 65.5ms
99th percentile: 87.9ms
Rendered frames: 937 / 1024 (92%)

View File

@@ -0,0 +1,11 @@
SLEEP_INTERVAL=32 PROJECT=bun node read-frames.js
bun
--------------------------------------------------
CSS HMR FRAME TIME
50th percentile: 40.7ms
75th percentile: 42.3ms
90th percentile: 43.5ms
95th percentile: 76.4ms
99th percentile: 118.8ms
Rendered frames: 958 / 1024 (94%)

View File

@@ -0,0 +1,11 @@
SLEEP_INTERVAL=8 PROJECT=bun node read-frames.js
bun
--------------------------------------------------
CSS HMR FRAME TIME
50th percentile: 20ms
75th percentile: 24.4ms
90th percentile: 41ms
95th percentile: 53.9ms
99th percentile: 90.4ms
Rendered frames: 475 / 1024 (46%)

Some files were not shown because too many files have changed in this diff.