Compare commits


3 Commits

SHA1        Author       Date                        Message
8ad1b41391  dave caruso  2024-03-06 15:31:53 -08:00  Merge remote-tracking branch 'origin/main' into dave/eradicate-c-api
bdee36f51f  dave caruso  2024-02-24 21:21:18 -08:00  a
9e28dbda6e  dave caruso  2024-02-24 03:03:07 -08:00  nuke ExceptionRef; design a new system for handling js errors.
39183 changed files with 584331 additions and 1314511 deletions


@@ -1,78 +0,0 @@
import { spawnSync } from "node:child_process";
import { readFileSync, existsSync } from "node:fs";
import { parseArgs } from "node:util";
const { positionals, values } = parseArgs({
  allowPositionals: true,
  options: {
    help: {
      type: "boolean",
      short: "h",
      default: false,
    },
    interactive: {
      type: "boolean",
      short: "i",
      default: false,
    },
  },
});

if (values.help || positionals.length === 0) {
  console.log("Usage: node agent.mjs <prompt_name> [extra_args...]");
  console.log("Example: node agent.mjs triage fix bug in authentication");
  console.log("Options:");
  console.log("  -h, --help         Show this help message");
  console.log("  -i, --interactive  Run in interactive mode");
  process.exit(0);
}

// Prompt names are upper-cased, so `triage` resolves to `.agent/TRIAGE.md`.
const promptName = positionals[0].toUpperCase();
const promptFile = `.agent/${promptName}.md`;
const extraArgs = positionals.slice(1);

if (!existsSync(promptFile)) {
  console.error(`Error: Prompt file "${promptFile}" not found`);
  console.error(`Available prompts should be named like: .agent/TRIAGE.md, .agent/DEBUG.md, etc.`);
  process.exit(1);
}

try {
  let prompt = readFileSync(promptFile, "utf-8");

  // Prepend all GITHUB_* environment variables so the agent has CI context.
  const githubEnvs = Object.entries(process.env)
    .filter(([key]) => key.startsWith("GITHUB_"))
    .sort(([a], [b]) => a.localeCompare(b));
  if (githubEnvs.length > 0) {
    const githubContext = `## GitHub Environment\n\n${githubEnvs
      .map(([key, value]) => `**${key}**: \`${value}\``)
      .join("\n")}\n\n---\n\n`;
    prompt = githubContext + prompt;
  }

  // Append any extra CLI arguments as additional instructions.
  if (extraArgs.length > 0) {
    const extraArgsContext = `\n\n## Additional Arguments\n\n${extraArgs.join(" ")}\n\n---\n\n`;
    prompt = prompt + extraArgsContext;
  }

  const claudeArgs = [prompt, "--allowedTools=Edit,Write,Replace,Search", "--output-format=json"];
  if (!values.interactive) {
    claudeArgs.unshift("--print");
  }

  const { status, error } = spawnSync("claude", claudeArgs, {
    stdio: "inherit",
    encoding: "utf-8",
  });
  if (error) {
    console.error("Error running claude:", error);
    process.exit(1);
  }
  process.exit(status || 0);
} catch (error) {
  console.error(`Error reading prompt file "${promptFile}":`, error);
  process.exit(1);
}


@@ -1,164 +0,0 @@
ARG LLVM_VERSION="19"
ARG REPORTED_LLVM_VERSION="19.1.7"
ARG OLD_BUN_VERSION="1.1.38"
ARG BUILDKITE_AGENT_TAGS="queue=linux,os=linux,arch=${TARGETARCH}"
FROM --platform=$BUILDPLATFORM ubuntu:20.04 as base-arm64
FROM --platform=$BUILDPLATFORM ubuntu:20.04 as base-amd64
FROM base-$TARGETARCH as base
ARG LLVM_VERSION
ARG OLD_BUN_VERSION
ARG TARGETARCH
ARG REPORTED_LLVM_VERSION
ENV DEBIAN_FRONTEND=noninteractive \
    CI=true \
    DOCKER=true

RUN echo "Acquire::Queue-Mode \"host\";" > /etc/apt/apt.conf.d/99-apt-queue-mode.conf \
    && echo "Acquire::Timeout \"120\";" >> /etc/apt/apt.conf.d/99-apt-timeout.conf \
    && echo "Acquire::Retries \"3\";" >> /etc/apt/apt.conf.d/99-apt-retries.conf \
    && echo "APT::Install-Recommends \"false\";" >> /etc/apt/apt.conf.d/99-apt-install-recommends.conf \
    && echo "APT::Install-Suggests \"false\";" >> /etc/apt/apt.conf.d/99-apt-install-suggests.conf

RUN apt-get update && apt-get install -y --no-install-recommends \
    wget curl git python3 python3-pip ninja-build \
    software-properties-common apt-transport-https \
    ca-certificates gnupg lsb-release unzip \
    libxml2-dev ruby ruby-dev bison gawk perl make golang \
    && add-apt-repository ppa:ubuntu-toolchain-r/test \
    && apt-get update \
    && apt-get install -y gcc-13 g++-13 libgcc-13-dev libstdc++-13-dev \
        libasan6 libubsan1 libatomic1 libtsan0 liblsan0 \
        libgfortran5 libc6-dev \
    && wget https://apt.llvm.org/llvm.sh \
    && chmod +x llvm.sh \
    && ./llvm.sh ${LLVM_VERSION} all \
    && rm llvm.sh

RUN --mount=type=tmpfs,target=/tmp \
    cmake_version="3.30.5" && \
    if [ "$TARGETARCH" = "arm64" ]; then \
        cmake_url="https://github.com/Kitware/CMake/releases/download/v${cmake_version}/cmake-${cmake_version}-linux-aarch64.sh"; \
    else \
        cmake_url="https://github.com/Kitware/CMake/releases/download/v${cmake_version}/cmake-${cmake_version}-linux-x86_64.sh"; \
    fi && \
    wget -O /tmp/cmake.sh "$cmake_url" && \
    sh /tmp/cmake.sh --skip-license --prefix=/usr

RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-13 130 \
    --slave /usr/bin/g++ g++ /usr/bin/g++-13 \
    --slave /usr/bin/gcc-ar gcc-ar /usr/bin/gcc-ar-13 \
    --slave /usr/bin/gcc-nm gcc-nm /usr/bin/gcc-nm-13 \
    --slave /usr/bin/gcc-ranlib gcc-ranlib /usr/bin/gcc-ranlib-13

RUN echo "ARCH_PATH=$([ "$TARGETARCH" = "arm64" ] && echo "aarch64-linux-gnu" || echo "x86_64-linux-gnu")" >> /etc/environment \
    && echo "BUN_ARCH=$([ "$TARGETARCH" = "arm64" ] && echo "aarch64" || echo "x64")" >> /etc/environment

ENV LD_LIBRARY_PATH=/usr/lib/gcc/${ARCH_PATH}/13:/usr/lib/${ARCH_PATH} \
    LIBRARY_PATH=/usr/lib/gcc/${ARCH_PATH}/13:/usr/lib/${ARCH_PATH} \
    CPLUS_INCLUDE_PATH=/usr/include/c++/13:/usr/include/${ARCH_PATH}/c++/13 \
    C_INCLUDE_PATH=/usr/lib/gcc/${ARCH_PATH}/13/include

RUN if [ "$TARGETARCH" = "arm64" ]; then \
        export ARCH_PATH="aarch64-linux-gnu"; \
    else \
        export ARCH_PATH="x86_64-linux-gnu"; \
    fi \
    && mkdir -p /usr/lib/gcc/${ARCH_PATH}/13 \
    && ln -sf /usr/lib/${ARCH_PATH}/libstdc++.so.6 /usr/lib/gcc/${ARCH_PATH}/13/ \
    && echo "/usr/lib/gcc/${ARCH_PATH}/13" > /etc/ld.so.conf.d/gcc-13.conf \
    && echo "/usr/lib/${ARCH_PATH}" >> /etc/ld.so.conf.d/gcc-13.conf \
    && ldconfig

RUN for f in /usr/lib/llvm-${LLVM_VERSION}/bin/*; do ln -sf "$f" /usr/bin; done \
    && ln -sf /usr/bin/clang-${LLVM_VERSION} /usr/bin/clang \
    && ln -sf /usr/bin/clang++-${LLVM_VERSION} /usr/bin/clang++ \
    && ln -sf /usr/bin/lld-${LLVM_VERSION} /usr/bin/lld \
    && ln -sf /usr/bin/lldb-${LLVM_VERSION} /usr/bin/lldb \
    && ln -sf /usr/bin/clangd-${LLVM_VERSION} /usr/bin/clangd \
    && ln -sf /usr/bin/llvm-ar-${LLVM_VERSION} /usr/bin/llvm-ar \
    && ln -sf /usr/bin/ld.lld /usr/bin/ld \
    && ln -sf /usr/bin/clang /usr/bin/cc \
    && ln -sf /usr/bin/clang++ /usr/bin/c++

ENV CC="clang" \
    CXX="clang++" \
    AR="llvm-ar-${LLVM_VERSION}" \
    RANLIB="llvm-ranlib-${LLVM_VERSION}" \
    LD="lld-${LLVM_VERSION}"

RUN --mount=type=tmpfs,target=/tmp \
    bash -c '\
    set -euxo pipefail && \
    source /etc/environment && \
    echo "Downloading bun-v${OLD_BUN_VERSION}/bun-linux-$BUN_ARCH.zip from https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/bun-v${OLD_BUN_VERSION}/bun-linux-$BUN_ARCH.zip" && \
    curl -fsSL https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/bun-v${OLD_BUN_VERSION}/bun-linux-$BUN_ARCH.zip -o /tmp/bun.zip && \
    unzip /tmp/bun.zip -d /tmp/bun && \
    mv /tmp/bun/*/bun /usr/bin/bun && \
    chmod +x /usr/bin/bun'
ENV LLVM_VERSION=${REPORTED_LLVM_VERSION}
WORKDIR /workspace
FROM --platform=$BUILDPLATFORM base as buildkite
ARG BUILDKITE_AGENT_TAGS
# Install Rust nightly
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y \
    && export PATH=$HOME/.cargo/bin:$PATH \
    && rustup install nightly \
    && rustup default nightly

RUN ARCH=$(if [ "$TARGETARCH" = "arm64" ]; then echo "arm64"; else echo "amd64"; fi) && \
    echo "Downloading buildkite" && \
    curl -fsSL "https://github.com/buildkite/agent/releases/download/v3.87.0/buildkite-agent-linux-${ARCH}-3.87.0.tar.gz" -o /tmp/buildkite-agent.tar.gz && \
    mkdir -p /tmp/buildkite-agent && \
    tar -xzf /tmp/buildkite-agent.tar.gz -C /tmp/buildkite-agent && \
    mv /tmp/buildkite-agent/buildkite-agent /usr/bin/buildkite-agent
RUN mkdir -p /var/cache/buildkite-agent /var/log/buildkite-agent /var/run/buildkite-agent /etc/buildkite-agent /var/lib/buildkite-agent/cache/bun
COPY ../*/agent.mjs /var/bun/scripts/
ENV BUN_INSTALL_CACHE=/var/lib/buildkite-agent/cache/bun
ENV BUILDKITE_AGENT_TAGS=${BUILDKITE_AGENT_TAGS}
WORKDIR /var/bun/scripts
ENV PATH=/root/.cargo/bin:$PATH
CMD ["bun", "/var/bun/scripts/agent.mjs", "start"]
FROM --platform=$BUILDPLATFORM base as bun-build-linux-local
ARG LLVM_VERSION
WORKDIR /workspace/bun
COPY . /workspace/bun
# Install Rust nightly
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y \
    && export PATH=$HOME/.cargo/bin:$PATH \
    && rustup install nightly \
    && rustup default nightly
ENV PATH=/root/.cargo/bin:$PATH
ENV LLVM_VERSION=${REPORTED_LLVM_VERSION}
RUN --mount=type=tmpfs,target=/workspace/bun/build \
    ls -la \
    && bun run build:release \
    && mkdir -p /target \
    && cp -r /workspace/bun/build/release/bun /target/bun


@@ -1,122 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail

# Check if running as root
if [ "$EUID" -ne 0 ]; then
  echo "error: must run as root"
  exit 1
fi

# Check OS compatibility
if ! command -v dnf &> /dev/null; then
  echo "error: this script requires dnf (RHEL/Fedora/CentOS)"
  exit 1
fi

# Ensure /tmp/agent.mjs and /tmp/Dockerfile are present
if [ ! -f /tmp/agent.mjs ] || [ ! -f /tmp/Dockerfile ]; then
  # Print each missing file
  if [ ! -f /tmp/agent.mjs ]; then
    echo "error: /tmp/agent.mjs is missing"
  fi
  if [ ! -f /tmp/Dockerfile ]; then
    echo "error: /tmp/Dockerfile is missing"
  fi
  exit 1
fi

# Install Docker
dnf update -y
dnf install -y docker
systemctl enable docker
systemctl start docker || {
  echo "error: failed to start Docker"
  exit 1
}

# Create builder
docker buildx create --name builder --driver docker-container --bootstrap --use || {
  echo "error: failed to create Docker buildx builder"
  exit 1
}

# Create a systemd unit so the Buildkite container starts on boot
cat << 'EOF' > /etc/systemd/system/buildkite-agent.service
[Unit]
Description=Buildkite Docker Container
After=docker.service network-online.target
Requires=docker.service network-online.target

[Service]
TimeoutStartSec=0
Restart=always
RestartSec=5
ExecStartPre=-/usr/bin/docker stop buildkite
ExecStartPre=-/usr/bin/docker rm buildkite
ExecStart=/usr/bin/docker run \
  --name buildkite \
  --restart=unless-stopped \
  --network host \
  -v /var/run/docker.sock:/var/run/docker.sock \
  -v /tmp:/tmp \
  buildkite:latest

[Install]
WantedBy=multi-user.target
EOF

echo "Building Buildkite image"

# Clean up any previous build artifacts
rm -rf /tmp/fakebun
mkdir -p /tmp/fakebun/scripts /tmp/fakebun/.buildkite

# Copy required files
cp /tmp/agent.mjs /tmp/fakebun/scripts/ || {
  echo "error: failed to copy agent.mjs"
  exit 1
}
cp /tmp/Dockerfile /tmp/fakebun/.buildkite/Dockerfile || {
  echo "error: failed to copy Dockerfile"
  exit 1
}
cd /tmp/fakebun || {
  echo "error: failed to change directory"
  exit 1
}

# Build the Buildkite image
docker buildx build \
  --platform $(uname -m | sed 's/aarch64/linux\/arm64/;s/x86_64/linux\/amd64/') \
  --tag buildkite:latest \
  --target buildkite \
  -f .buildkite/Dockerfile \
  --load \
  . || {
  echo "error: Docker build failed"
  exit 1
}

# Create container to ensure image is cached in AMI
docker container create \
  --name buildkite \
  --restart=unless-stopped \
  buildkite:latest || {
  echo "error: failed to create buildkite container"
  exit 1
}

# Reload systemd to pick up new service
systemctl daemon-reload

# Enable the service, but don't start it yet
systemctl enable buildkite-agent || {
  echo "error: failed to enable buildkite-agent service"
  exit 1
}

echo "Bootstrap complete"
echo "To start the Buildkite agent, run:"
echo "  systemctl start buildkite-agent"


@@ -1,16 +0,0 @@
# Uploads the latest CI workflow to Buildkite.
# https://buildkite.com/docs/pipelines/defining-steps
#
# Changes to this file must be manually edited here:
# https://buildkite.com/bun/bun/settings/steps
steps:
  - if: "build.pull_request.repository.fork"
    block: ":eyes:"
    prompt: "Did you review the PR?"
    blocked_state: "running"
  - label: ":pipeline:"
    agents:
      queue: "build-darwin"
    command:
      - "node .buildkite/ci.mjs"

File diff suppressed because it is too large.


@@ -1,255 +0,0 @@
# macOS Runner Infrastructure - Claude Development Guide
This document provides context and guidance for Claude to work on the macOS runner infrastructure.
## Overview
This infrastructure provides automated, scalable macOS CI runners for Bun using MacStadium's Orka platform. It implements complete job isolation, daily image rebuilds, and comprehensive testing.
## Architecture
### Core Components
- **Packer**: Builds VM images with all required software
- **Terraform**: Manages VM fleet with auto-scaling
- **GitHub Actions**: Automates daily rebuilds and deployments
- **User Management**: Creates isolated users per job (`bk-<job-id>`)
### Key Features
- **Complete Job Isolation**: Each Buildkite job runs in its own user account
- **Daily Image Rebuilds**: Automated nightly rebuilds ensure fresh environments
- **Flakiness Testing**: Multiple test iterations ensure reliability (80% success rate minimum)
- **Software Validation**: All tools tested for proper installation and functionality
- **Version Synchronization**: Exact versions match bootstrap.sh requirements
## File Structure
```
.buildkite/macos-runners/
├── packer/
│ └── macos-base.pkr.hcl # VM image building configuration
├── terraform/
│ ├── main.tf # Infrastructure definition
│ ├── variables.tf # Configuration variables
│ ├── outputs.tf # Resource outputs
│ └── user-data.sh # VM initialization script
├── scripts/
│ ├── bootstrap-macos.sh # macOS software installation
│ ├── create-build-user.sh # User creation for job isolation
│ ├── cleanup-build-user.sh # User cleanup after jobs
│ └── job-runner.sh # Main job lifecycle management
├── github-actions/
│ ├── image-rebuild.yml # Daily image rebuild workflow
│ └── deploy-fleet.yml # Fleet deployment workflow
├── README.md # User documentation
├── DEPLOYMENT.md # Deployment guide
└── CLAUDE.md # This file
```
## Software Versions (Must Match bootstrap.sh)
These versions are synchronized with `/scripts/bootstrap.sh`:
- **Node.js**: 24.3.0 (exact)
- **Bun**: 1.2.17 (exact)
- **LLVM**: 19.1.7 (exact)
- **CMake**: 3.30.5 (exact)
- **Buildkite Agent**: 3.87.0
## Key Scripts
### bootstrap-macos.sh
- Installs all required software with exact versions
- Configures development environment
- Sets up Tailscale, Docker, and other dependencies
- **Critical**: Must stay synchronized with main bootstrap.sh
### create-build-user.sh
- Creates unique user per job: `bk-<job-id>` (see the sketch after this list)
- Sets up isolated environment with proper permissions
- Configures shell environment and paths
- Creates workspace directories
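
A minimal sketch of that flow on macOS, using the standard `sysadminctl`/`dscl`/`createhomedir` tooling; the UID selection strategy and paths here are illustrative, not the actual script:

```bash
#!/usr/bin/env bash
# Illustrative sketch only; the real logic lives in scripts/create-build-user.sh.
set -euo pipefail

job_id="$1"
username="bk-${job_id}"

# Pick the next free UID (selection strategy is an assumption for illustration).
uid=$(( $(dscl . -list /Users UniqueID | awk '{print $2}' | sort -n | tail -1) + 1 ))

# Create the user with an isolated home directory and shell.
sysadminctl -addUser "$username" -UID "$uid" -home "/Users/$username" -shell /bin/bash
createhomedir -c -u "$username" > /dev/null

# Per-job workspace, owned exclusively by the build user.
mkdir -p "/Users/$username/workspace"
chown -R "$username" "/Users/$username"
```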
### cleanup-build-user.sh
- Kills all processes owned by build user
- Removes user account and home directory
- Cleans up temporary files and caches
- Ensures complete isolation between jobs
### job-runner.sh
- Main orchestration script
- Manages job lifecycle: create user → run job → cleanup (sketched after this list)
- Handles timeouts and health checks
- Runs as root via LaunchDaemon
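
The lifecycle it manages can be sketched roughly as follows; the helper paths follow the file structure above, while the timeout and the exact `buildkite-agent` invocation are assumptions for illustration:

```bash
#!/usr/bin/env bash
# Rough sketch of the create user → run job → cleanup lifecycle; not the actual job-runner.sh.
set -euo pipefail

job_id="$1"
username="bk-${job_id}"
scripts=/usr/local/bin/bun-ci

# Always clean up the build user, even if the job fails or times out.
trap '"$scripts/cleanup-build-user.sh" "$username"' EXIT

"$scripts/create-build-user.sh" "$job_id"

# Run the job as the isolated user, with a hard timeout (2h is illustrative).
timeout 7200 sudo -u "$username" -i buildkite-agent start --acquire-job "$job_id"
```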
## GitHub Actions Workflows
### image-rebuild.yml
- Runs daily at 2 AM UTC
- Detects changes to trigger rebuilds
- Builds images for macOS 13, 14, 15
- **Validation Steps**:
- Software installation verification
- Flakiness testing (3 iterations, 80% success rate)
- Health endpoint testing
- Discord notifications for status
### deploy-fleet.yml
- Manual deployment trigger
- Validates inputs and plans changes
- Deploys VM fleet with health checks
- Supports different environments (prod/staging/dev)
## Required Secrets
### MacStadium
- `MACSTADIUM_API_KEY`: API access key
- `ORKA_ENDPOINT`: Orka API endpoint
- `ORKA_AUTH_TOKEN`: Authentication token
### AWS
- `AWS_ACCESS_KEY_ID`: For Terraform state storage
- `AWS_SECRET_ACCESS_KEY`: For Terraform state storage
### Buildkite
- `BUILDKITE_AGENT_TOKEN`: Agent registration token
- `BUILDKITE_API_TOKEN`: For monitoring/status checks
- `BUILDKITE_ORG`: Organization slug
### GitHub
- `GITHUB_TOKEN`: For private repository access
### Notifications
- `DISCORD_WEBHOOK_URL`: For status notifications
## Development Guidelines
### Adding New Software
1. Update `bootstrap-macos.sh` with installation commands (example after this list)
2. Add version verification in the script
3. Include in validation tests in `image-rebuild.yml`
4. Update documentation in README.md
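
For example, a new tool added to `bootstrap-macos.sh` would typically pair the install with a pinned-version check; the tool and version below are hypothetical:

```bash
# Hypothetical addition to bootstrap-macos.sh: install a tool and verify its exact version.
JQ_VERSION="1.7.1"
brew install jq
installed=$(jq --version | sed 's/^jq-//')
if [ "$installed" != "$JQ_VERSION" ]; then
  echo "error: expected jq $JQ_VERSION, got $installed" >&2
  exit 1
fi
```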
### Modifying User Isolation
1. Update `create-build-user.sh` for user creation
2. Update `cleanup-build-user.sh` for cleanup
3. Test isolation in `job-runner.sh`
4. Ensure proper permissions and security
### Updating VM Configuration
1. Modify `terraform/variables.tf` for fleet sizing
2. Update `terraform/main.tf` for infrastructure changes
3. Test deployment with `deploy-fleet.yml`
4. Update documentation
### Version Updates
1. **Critical**: Check `/scripts/bootstrap.sh` for version changes
2. Update exact versions in `bootstrap-macos.sh` (a hypothetical sync check follows this list)
3. Update version verification in workflows
4. Update documentation
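
A sync check could be as simple as the following sketch, assuming both scripts pin versions as `NAME_VERSION="x.y.z"` assignments (the variable names are assumptions):

```bash
# Hypothetical check that bootstrap-macos.sh matches the pins in bootstrap.sh.
for name in NODE_VERSION BUN_VERSION LLVM_VERSION CMAKE_VERSION; do
  main=$(grep -m1 "^${name}=" scripts/bootstrap.sh | cut -d'"' -f2)
  runner=$(grep -m1 "^${name}=" .buildkite/macos-runners/scripts/bootstrap-macos.sh | cut -d'"' -f2)
  if [ "$main" != "$runner" ]; then
    echo "MISMATCH: $name is $main in bootstrap.sh but $runner in bootstrap-macos.sh"
  fi
done
```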
## Testing Strategy
### Image Validation
- Software installation verification
- Version checking for exact matches
- Health endpoint testing
- Basic functionality tests
### Flakiness Testing
- 3 test iterations per image
- 80% success rate minimum
- Tests basic commands, Node.js, Bun, build tools
- Automated cleanup of test VMs
### Integration Testing
- End-to-end job execution
- User isolation verification
- Resource cleanup validation
- Performance monitoring
## Troubleshooting
### Common Issues
1. **Version Mismatches**: Check bootstrap.sh for updates
2. **User Cleanup Failures**: Check process termination and file permissions
3. **Image Build Failures**: Check Packer logs and VM resources
4. **Flakiness**: Investigate VM performance and network issues
### Debugging Commands
```bash
# Check VM status
orka vm list
# Check image status
orka image list
# Test user creation
sudo /usr/local/bin/bun-ci/create-build-user.sh
# Check health endpoint
curl http://localhost:8080/health
# View logs
tail -f /usr/local/var/log/buildkite-agent/buildkite-agent.log
```
## Performance Considerations
### Resource Management
- VMs configured with 12 CPU cores, 32GB RAM
- Auto-scaling based on queue demand
- Aggressive cleanup to prevent resource leaks
### Cost Optimization
- Automated cleanup of old images and snapshots
- Efficient VM sizing based on workload requirements
- Scheduled maintenance windows
## Security
### Isolation
- Complete process isolation per job
- Separate user accounts with unique UIDs
- Cleanup of all user data after jobs
### Network Security
- VPC isolation with security groups
- Limited SSH access for debugging
- Encrypted communications
### Credential Management
- Secure secret storage in GitHub
- No hardcoded credentials in code
- Regular rotation of access tokens
## Monitoring
### Health Checks
- HTTP endpoints on port 8080 (poll sketch after this list)
- Buildkite agent connectivity monitoring
- Resource usage tracking
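
A fleet-wide poll needs nothing more than the endpoint above; the IPs in this sketch are placeholders:

```bash
# Minimal sketch: poll each runner's health endpoint on port 8080.
for ip in 10.0.1.10 10.0.1.11; do # placeholder fleet IPs
  if curl -fsS --max-time 10 "http://$ip:8080/health" > /dev/null; then
    echo "$ip: healthy"
  else
    echo "$ip: UNHEALTHY"
  fi
done
```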
### Alerts
- Discord notifications for failures
- Build status reporting
- Fleet deployment notifications
## Next Steps for Development
1. **Monitor bootstrap.sh**: Watch for version updates that need synchronization
2. **Performance Optimization**: Monitor resource usage and optimize VM sizes
3. **Enhanced Testing**: Add more comprehensive validation tests
4. **Cost Monitoring**: Track usage and optimize for cost efficiency
5. **Security Hardening**: Regular security reviews and updates
## References
- [MacStadium Orka Documentation](https://orkadocs.macstadium.com/)
- [Packer Documentation](https://www.packer.io/docs)
- [Terraform Documentation](https://www.terraform.io/docs)
- [Buildkite Agent Documentation](https://buildkite.com/docs/agent/v3)
- [Main bootstrap.sh](../../scripts/bootstrap.sh) - **Keep synchronized!**
---
**Important**: This infrastructure is critical for Bun's CI/CD pipeline. Always test changes thoroughly and maintain backward compatibility. The `bootstrap-macos.sh` script must stay synchronized with the main `bootstrap.sh` script to ensure consistent environments.


@@ -1,428 +0,0 @@
# macOS Runner Deployment Guide
This guide provides step-by-step instructions for deploying the macOS runner infrastructure for Bun CI.
## Prerequisites
### 1. MacStadium Account Setup
1. **Create MacStadium Account**
- Sign up at [MacStadium](https://www.macstadium.com/)
- Purchase Orka plan with appropriate VM allocation
2. **Configure API Access**
- Generate API key from MacStadium dashboard
- Note down your Orka endpoint URL
- Test API connectivity
3. **Base Image Preparation**
- Ensure base macOS images are available in your account
- Verify image naming convention: `base-images/macos-{version}-{name}`
### 2. AWS Account Setup
1. **Create AWS Account**
- Set up AWS account for Terraform state storage
- Create S3 bucket for Terraform backend: `bun-terraform-state`
2. **Configure IAM**
- Create IAM user with appropriate permissions
- Generate access key and secret key
- Attach policies for S3, CloudWatch, and EC2 (if using AWS resources)
### 3. GitHub Repository Setup
1. **Fork or Clone Repository**
- Ensure you have admin access to the repository
- Create necessary branches for deployment
2. **Configure Repository Secrets**
- Add all required secrets (see main README.md)
- Test secret accessibility
### 4. Buildkite Setup
1. **Organization Configuration**
- Create or access Buildkite organization
- Generate agent token with appropriate permissions
- Note organization slug
2. **Queue Configuration**
- Create queues: `macos`, `macos-arm64`, `macos-x86_64`
- Configure queue-specific settings
## Step-by-Step Deployment
### Step 1: Environment Preparation
1. **Install Required Tools**
```bash
# Install Terraform
wget https://releases.hashicorp.com/terraform/1.6.0/terraform_1.6.0_linux_amd64.zip
unzip terraform_1.6.0_linux_amd64.zip
sudo mv terraform /usr/local/bin/
# Install Packer
wget https://releases.hashicorp.com/packer/1.9.4/packer_1.9.4_linux_amd64.zip
unzip packer_1.9.4_linux_amd64.zip
sudo mv packer /usr/local/bin/
# Install AWS CLI
curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip"
unzip awscliv2.zip
sudo ./aws/install
# Install MacStadium CLI
curl -L "https://github.com/macstadium/orka-cli/releases/latest/download/orka-cli-linux-amd64.tar.gz" | tar -xz
sudo mv orka-cli /usr/local/bin/orka
```
2. **Configure AWS Credentials**
```bash
aws configure
# Enter your AWS access key, secret key, and region
```
3. **Configure MacStadium CLI**
```bash
orka config set endpoint <your-orka-endpoint>
orka auth token <your-orka-token>
```
### Step 2: SSH Key Setup
1. **Generate SSH Key Pair**
```bash
ssh-keygen -t rsa -b 4096 -f ~/.ssh/bun-runner -N ""
```
2. **Copy Public Key to Terraform Directory**
```bash
mkdir -p .buildkite/macos-runners/terraform/ssh-keys
cp ~/.ssh/bun-runner.pub .buildkite/macos-runners/terraform/ssh-keys/bun-runner.pub
```
### Step 3: Terraform Backend Setup
1. **Create S3 Bucket for Terraform State**
```bash
aws s3 mb s3://bun-terraform-state --region us-west-2
aws s3api put-bucket-versioning --bucket bun-terraform-state --versioning-configuration Status=Enabled
aws s3api put-bucket-encryption --bucket bun-terraform-state --server-side-encryption-configuration '{
  "Rules": [
    {
      "ApplyServerSideEncryptionByDefault": {
        "SSEAlgorithm": "AES256"
      }
    }
  ]
}'
```
2. **Create Terraform Variables File**
```bash
cd .buildkite/macos-runners/terraform
cat > production.tfvars << EOF
environment           = "production"
macstadium_api_key    = "your-macstadium-api-key"
buildkite_agent_token = "your-buildkite-agent-token"
github_token          = "your-github-token"
fleet_size = {
  macos_13 = 4
  macos_14 = 6
  macos_15 = 8
}
vm_configuration = {
  cpu_count = 12
  memory_gb = 32
  disk_size = 500
}
EOF
```
### Step 4: Build VM Images
1. **Validate Packer Configuration**
```bash
cd .buildkite/macos-runners/packer
packer validate -var "macos_version=15" macos-base.pkr.hcl
```
2. **Build macOS 15 Image**
```bash
packer build \
-var "macos_version=15" \
-var "orka_endpoint=<your-orka-endpoint>" \
-var "orka_auth_token=<your-orka-token>" \
macos-base.pkr.hcl
```
3. **Build macOS 14 Image**
```bash
packer build \
-var "macos_version=14" \
-var "orka_endpoint=<your-orka-endpoint>" \
-var "orka_auth_token=<your-orka-token>" \
macos-base.pkr.hcl
```
4. **Build macOS 13 Image**
```bash
packer build \
-var "macos_version=13" \
-var "orka_endpoint=<your-orka-endpoint>" \
-var "orka_auth_token=<your-orka-token>" \
macos-base.pkr.hcl
```
### Step 5: Deploy VM Fleet
1. **Initialize Terraform**
```bash
cd .buildkite/macos-runners/terraform
terraform init
```
2. **Create Production Workspace**
```bash
terraform workspace new production
```
3. **Plan Deployment**
```bash
terraform plan -var-file="production.tfvars"
```
4. **Apply Deployment**
```bash
terraform apply -var-file="production.tfvars"
```
### Step 6: Verify Deployment
1. **Check VM Status**
```bash
orka vm list
```
2. **Check Terraform Outputs**
```bash
terraform output
```
3. **Test VM Connectivity**
```bash
# Get VM IP from terraform output
VM_IP=$(terraform output -json vm_instances | jq -r 'to_entries[0].value.ip_address')
# Test SSH connectivity
ssh -i ~/.ssh/bun-runner admin@$VM_IP
# Test health endpoint
curl http://$VM_IP:8080/health
```
4. **Verify Buildkite Agent Connectivity**
```bash
curl -H "Authorization: Bearer <your-buildkite-api-token>" \
"https://api.buildkite.com/v2/organizations/<your-org>/agents"
```
### Step 7: Configure GitHub Actions
1. **Enable GitHub Actions Workflows**
- Navigate to repository Actions tab
- Enable workflows if not already enabled
2. **Test Image Rebuild Workflow**
```bash
# Trigger manual rebuild
gh workflow run image-rebuild.yml
```
3. **Test Fleet Deployment Workflow**
```bash
# Trigger manual deployment
gh workflow run deploy-fleet.yml
```
## Post-Deployment Configuration
### 1. Monitoring Setup
1. **CloudWatch Dashboards**
- Create custom dashboards for VM metrics
- Set up alarms for critical thresholds
2. **Discord Notifications**
- Configure Discord webhook for alerts
- Test notification delivery (see the curl example below)
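
A one-off test message can be posted straight to the webhook using the standard Discord webhook API; the message text is arbitrary:

```bash
# Send a test message to the configured Discord webhook.
curl -fsS -H "Content-Type: application/json" \
  -d '{"content": "Test notification from macOS runner infrastructure"}' \
  "$DISCORD_WEBHOOK_URL"
```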
### 2. Backup Configuration
1. **Enable Automated Snapshots**
```hcl
# Update terraform configuration
backup_config = {
  enable_snapshots   = true
  snapshot_schedule  = "0 4 * * *"
  snapshot_retention = 7
}
```
2. **Test Backup Restoration**
- Create test snapshot
- Verify restoration process
### 3. Security Hardening
1. **Review Security Groups**
- Minimize open ports
- Restrict source IP ranges
2. **Enable Audit Logging**
- Configure CloudTrail for AWS resources
- Enable MacStadium audit logs
### 4. Performance Optimization
1. **Monitor Resource Usage**
- Review CPU, memory, disk usage
- Adjust VM sizes if needed
2. **Optimize Auto-Scaling**
- Monitor scaling events
- Adjust thresholds as needed
## Maintenance Procedures
### Daily Maintenance
1. **Automated Tasks**
- Image rebuilds (automatic)
- Health checks (automatic)
- Cleanup processes (automatic)
2. **Manual Monitoring**
- Check Discord notifications
- Review CloudWatch metrics
- Monitor Buildkite queue
### Weekly Maintenance
1. **Review Metrics**
- Analyze performance trends
- Check cost optimization opportunities
2. **Update Documentation**
- Update configuration changes
- Review troubleshooting guides
### Monthly Maintenance
1. **Capacity Planning**
- Review usage patterns
- Plan capacity adjustments
2. **Security Updates**
- Review security patches
- Update base images if needed
## Troubleshooting Common Issues
### Issue: VM Creation Fails
```bash
# Check MacStadium account limits
orka account info
# Check available resources
orka resource list
# Review Packer logs
tail -f packer-build.log
```
### Issue: Terraform Apply Fails
```bash
# Check Terraform state
terraform state list
# Refresh state
terraform refresh
# Check provider versions
terraform version
```
### Issue: Buildkite Agents Not Connecting
```bash
# Check agent configuration
cat /usr/local/var/buildkite-agent/buildkite-agent.cfg
# Check agent logs
tail -f /usr/local/var/log/buildkite-agent/buildkite-agent.log
# Restart agent service
sudo launchctl unload /Library/LaunchDaemons/com.buildkite.buildkite-agent.plist
sudo launchctl load /Library/LaunchDaemons/com.buildkite.buildkite-agent.plist
```
## Rollback Procedures
### Rollback VM Fleet
1. **Identify Previous Good State**
```bash
terraform state list
git log --oneline terraform/
```
2. **Rollback to Previous Configuration**
```bash
git checkout <previous-commit>
terraform plan -var-file="production.tfvars"
terraform apply -var-file="production.tfvars"
```
### Rollback VM Images
1. **List Available Images**
```bash
orka image list
```
2. **Update Terraform to Use Previous Images**
```bash
# Edit terraform configuration to use previous image IDs
terraform plan -var-file="production.tfvars"
terraform apply -var-file="production.tfvars"
```
## Cost Optimization Tips
1. **Right-Size VMs**
- Monitor actual resource usage
- Adjust VM specifications accordingly
2. **Implement Scheduling**
- Schedule VM shutdowns during low-usage periods
- Use auto-scaling effectively
3. **Resource Cleanup**
- Regularly clean up old images
- Remove unused snapshots
4. **Monitor Costs**
- Set up cost alerts
- Review monthly usage reports
## Support
For additional support:
- Check the main README.md for troubleshooting
- Review GitHub Actions logs
- Contact MacStadium support for platform issues
- Open issues in the repository for infrastructure problems


@@ -1,374 +0,0 @@
# macOS Runner Infrastructure
This directory contains the infrastructure-as-code for deploying and managing macOS CI runners for the Bun project. It is located in the `.buildkite` folder alongside other CI configuration. The infrastructure provides automated, scalable, and reliable macOS build environments using MacStadium's Orka platform.
## Architecture Overview
The infrastructure consists of several key components:
1. **VM Images**: Golden images built with Packer containing all necessary software
2. **VM Fleet**: Terraform-managed fleet of macOS VMs across different versions
3. **User Isolation**: Per-job user creation and cleanup for complete isolation
4. **Automation**: GitHub Actions workflows for daily image rebuilds and fleet management
## Key Features
- **Complete Isolation**: Each Buildkite job runs in its own user account
- **Automatic Cleanup**: Processes and temporary files are cleaned up after each job
- **Daily Image Rebuilds**: Automated nightly rebuilds ensure fresh, up-to-date environments
- **Multi-Version Support**: Supports macOS 13, 14, and 15 simultaneously
- **Auto-Scaling**: Automatic scaling based on job queue demand
- **Health Monitoring**: Continuous health checks and monitoring
- **Cost Optimization**: Efficient resource utilization and cleanup
## Directory Structure
```
.buildkite/macos-runners/
├── packer/ # Packer configuration for VM images
│ ├── macos-base.pkr.hcl # Main Packer configuration
│ └── ssh-keys/ # SSH keys for VM access
├── terraform/ # Terraform configuration for VM fleet
│ ├── main.tf # Main Terraform configuration
│ ├── variables.tf # Variable definitions
│ ├── outputs.tf # Output definitions
│ └── user-data.sh # VM initialization script
├── scripts/ # Management and utility scripts
│ ├── bootstrap-macos.sh # macOS-specific bootstrap script
│ ├── create-build-user.sh # User creation script
│ ├── cleanup-build-user.sh # User cleanup script
│ └── job-runner.sh # Main job runner script
├── github-actions/ # GitHub Actions workflows
│ ├── image-rebuild.yml # Daily image rebuild workflow
│ └── deploy-fleet.yml # Fleet deployment workflow
└── README.md # This file
```
## Prerequisites
Before deploying the infrastructure, ensure you have:
1. **MacStadium Account**: Active MacStadium Orka account with API access
2. **AWS Account**: For Terraform state storage and CloudWatch monitoring
3. **GitHub Repository**: With required secrets configured
4. **Buildkite Account**: With organization and agent tokens
5. **Required Tools**: Packer, Terraform, AWS CLI, and MacStadium CLI
## Required Secrets
Configure the following secrets in your GitHub repository:
### MacStadium
- `MACSTADIUM_API_KEY`: MacStadium API key
- `ORKA_ENDPOINT`: MacStadium Orka API endpoint
- `ORKA_AUTH_TOKEN`: MacStadium authentication token
### AWS
- `AWS_ACCESS_KEY_ID`: AWS access key ID
- `AWS_SECRET_ACCESS_KEY`: AWS secret access key
### Buildkite
- `BUILDKITE_AGENT_TOKEN`: Buildkite agent token
- `BUILDKITE_API_TOKEN`: Buildkite API token (for monitoring)
- `BUILDKITE_ORG`: Buildkite organization slug
### GitHub
- `GITHUB_TOKEN`: GitHub personal access token (for private repositories)
### Notifications
- `DISCORD_WEBHOOK_URL`: Discord webhook URL for notifications
## Quick Start
### 1. Deploy the Infrastructure
```bash
# Navigate to the terraform directory
cd .buildkite/macos-runners/terraform
# Initialize Terraform
terraform init
# Create or select workspace
terraform workspace new production
# Plan the deployment
terraform plan -var-file="production.tfvars"
# Apply the deployment
terraform apply -var-file="production.tfvars"
```
### 2. Build VM Images
```bash
# Navigate to the packer directory
cd .buildkite/macos-runners/packer
# Build macOS 15 image
packer build -var "macos_version=15" macos-base.pkr.hcl
# Build macOS 14 image
packer build -var "macos_version=14" macos-base.pkr.hcl
# Build macOS 13 image
packer build -var "macos_version=13" macos-base.pkr.hcl
```
### 3. Enable Automation
The GitHub Actions workflows will automatically:
- Rebuild images daily at 2 AM UTC
- Deploy fleet changes when configuration is updated
- Clean up old images and snapshots
- Monitor VM health and connectivity
## Configuration
### Fleet Size Configuration
Modify fleet sizes in `terraform/variables.tf`:
```hcl
variable "fleet_size" {
default = {
macos_13 = 4 # Number of macOS 13 VMs
macos_14 = 6 # Number of macOS 14 VMs
macos_15 = 8 # Number of macOS 15 VMs
}
}
```
### VM Configuration
Adjust VM specifications in `terraform/variables.tf`:
```hcl
variable "vm_configuration" {
default = {
cpu_count = 12 # Number of CPU cores
memory_gb = 32 # Memory in GB
disk_size = 500 # Disk size in GB
}
}
```
### Auto-Scaling Configuration
Configure auto-scaling parameters:
```hcl
variable "autoscaling_config" {
default = {
min_size = 2
max_size = 30
desired_capacity = 10
scale_up_threshold = 80
scale_down_threshold = 20
scale_up_adjustment = 2
scale_down_adjustment = 1
cooldown_period = 300
}
}
```
## Software Included
Each VM image includes:
### Development Tools
- Xcode Command Line Tools
- LLVM/Clang 19.1.7 (exact version)
- CMake 3.30.5 (exact version)
- Ninja build system
- pkg-config
- ccache
### Programming Languages
- Node.js 24.3.0 (exact version, matches bootstrap.sh)
- Bun 1.2.17 (exact version, matches bootstrap.sh)
- Python 3.11 and 3.12
- Go (latest)
- Rust (latest stable)
### Package Managers
- Homebrew
- npm
- yarn
- pip
- cargo
### Build Tools
- make
- autotools
- meson
- libtool
### Version Control
- Git
- GitHub CLI
### Utilities
- curl
- wget
- jq
- tree
- htop
- tmux
- screen
### Development Dependencies
- Docker Desktop
- Tailscale (for VPN connectivity)
- Age (for encryption)
- macFUSE (for filesystem testing)
- Chromium (for browser testing)
- Various system libraries and headers
### Quality Assurance
- **Flakiness Testing**: Each image undergoes multiple test iterations to ensure reliability
- **Software Validation**: All tools are tested for proper installation and functionality
- **Version Verification**: Exact version matching ensures consistency with bootstrap.sh
## User Isolation
Each Buildkite job runs in complete isolation:
1. **Unique User**: Each job gets a unique user account (`bk-<job-id>`; a staleness check follows this list)
2. **Isolated Environment**: Separate home directory and environment variables
3. **Process Isolation**: All processes are killed after job completion
4. **File System Cleanup**: Temporary files and caches are cleaned up
5. **Network Isolation**: No shared network resources between jobs
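
Because build users follow the `bk-<job-id>` naming convention, stale accounts are easy to detect; a quick check might look like this sketch:

```bash
# Any bk-* account still present between jobs indicates a failed cleanup.
if dscl . -list /Users | grep -q '^bk-'; then
  echo "stale build users found:"
  dscl . -list /Users | grep '^bk-'
else
  echo "no stale build users"
fi
```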
## Monitoring and Alerting
The infrastructure includes comprehensive monitoring:
- **Health Checks**: HTTP health endpoints on each VM
- **CloudWatch Metrics**: CPU, memory, disk usage monitoring
- **Buildkite Integration**: Agent connectivity monitoring
- **Discord Notifications**: Success/failure notifications
- **Log Aggregation**: Centralized logging for troubleshooting
## Security Considerations
- **Encrypted Disks**: All VM disks are encrypted
- **Network Security**: Security groups restrict network access
- **SSH Key Management**: Secure SSH key distribution
- **Regular Updates**: Automatic security updates
- **Process Isolation**: Complete isolation between jobs
- **Secure Credential Handling**: Secrets are managed securely
## Troubleshooting
### Common Issues
1. **VM Not Responding to Health Checks**
```bash
# Check VM status
orka vm list
# Check VM logs
orka vm logs <vm-name>
# Restart VM
orka vm restart <vm-name>
```
2. **Buildkite Agent Not Connecting**
```bash
# Check agent status
sudo launchctl list | grep buildkite
# Check agent logs
tail -f /usr/local/var/log/buildkite-agent/buildkite-agent.log
# Restart agent
sudo launchctl unload /Library/LaunchDaemons/com.buildkite.buildkite-agent.plist
sudo launchctl load /Library/LaunchDaemons/com.buildkite.buildkite-agent.plist
```
3. **User Creation Failures**
```bash
# Check user creation logs
tail -f /var/log/system.log | grep "create-build-user"
# Manual cleanup
sudo /usr/local/bin/bun-ci/cleanup-build-user.sh <username>
```
4. **Disk Space Issues**
```bash
# Check disk usage
df -h
# Clean up old files
sudo /usr/local/bin/bun-ci/cleanup-build-user.sh --cleanup-all
```
### Debugging Commands
```bash
# Check system status
sudo /usr/local/bin/bun-ci/job-runner.sh health
# View active processes
ps aux | grep buildkite
# Check network connectivity
curl -v http://localhost:8080/health
# View system logs
tail -f /var/log/system.log
# Check Docker status
docker info
```
## Maintenance
### Regular Tasks
1. **Image Updates**: Images are rebuilt daily automatically
2. **Fleet Updates**: Terraform changes are applied automatically
3. **Cleanup**: Old images and snapshots are cleaned up automatically
4. **Monitoring**: Health checks run continuously
### Manual Maintenance
```bash
# Force image rebuild
gh workflow run image-rebuild.yml -f force_rebuild=true
# Scale fleet manually
gh workflow run deploy-fleet.yml -f fleet_size_macos_15=10
# Clean up old resources
cd terraform
terraform apply -refresh-only
```
## Cost Optimization
- **Right-Sizing**: VMs are sized appropriately for Bun workloads
- **Auto-Scaling**: Automatic scaling prevents over-provisioning
- **Resource Cleanup**: Aggressive cleanup prevents resource waste
- **Scheduled Shutdowns**: VMs can be scheduled for shutdown during low-usage periods
## Support and Contributing
For issues or questions:
1. Check the troubleshooting section above
2. Review GitHub Actions workflow logs
3. Check MacStadium Orka console
4. Open an issue in the repository
When contributing:
1. Test changes in a staging environment first
2. Update documentation as needed
3. Follow the existing code style
4. Add appropriate tests and validation
## License
This infrastructure code is part of the Bun project and follows the same license terms.


@@ -1,376 +0,0 @@
name: Deploy macOS Runner Fleet

on:
  workflow_dispatch:
    inputs:
      environment:
        description: 'Deployment environment'
        required: true
        default: 'production'
        type: choice
        options:
          - production
          - staging
          - development
      fleet_size_macos_13:
        description: 'Number of macOS 13 VMs'
        required: false
        default: '4'
      fleet_size_macos_14:
        description: 'Number of macOS 14 VMs'
        required: false
        default: '6'
      fleet_size_macos_15:
        description: 'Number of macOS 15 VMs'
        required: false
        default: '8'
      force_deploy:
        description: 'Force deployment even if no changes'
        required: false
        default: false
        type: boolean

env:
  TERRAFORM_VERSION: "1.6.0"
  AWS_REGION: "us-west-2"
jobs:
  validate-inputs:
    runs-on: ubuntu-latest
    outputs:
      validated: ${{ steps.validate.outputs.validated }}
      total_vms: ${{ steps.validate.outputs.total_vms }}
    steps:
      - name: Validate inputs
        id: validate
        run: |
          # Validate fleet sizes
          macos_13="${{ github.event.inputs.fleet_size_macos_13 }}"
          macos_14="${{ github.event.inputs.fleet_size_macos_14 }}"
          macos_15="${{ github.event.inputs.fleet_size_macos_15 }}"

          # Check if inputs are valid numbers
          if ! [[ "$macos_13" =~ ^[0-9]+$ ]] || ! [[ "$macos_14" =~ ^[0-9]+$ ]] || ! [[ "$macos_15" =~ ^[0-9]+$ ]]; then
            echo "Error: Fleet sizes must be valid numbers"
            exit 1
          fi

          # Check if at least one VM is requested
          total_vms=$((macos_13 + macos_14 + macos_15))
          if [[ $total_vms -eq 0 ]]; then
            echo "Error: At least one VM must be requested"
            exit 1
          fi

          # Check reasonable limits
          if [[ $total_vms -gt 50 ]]; then
            echo "Error: Total VMs cannot exceed 50"
            exit 1
          fi

          echo "validated=true" >> $GITHUB_OUTPUT
          echo "total_vms=$total_vms" >> $GITHUB_OUTPUT
          echo "Validation passed:"
          echo "- macOS 13: $macos_13 VMs"
          echo "- macOS 14: $macos_14 VMs"
          echo "- macOS 15: $macos_15 VMs"
          echo "- Total: $total_vms VMs"
  plan-deployment:
    runs-on: ubuntu-latest
    needs: validate-inputs
    if: needs.validate-inputs.outputs.validated == 'true'
    outputs:
      plan_status: ${{ steps.plan.outputs.plan_status }}
      has_changes: ${{ steps.plan.outputs.has_changes }}
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - name: Setup Terraform
        uses: hashicorp/setup-terraform@v3
        with:
          terraform_version: ${{ env.TERRAFORM_VERSION }}
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: ${{ env.AWS_REGION }}
      - name: Initialize Terraform
        working-directory: .buildkite/macos-runners/terraform
        run: |
          terraform init
          terraform workspace select ${{ github.event.inputs.environment }} || terraform workspace new ${{ github.event.inputs.environment }}
      - name: Create terraform variables file
        working-directory: .buildkite/macos-runners/terraform
        run: |
          cat > terraform.tfvars << EOF
          environment = "${{ github.event.inputs.environment }}"
          fleet_size = {
            macos_13 = ${{ github.event.inputs.fleet_size_macos_13 }}
            macos_14 = ${{ github.event.inputs.fleet_size_macos_14 }}
            macos_15 = ${{ github.event.inputs.fleet_size_macos_15 }}
          }
          EOF
      - name: Plan Terraform deployment
        id: plan
        working-directory: .buildkite/macos-runners/terraform
        run: |
          # With -detailed-exitcode, terraform plan exits 0 for no changes,
          # 2 for pending changes, and 1 on error. Disable errexit around the
          # plan so exit code 2 can be captured instead of aborting the step.
          set +e
          terraform plan \
            -var "macstadium_api_key=${{ secrets.MACSTADIUM_API_KEY }}" \
            -var "buildkite_agent_token=${{ secrets.BUILDKITE_AGENT_TOKEN }}" \
            -var "github_token=${{ secrets.GITHUB_TOKEN }}" \
            -out=tfplan \
            -detailed-exitcode > plan_output.txt 2>&1
          plan_exit_code=$?
          set -e

          # Check plan results
          if [[ $plan_exit_code -eq 0 ]]; then
            echo "plan_status=no_changes" >> $GITHUB_OUTPUT
            echo "has_changes=false" >> $GITHUB_OUTPUT
          elif [[ $plan_exit_code -eq 2 ]]; then
            echo "plan_status=has_changes" >> $GITHUB_OUTPUT
            echo "has_changes=true" >> $GITHUB_OUTPUT
          else
            echo "plan_status=failed" >> $GITHUB_OUTPUT
            echo "has_changes=false" >> $GITHUB_OUTPUT
            cat plan_output.txt
            exit 1
          fi

          # Save plan output
          echo "Plan output:"
          cat plan_output.txt
      - name: Upload plan
        uses: actions/upload-artifact@v4
        with:
          name: terraform-plan
          path: |
            .buildkite/macos-runners/terraform/tfplan
            .buildkite/macos-runners/terraform/plan_output.txt
          retention-days: 30

  deploy:
    runs-on: ubuntu-latest
    needs: [validate-inputs, plan-deployment]
    if: needs.plan-deployment.outputs.has_changes == 'true' || github.event.inputs.force_deploy == 'true'
    environment: ${{ github.event.inputs.environment }}
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - name: Setup Terraform
        uses: hashicorp/setup-terraform@v3
        with:
          terraform_version: ${{ env.TERRAFORM_VERSION }}
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: ${{ env.AWS_REGION }}
      - name: Download plan
        uses: actions/download-artifact@v4
        with:
          name: terraform-plan
          path: .buildkite/macos-runners/terraform/
      - name: Initialize Terraform
        working-directory: .buildkite/macos-runners/terraform
        run: |
          terraform init
          terraform workspace select ${{ github.event.inputs.environment }}
      - name: Apply Terraform deployment
        working-directory: .buildkite/macos-runners/terraform
        run: |
          echo "Applying Terraform deployment..."
          terraform apply -auto-approve tfplan
      - name: Get deployment outputs
        working-directory: .buildkite/macos-runners/terraform
        run: |
          terraform output -json > terraform-outputs.json
          echo "Deployment outputs:"
          cat terraform-outputs.json | jq .
      - name: Upload deployment outputs
        uses: actions/upload-artifact@v4
        with:
          name: deployment-outputs-${{ github.event.inputs.environment }}
          path: .buildkite/macos-runners/terraform/terraform-outputs.json
          retention-days: 90
      - name: Verify deployment
        working-directory: .buildkite/macos-runners/terraform
        run: |
          echo "Verifying deployment..."

          # Check VM count
          vm_count=$(terraform output -json vm_instances | jq 'length')
          expected_count=${{ needs.validate-inputs.outputs.total_vms }}
          if [[ $vm_count -eq $expected_count ]]; then
            echo "✅ VM count matches expected: $vm_count"
          else
            echo "❌ VM count mismatch: expected $expected_count, got $vm_count"
            exit 1
          fi

          # Check VM states
          terraform output -json vm_instances | jq -r 'to_entries[] | "\(.key): \(.value.name) - \(.value.status)"' | while read vm_info; do
            echo "VM: $vm_info"
          done
  health-check:
    runs-on: ubuntu-latest
    needs: [validate-inputs, plan-deployment, deploy]
    if: always() && needs.deploy.result == 'success'
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - name: Setup dependencies
        run: |
          sudo apt-get update
          sudo apt-get install -y jq curl
      - name: Download deployment outputs
        uses: actions/download-artifact@v4
        with:
          name: deployment-outputs-${{ github.event.inputs.environment }}
          path: ./
      - name: Wait for VMs to be ready
        run: |
          echo "Waiting for VMs to be ready..."
          sleep 300 # Wait 5 minutes for VMs to initialize
      - name: Check VM health
        run: |
          echo "Checking VM health..."

          # Read VM details from outputs
          jq -r '.vm_instances.value | to_entries[] | "\(.value.name) \(.value.ip_address)"' terraform-outputs.json | while read vm_name vm_ip; do
            echo "Checking VM: $vm_name ($vm_ip)"

            # Check health endpoint
            max_attempts=12
            attempt=1
            while [[ $attempt -le $max_attempts ]]; do
              if curl -f -s --max-time 30 "http://$vm_ip:8080/health" > /dev/null; then
                echo "✅ $vm_name is healthy"
                break
              else
                echo "⏳ $vm_name not ready yet (attempt $attempt/$max_attempts)"
                sleep 30
                ((attempt++))
              fi
            done

            if [[ $attempt -gt $max_attempts ]]; then
              echo "❌ $vm_name failed health check"
            fi
          done
      - name: Check Buildkite connectivity
        run: |
          echo "Checking Buildkite agent connectivity..."

          # Wait a bit more for agents to connect
          sleep 60

          # Check connected agents
          curl -s -H "Authorization: Bearer ${{ secrets.BUILDKITE_API_TOKEN }}" \
            "https://api.buildkite.com/v2/organizations/${{ secrets.BUILDKITE_ORG }}/agents" | \
            jq -r '.[] | select(.name | test("^bun-runner-")) | "\(.name) \(.connection_state) \(.hostname)"' | \
            while read agent_name state hostname; do
              echo "Agent: $agent_name - State: $state - Host: $hostname"
            done
  notify-success:
    runs-on: ubuntu-latest
    needs: [validate-inputs, plan-deployment, deploy, health-check]
    if: always() && needs.deploy.result == 'success'
    steps:
      - name: Notify success
        uses: sarisia/actions-status-discord@v1
        with:
          webhook: ${{ secrets.DISCORD_WEBHOOK_URL }}
          status: success
          title: "macOS runner fleet deployed successfully"
          description: |
            🚀 **macOS runner fleet deployed successfully**
            **Environment:** ${{ github.event.inputs.environment }}
            **Total VMs:** ${{ needs.validate-inputs.outputs.total_vms }}
            **Fleet composition:**
            - macOS 13: ${{ github.event.inputs.fleet_size_macos_13 }} VMs
            - macOS 14: ${{ github.event.inputs.fleet_size_macos_14 }} VMs
            - macOS 15: ${{ github.event.inputs.fleet_size_macos_15 }} VMs
            **Repository:** ${{ github.repository }}
            [View Deployment](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }})
          color: 0x00ff00
          username: "GitHub Actions"

  notify-failure:
    runs-on: ubuntu-latest
    needs: [validate-inputs, plan-deployment, deploy, health-check]
    if: always() && (needs.validate-inputs.result == 'failure' || needs.plan-deployment.result == 'failure' || needs.deploy.result == 'failure')
    steps:
      - name: Notify failure
        uses: sarisia/actions-status-discord@v1
        with:
          webhook: ${{ secrets.DISCORD_WEBHOOK_URL }}
          status: failure
          title: "macOS runner fleet deployment failed"
          description: |
            🔴 **macOS runner fleet deployment failed**
            **Environment:** ${{ github.event.inputs.environment }}
            **Failed stage:** ${{ needs.validate-inputs.result == 'failure' && 'Validation' || needs.plan-deployment.result == 'failure' && 'Planning' || 'Deployment' }}
            **Repository:** ${{ github.repository }}
            [View Deployment](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }})
            Please check the logs for more details.
          color: 0xff0000
          username: "GitHub Actions"

  notify-no-changes:
    runs-on: ubuntu-latest
    needs: [validate-inputs, plan-deployment]
    if: needs.plan-deployment.outputs.has_changes == 'false' && github.event.inputs.force_deploy != 'true'
    steps:
      - name: Notify no changes
        uses: sarisia/actions-status-discord@v1
        with:
          webhook: ${{ secrets.DISCORD_WEBHOOK_URL }}
          status: cancelled
          title: "macOS runner fleet deployment skipped"
          description: |
            **macOS runner fleet deployment skipped** - no changes detected in Terraform plan
          color: 0x808080
          username: "GitHub Actions"


@@ -1,515 +0,0 @@
name: Rebuild macOS Runner Images

on:
  schedule:
    # Run daily at 2 AM UTC
    - cron: '0 2 * * *'
  workflow_dispatch:
    inputs:
      macos_versions:
        description: 'macOS versions to rebuild (comma-separated: 13,14,15)'
        required: false
        default: '13,14,15'
      force_rebuild:
        description: 'Force rebuild even if no changes detected'
        required: false
        default: 'false'
        type: boolean

env:
  PACKER_VERSION: "1.9.4"
  TERRAFORM_VERSION: "1.6.0"

jobs:
  check-changes:
    runs-on: ubuntu-latest
    outputs:
      should_rebuild: ${{ steps.check.outputs.should_rebuild }}
      changed_files: ${{ steps.check.outputs.changed_files }}
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 2
      - name: Check for changes
        id: check
        run: |
          # Check if any relevant files have changed in the last 24 hours
          changed_files=$(git diff --name-only HEAD~1 HEAD | grep -E "(bootstrap|packer|\.buildkite/macos-runners)" | head -20)
          if [[ -n "$changed_files" ]] || [[ "${{ github.event.inputs.force_rebuild }}" == "true" ]]; then
            echo "should_rebuild=true" >> $GITHUB_OUTPUT
            echo "changed_files<<EOF" >> $GITHUB_OUTPUT
            echo "$changed_files" >> $GITHUB_OUTPUT
            echo "EOF" >> $GITHUB_OUTPUT
          else
            echo "should_rebuild=false" >> $GITHUB_OUTPUT
            echo "changed_files=" >> $GITHUB_OUTPUT
          fi
  build-images:
    runs-on: ubuntu-latest
    needs: check-changes
    if: needs.check-changes.outputs.should_rebuild == 'true'
    strategy:
      matrix:
        macos_version: [13, 14, 15]
      fail-fast: false
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - name: Setup Packer
        uses: hashicorp/setup-packer@main
        with:
          version: ${{ env.PACKER_VERSION }}
      - name: Setup Terraform
        uses: hashicorp/setup-terraform@v3
        with:
          terraform_version: ${{ env.TERRAFORM_VERSION }}
      - name: Install dependencies
        run: |
          sudo apt-get update
          sudo apt-get install -y jq curl
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: us-west-2
      - name: Validate Packer configuration
        working-directory: .buildkite/macos-runners/packer
        run: |
          packer validate \
            -var "macos_version=${{ matrix.macos_version }}" \
            -var "orka_endpoint=${{ secrets.ORKA_ENDPOINT }}" \
            -var "orka_auth_token=${{ secrets.ORKA_AUTH_TOKEN }}" \
            macos-base.pkr.hcl
      - name: Build macOS ${{ matrix.macos_version }} image
        working-directory: .buildkite/macos-runners/packer
        run: |
          echo "Building macOS ${{ matrix.macos_version }} image..."

          # Set build variables
          export PACKER_LOG=1
          export PACKER_LOG_PATH="./packer-build-macos-${{ matrix.macos_version }}.log"

          # Build the image. A case statement maps the macOS version to its
          # codename; the original chained &&/|| one-liner printed both
          # "ventura" and "sonoma" for version 13 due to left associativity.
          packer build \
            -var "macos_version=${{ matrix.macos_version }}" \
            -var "orka_endpoint=${{ secrets.ORKA_ENDPOINT }}" \
            -var "orka_auth_token=${{ secrets.ORKA_AUTH_TOKEN }}" \
            -var "base_image=base-images/macos-${{ matrix.macos_version }}-$(case "${{ matrix.macos_version }}" in 13) echo 'ventura' ;; 14) echo 'sonoma' ;; *) echo 'sequoia' ;; esac)" \
            macos-base.pkr.hcl
      - name: Validate built image
        working-directory: .buildkite/macos-runners/packer
        run: |
          echo "Validating built image..."

          # Get the latest built image ID
          IMAGE_ID=$(orka image list --output json | jq -r '.[] | select(.name | test("^bun-macos-${{ matrix.macos_version }}-")) | .id' | head -1)
          if [ -z "$IMAGE_ID" ]; then
            echo "❌ No image found for macOS ${{ matrix.macos_version }}"
            exit 1
          fi
          echo "✅ Found image: $IMAGE_ID"

          # Create a test VM to validate the image
          VM_NAME="test-validation-${{ matrix.macos_version }}-$(date +%s)"
          echo "Creating test VM: $VM_NAME"
          orka vm create \
            --name "$VM_NAME" \
            --image "$IMAGE_ID" \
            --cpu 4 \
            --memory 8 \
            --wait

          # Wait for VM to be ready
          sleep 60

          # Get VM IP
          VM_IP=$(orka vm show "$VM_NAME" --output json | jq -r '.ip_address')
          echo "Testing VM at IP: $VM_IP"

          # Test software installations
          echo "Testing software installations..."
          # Test Node.js
          ssh -o StrictHostKeyChecking=no admin@$VM_IP 'node --version' || exit 1
          # Test Bun
          ssh -o StrictHostKeyChecking=no admin@$VM_IP 'bun --version' || exit 1
          # Test build tools
          ssh -o StrictHostKeyChecking=no admin@$VM_IP 'cmake --version' || exit 1
          ssh -o StrictHostKeyChecking=no admin@$VM_IP 'clang --version' || exit 1
          # Test Docker
          ssh -o StrictHostKeyChecking=no admin@$VM_IP 'docker --version' || exit 1
          # Test Tailscale
          ssh -o StrictHostKeyChecking=no admin@$VM_IP 'tailscale --version' || exit 1
          # Test health endpoint
          ssh -o StrictHostKeyChecking=no admin@$VM_IP 'curl -f http://localhost:8080/health' || exit 1
          echo "✅ All software validations passed"

          # Clean up test VM
          orka vm delete "$VM_NAME" --force
          echo "✅ Image validation completed successfully"
      - name: Run flakiness checks
        working-directory: .buildkite/macos-runners/packer
        run: |
          echo "Running flakiness checks..."

          # Get the latest built image ID
          IMAGE_ID=$(orka image list --output json | jq -r '.[] | select(.name | test("^bun-macos-${{ matrix.macos_version }}-")) | .id' | head -1)

          # Run multiple test iterations to check for flakiness
          ITERATIONS=3
          PASSED=0
          FAILED=0
          for i in $(seq 1 $ITERATIONS); do
            echo "Running flakiness test iteration $i/$ITERATIONS..."
            VM_NAME="flakiness-test-${{ matrix.macos_version }}-$i-$(date +%s)"

            # Create test VM
            orka vm create \
              --name "$VM_NAME" \
              --image "$IMAGE_ID" \
              --cpu 4 \
              --memory 8 \
              --wait
            sleep 30

            # Get VM IP
            VM_IP=$(orka vm show "$VM_NAME" --output json | jq -r '.ip_address')

            # Run a series of quick tests
            TEST_PASSED=true

            # Test 1: Basic command execution
            if ! ssh -o StrictHostKeyChecking=no -o ConnectTimeout=30 admin@$VM_IP 'echo "test" > /tmp/test.txt && cat /tmp/test.txt'; then
              echo "❌ Basic command test failed"
              TEST_PASSED=false
            fi
            # Test 2: Node.js execution
            if ! ssh -o StrictHostKeyChecking=no -o ConnectTimeout=30 admin@$VM_IP 'node -e "console.log(\"Node.js test\")"'; then
              echo "❌ Node.js test failed"
              TEST_PASSED=false
            fi
            # Test 3: Bun execution
            if ! ssh -o StrictHostKeyChecking=no -o ConnectTimeout=30 admin@$VM_IP 'bun -e "console.log(\"Bun test\")"'; then
              echo "❌ Bun test failed"
              TEST_PASSED=false
            fi
            # Test 4: Build tools
            if ! ssh -o StrictHostKeyChecking=no -o ConnectTimeout=30 admin@$VM_IP 'clang --version > /tmp/clang_version.txt'; then
              echo "❌ Clang test failed"
              TEST_PASSED=false
            fi
            # Test 5: File system operations
            if ! ssh -o StrictHostKeyChecking=no -o ConnectTimeout=30 admin@$VM_IP 'mkdir -p /tmp/test_dir && touch /tmp/test_dir/test_file'; then
              echo "❌ File system test failed"
              TEST_PASSED=false
            fi
            # Test 6: Process creation
            if ! ssh -o StrictHostKeyChecking=no -o ConnectTimeout=30 admin@$VM_IP 'ps aux | grep -v grep | wc -l'; then
              echo "❌ Process test failed"
              TEST_PASSED=false
            fi

            # Clean up test VM
            orka vm delete "$VM_NAME" --force

            if [ "$TEST_PASSED" = true ]; then
              echo "✅ Iteration $i passed"
              PASSED=$((PASSED + 1))
            else
              echo "❌ Iteration $i failed"
              FAILED=$((FAILED + 1))
            fi

            # Short delay between iterations
            sleep 10
          done

          echo "Flakiness check results:"
          echo "- Passed: $PASSED/$ITERATIONS"
          echo "- Failed: $FAILED/$ITERATIONS"

          # Calculate success rate
          SUCCESS_RATE=$((PASSED * 100 / ITERATIONS))
          echo "- Success rate: $SUCCESS_RATE%"

          # Fail if success rate is below 80%
          if [ $SUCCESS_RATE -lt 80 ]; then
            echo "❌ Image is too flaky! Success rate: $SUCCESS_RATE% (minimum: 80%)"
            exit 1
          fi
          echo "✅ Flakiness checks passed with $SUCCESS_RATE% success rate"
      - name: Upload build logs
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: packer-logs-macos-${{ matrix.macos_version }}
          path: .buildkite/macos-runners/packer/packer-build-macos-${{ matrix.macos_version }}.log
          retention-days: 7
      - name: Notify on failure
        if: failure()
        uses: sarisia/actions-status-discord@v1
        with:
          webhook: ${{ secrets.DISCORD_WEBHOOK_URL }}
          status: failure
          title: "macOS ${{ matrix.macos_version }} image build failed"
          description: |
            🔴 **macOS ${{ matrix.macos_version }} image build failed**
            **Repository:** ${{ github.repository }}
            **Branch:** ${{ github.ref }}
            **Commit:** ${{ github.sha }}
            [Check the logs](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }})
          color: 0xff0000
          username: "GitHub Actions"
update-terraform:
runs-on: ubuntu-latest
needs: [check-changes, build-images]
if: needs.check-changes.outputs.should_rebuild == 'true' && needs.build-images.result == 'success'
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Setup Terraform
uses: hashicorp/setup-terraform@v3
with:
terraform_version: ${{ env.TERRAFORM_VERSION }}
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v4
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: us-west-2
- name: Initialize Terraform
working-directory: .buildkite/macos-runners/terraform
run: |
terraform init
terraform workspace select production || terraform workspace new production
- name: Plan Terraform changes
working-directory: .buildkite/macos-runners/terraform
run: |
terraform plan \
-var "macstadium_api_key=${{ secrets.MACSTADIUM_API_KEY }}" \
-var "buildkite_agent_token=${{ secrets.BUILDKITE_AGENT_TOKEN }}" \
-var "github_token=${{ secrets.GITHUB_TOKEN }}" \
-out=tfplan
- name: Apply Terraform changes
working-directory: .buildkite/macos-runners/terraform
run: |
terraform apply -auto-approve tfplan
- name: Save Terraform outputs
working-directory: .buildkite/macos-runners/terraform
run: |
terraform output -json > terraform-outputs.json
- name: Upload Terraform outputs
uses: actions/upload-artifact@v4
with:
name: terraform-outputs
path: .buildkite/macos-runners/terraform/terraform-outputs.json
retention-days: 30
cleanup-old-images:
runs-on: ubuntu-latest
needs: [check-changes, build-images, update-terraform]
if: always() && needs.check-changes.outputs.should_rebuild == 'true'
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Setup AWS CLI
uses: aws-actions/configure-aws-credentials@v4
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: us-west-2
- name: Install MacStadium CLI
run: |
curl -L "https://github.com/macstadium/orka-cli/releases/latest/download/orka-cli-linux-amd64.tar.gz" | tar -xz
sudo mv orka-cli /usr/local/bin/orka
sudo chmod +x /usr/local/bin/orka
- name: Configure MacStadium CLI
run: |
orka config set endpoint ${{ secrets.ORKA_ENDPOINT }}
orka auth token ${{ secrets.ORKA_AUTH_TOKEN }}
- name: Clean up old images
run: |
echo "Cleaning up old images..."
# Get list of all images
orka image list --output json > images.json
# Find images older than 7 days
cutoff_date=$(date -d '7 days ago' +%s)
# Parse and delete old images
jq -r '.[] | select(.name | test("^bun-macos-")) | select(.created_at | strptime("%Y-%m-%dT%H:%M:%SZ") | mktime < '$cutoff_date') | .name' images.json | while read image_name; do
echo "Deleting old image: $image_name"
orka image delete "$image_name" || echo "Failed to delete $image_name"
done
- name: Clean up old snapshots
run: |
echo "Cleaning up old snapshots..."
# Get list of all snapshots
orka snapshot list --output json > snapshots.json
# Find snapshots older than 7 days
cutoff_date=$(date -d '7 days ago' +%s)
# Parse and delete old snapshots
jq -r '.[] | select(.name | test("^bun-macos-")) | select(.created_at | strptime("%Y-%m-%dT%H:%M:%SZ") | mktime < '$cutoff_date') | .name' snapshots.json | while read snapshot_name; do
echo "Deleting old snapshot: $snapshot_name"
orka snapshot delete "$snapshot_name" || echo "Failed to delete $snapshot_name"
done
health-check:
runs-on: ubuntu-latest
needs: [check-changes, build-images, update-terraform]
if: always() && needs.check-changes.outputs.should_rebuild == 'true'
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Setup AWS CLI
uses: aws-actions/configure-aws-credentials@v4
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: us-west-2
- name: Install MacStadium CLI
run: |
curl -L "https://github.com/macstadium/orka-cli/releases/latest/download/orka-cli-linux-amd64.tar.gz" | tar -xz
sudo mv orka-cli /usr/local/bin/orka
sudo chmod +x /usr/local/bin/orka
- name: Configure MacStadium CLI
run: |
orka config set endpoint ${{ secrets.ORKA_ENDPOINT }}
orka auth token ${{ secrets.ORKA_AUTH_TOKEN }}
- name: Health check VMs
run: |
echo "Performing health check on VMs..."
# Get list of running VMs
orka vm list --output json > vms.json
# Check each VM
jq -r '.[] | select(.name | test("^bun-runner-")) | select(.status == "running") | "\(.name) \(.ip_address)"' vms.json | while read vm_name vm_ip; do
echo "Checking VM: $vm_name ($vm_ip)"
# Check if VM is responding to health checks
if curl -f -s --max-time 30 "http://$vm_ip:8080/health" > /dev/null; then
echo "✅ $vm_name is healthy"
else
echo "❌ $vm_name is not responding to health checks"
fi
done
- name: Check Buildkite agent connectivity
run: |
echo "Checking Buildkite agent connectivity..."
# Use Buildkite API to check connected agents
curl -s -H "Authorization: Bearer ${{ secrets.BUILDKITE_API_TOKEN }}" \
"https://api.buildkite.com/v2/organizations/${{ secrets.BUILDKITE_ORG }}/agents" | \
jq -r '.[] | select(.name | test("^bun-runner-")) | "\(.name) \(.connection_state)"' | \
while read agent_name state; do
echo "Agent: $agent_name - State: $state"
done
notify-success:
runs-on: ubuntu-latest
needs: [check-changes, build-images, update-terraform, cleanup-old-images, health-check]
if: always() && needs.check-changes.outputs.should_rebuild == 'true' && needs.build-images.result == 'success'
steps:
- name: Notify success
uses: sarisia/actions-status-discord@v1
with:
webhook: ${{ secrets.DISCORD_WEBHOOK_URL }}
status: success
title: "macOS runner images rebuilt successfully"
description: |
✅ **macOS runner images rebuilt successfully**
**Repository:** ${{ github.repository }}
**Branch:** ${{ github.ref }}
**Commit:** ${{ github.sha }}
**Changes detected in:**
${{ needs.check-changes.outputs.changed_files }}
**Images built:** ${{ github.event.inputs.macos_versions || '13,14,15' }}
[Check the deployment](${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }})
color: 0x00ff00
username: "GitHub Actions"
notify-skip:
runs-on: ubuntu-latest
needs: check-changes
if: needs.check-changes.outputs.should_rebuild == 'false'
steps:
- name: Notify skip
uses: sarisia/actions-status-discord@v1
with:
webhook: ${{ secrets.DISCORD_WEBHOOK_URL }}
status: cancelled
title: "macOS runner image rebuild skipped"
description: |
**macOS runner image rebuild skipped** - no changes detected in the last 24 hours
color: 0x808080
username: "GitHub Actions"

View File

@@ -1,270 +0,0 @@
packer {
required_plugins {
macstadium-orka = {
version = ">= 3.0.0"
source = "github.com/macstadium/macstadium-orka"
}
}
}
variable "orka_endpoint" {
description = "MacStadium Orka endpoint"
type = string
default = env("ORKA_ENDPOINT")
}
variable "orka_auth_token" {
description = "MacStadium Orka auth token"
type = string
default = env("ORKA_AUTH_TOKEN")
sensitive = true
}
variable "base_image" {
description = "Base macOS image to use"
type = string
default = "base-images/macos-15-sequoia"
}
variable "macos_version" {
description = "macOS version (13, 14, 15)"
type = string
default = "15"
}
variable "cpu_count" {
description = "Number of CPU cores"
type = number
default = 12
}
variable "memory_gb" {
description = "Memory in GB"
type = number
default = 32
}
source "macstadium-orka" "base" {
orka_endpoint = var.orka_endpoint
orka_auth_token = var.orka_auth_token
source_image = var.base_image
image_name = "bun-macos-${var.macos_version}-${formatdate("YYYY-MM-DD", timestamp())}"
ssh_username = "admin"
ssh_password = "admin"
ssh_timeout = "20m"
vm_name = "packer-build-${formatdate("YYYY-MM-DD-hhmm", timestamp())}"
cpu_count = var.cpu_count
memory_gb = var.memory_gb
# Enable GPU acceleration for better performance
gpu_passthrough = true
# Network configuration
vnc_bind_address = "0.0.0.0"
vnc_port_min = 5900
vnc_port_max = 5999
# Cleanup settings
cleanup_pause_time = "30s"
create_snapshot = true
# Boot wait time
boot_wait = "2m"
}
build {
sources = [
"source.macstadium-orka.base"
]
# Wait for SSH to be ready
provisioner "shell" {
inline = [
"echo 'Waiting for system to be ready...'",
"until ping -c1 google.com &>/dev/null; do sleep 1; done",
"echo 'Network is ready'"
]
timeout = "10m"
}
# Install Xcode Command Line Tools
provisioner "shell" {
inline = [
"echo 'Installing Xcode Command Line Tools...'",
"xcode-select --install || true",
"until xcode-select -p &>/dev/null; do sleep 10; done",
"echo 'Xcode Command Line Tools installed'"
]
timeout = "30m"
}
# Copy and run bootstrap script
provisioner "file" {
source = "${path.root}/../scripts/bootstrap-macos.sh"
destination = "/tmp/bootstrap-macos.sh"
}
provisioner "shell" {
inline = [
"chmod +x /tmp/bootstrap-macos.sh",
"sudo /tmp/bootstrap-macos.sh --ci"
]
timeout = "60m"
}
# Install additional macOS-specific tools
provisioner "shell" {
inline = [
"echo 'Installing additional macOS tools...'",
"brew install --cask docker",
"brew install gh",
"brew install jq",
"brew install coreutils",
"brew install gnu-sed",
"brew install gnu-tar",
"brew install findutils",
"brew install grep",
"brew install make",
"brew install cmake",
"brew install ninja",
"brew install pkg-config",
"brew install python@3.11",
"brew install python@3.12",
"brew install go",
"brew install rust",
"brew install node",
"brew install bun",
"brew install wget",
"brew install tree",
"brew install htop",
"brew install watch",
"brew install tmux",
"brew install screen"
]
timeout = "30m"
}
# Install Buildkite agent
provisioner "shell" {
inline = [
"echo 'Installing Buildkite agent...'",
"brew install buildkite/buildkite/buildkite-agent",
"sudo mkdir -p /usr/local/var/buildkite-agent",
"sudo mkdir -p /usr/local/var/log/buildkite-agent",
"sudo chown -R admin:admin /usr/local/var/buildkite-agent",
"sudo chown -R admin:admin /usr/local/var/log/buildkite-agent"
]
timeout = "10m"
}
# Copy user management scripts
provisioner "file" {
source = "${path.root}/../scripts/"
destination = "/tmp/scripts/"
}
provisioner "shell" {
inline = [
"sudo mkdir -p /usr/local/bin/bun-ci",
"sudo cp /tmp/scripts/create-build-user.sh /usr/local/bin/bun-ci/",
"sudo cp /tmp/scripts/cleanup-build-user.sh /usr/local/bin/bun-ci/",
"sudo cp /tmp/scripts/job-runner.sh /usr/local/bin/bun-ci/",
"sudo chmod +x /usr/local/bin/bun-ci/*.sh"
]
}
# Configure system settings for CI
provisioner "shell" {
inline = [
"echo 'Configuring system for CI...'",
"# Disable sleep and screensaver",
"sudo pmset -a displaysleep 0 sleep 0 disksleep 0",
"sudo pmset -a womp 1",
"# Disable automatic updates",
"sudo defaults write /Library/Preferences/com.apple.SoftwareUpdate AutomaticCheckEnabled -bool false",
"sudo defaults write /Library/Preferences/com.apple.SoftwareUpdate AutomaticDownload -bool false",
"sudo defaults write /Library/Preferences/com.apple.SoftwareUpdate AutomaticallyInstallMacOSUpdates -bool false",
"# Increase file descriptor limits",
"echo 'kern.maxfiles=1048576' | sudo tee -a /etc/sysctl.conf",
"echo 'kern.maxfilesperproc=1048576' | sudo tee -a /etc/sysctl.conf",
"# Enable core dumps",
"sudo mkdir -p /cores",
"sudo chmod 777 /cores",
"echo 'kern.corefile=/cores/core.%P' | sudo tee -a /etc/sysctl.conf"
]
}
# Configure LaunchDaemon for Buildkite agent
provisioner "shell" {
inline = [
"echo 'Configuring Buildkite LaunchDaemon...'",
"sudo tee /Library/LaunchDaemons/com.buildkite.buildkite-agent.plist > /dev/null <<EOF",
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>",
"<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/PropertyList-1.0.dtd\">",
"<plist version=\"1.0\">",
"<dict>",
" <key>Label</key>",
" <string>com.buildkite.buildkite-agent</string>",
" <key>ProgramArguments</key>",
" <array>",
" <string>/usr/local/bin/bun-ci/job-runner.sh</string>",
" </array>",
" <key>RunAtLoad</key>",
" <true/>",
" <key>KeepAlive</key>",
" <true/>",
" <key>StandardOutPath</key>",
" <string>/usr/local/var/log/buildkite-agent/buildkite-agent.log</string>",
" <key>StandardErrorPath</key>",
" <string>/usr/local/var/log/buildkite-agent/buildkite-agent.error.log</string>",
" <key>EnvironmentVariables</key>",
" <dict>",
" <key>PATH</key>",
" <string>/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin</string>",
" </dict>",
"</dict>",
"</plist>",
"EOF"
]
}
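# Optional sanity check (a sketch; plutil ships with macOS): lint the
# heredoc-generated plist so malformed XML is caught before launchd loads it.
provisioner "shell" {
inline = [
"sudo plutil -lint /Library/LaunchDaemons/com.buildkite.buildkite-agent.plist"
]
}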
# Clean up
provisioner "shell" {
inline = [
"echo 'Cleaning up...'",
"rm -rf /tmp/bootstrap-macos.sh /tmp/scripts/",
"sudo rm -rf /var/log/*.log /var/log/*/*.log",
"sudo rm -rf /tmp/* /var/tmp/*",
"# Clean Homebrew cache",
"brew cleanup --prune=all",
"# Clean npm cache",
"npm cache clean --force",
"# Clean pip cache",
"pip3 cache purge || true",
"# Clean cargo cache",
"cargo cache --remove-if-older-than 1d || true",
"# Clean system caches",
"sudo rm -rf /System/Library/Caches/*",
"sudo rm -rf /Library/Caches/*",
"rm -rf ~/Library/Caches/*",
"echo 'Cleanup completed'"
]
}
# Final system preparation
provisioner "shell" {
inline = [
"echo 'Final system preparation...'",
"# Ensure proper permissions",
"sudo chown -R admin:admin /usr/local/bin/bun-ci",
"sudo chown -R admin:admin /usr/local/var/buildkite-agent",
"sudo chown -R admin:admin /usr/local/var/log/buildkite-agent",
"# Load the LaunchDaemon",
"sudo launchctl load /Library/LaunchDaemons/com.buildkite.buildkite-agent.plist",
"echo 'Image preparation completed'"
]
}
}
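# Invocation sketch, assuming ORKA_ENDPOINT and ORKA_AUTH_TOKEN are exported
# (they feed the variable defaults above):
#   packer init .
#   packer build -var "macos_version=15" .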

View File

@@ -1,400 +0,0 @@
#!/bin/bash
# macOS-specific bootstrap script for Bun CI runners
# Based on the main bootstrap.sh but optimized for macOS CI environments
set -euo pipefail
print() {
echo "$@"
}
error() {
print "error: $@" >&2
exit 1
}
execute() {
print "$ $@" >&2
if ! "$@"; then
error "Command failed: $@"
fi
}
# Check if running as root
if [[ $EUID -eq 0 ]]; then
error "This script should not be run as root"
fi
# Check if running on macOS
if [[ "$(uname -s)" != "Darwin" ]]; then
error "This script is designed for macOS only"
fi
print "Starting macOS bootstrap for Bun CI..."
# Get macOS version
MACOS_VERSION=$(sw_vers -productVersion)
MACOS_MAJOR=$(echo "$MACOS_VERSION" | cut -d. -f1)
MACOS_MINOR=$(echo "$MACOS_VERSION" | cut -d. -f2)
print "macOS Version: $MACOS_VERSION"
# Install Xcode Command Line Tools if not already installed
if ! xcode-select -p &>/dev/null; then
print "Installing Xcode Command Line Tools..."
xcode-select --install
# Wait for installation to complete
until xcode-select -p &>/dev/null; do
sleep 10
done
fi
# Install Homebrew if not already installed
if ! command -v brew &>/dev/null; then
print "Installing Homebrew..."
/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
# Add Homebrew to PATH
if [[ "$(uname -m)" == "arm64" ]]; then
echo 'export PATH="/opt/homebrew/bin:$PATH"' >> ~/.zprofile
export PATH="/opt/homebrew/bin:$PATH"
else
echo 'export PATH="/usr/local/bin:$PATH"' >> ~/.zprofile
export PATH="/usr/local/bin:$PATH"
fi
fi
# Configure Homebrew for CI
export HOMEBREW_NO_INSTALL_CLEANUP=1
export HOMEBREW_NO_AUTO_UPDATE=1
export HOMEBREW_NO_ANALYTICS=1
# Update Homebrew
print "Updating Homebrew..."
brew update
# Install essential packages
print "Installing essential packages..."
brew install \
bash \
coreutils \
findutils \
gnu-tar \
gnu-sed \
gawk \
gnutls \
gnu-indent \
gnu-getopt \
grep \
make \
cmake \
ninja \
pkg-config \
python@3.11 \
python@3.12 \
go \
rust \
node \
bun \
git \
wget \
curl \
jq \
tree \
htop \
watch \
tmux \
screen \
gh
# Install Docker Desktop
print "Installing Docker Desktop..."
if [[ ! -d "/Applications/Docker.app" ]]; then
if [[ "$(uname -m)" == "arm64" ]]; then
curl -L "https://desktop.docker.com/mac/main/arm64/Docker.dmg" -o /tmp/Docker.dmg
else
curl -L "https://desktop.docker.com/mac/main/amd64/Docker.dmg" -o /tmp/Docker.dmg
fi
hdiutil attach /tmp/Docker.dmg
cp -R /Volumes/Docker/Docker.app /Applications/
hdiutil detach /Volumes/Docker
rm /tmp/Docker.dmg
fi
# Install Buildkite agent
print "Installing Buildkite agent..."
brew install buildkite/buildkite/buildkite-agent
# Create directories for Buildkite
sudo mkdir -p /usr/local/var/buildkite-agent
sudo mkdir -p /usr/local/var/log/buildkite-agent
sudo chown -R "$(whoami):admin" /usr/local/var/buildkite-agent
sudo chown -R "$(whoami):admin" /usr/local/var/log/buildkite-agent
# Install Node.js versions (exact version from bootstrap.sh)
print "Installing specific Node.js version..."
NODE_VERSION="24.3.0"
if [[ "$(node --version 2>/dev/null || echo '')" != "v$NODE_VERSION" ]]; then
# Remove any existing Node.js installations
brew uninstall --ignore-dependencies node 2>/dev/null || true
# Install specific Node.js version
if [[ "$(uname -m)" == "arm64" ]]; then
NODE_ARCH="arm64"
else
NODE_ARCH="x64"
fi
NODE_URL="https://nodejs.org/dist/v$NODE_VERSION/node-v$NODE_VERSION-darwin-$NODE_ARCH.tar.gz"
NODE_TAR="/tmp/node-v$NODE_VERSION-darwin-$NODE_ARCH.tar.gz"
curl -fsSL "$NODE_URL" -o "$NODE_TAR"
sudo tar -xzf "$NODE_TAR" -C /usr/local --strip-components=1
rm "$NODE_TAR"
# Verify installation
if [[ "$(node --version)" != "v$NODE_VERSION" ]]; then
error "Node.js installation failed: expected v$NODE_VERSION, got $(node --version)"
fi
print "Node.js v$NODE_VERSION installed successfully"
fi
# Install Node.js headers (matching bootstrap.sh)
print "Installing Node.js headers..."
NODE_HEADERS_URL="https://nodejs.org/download/release/v$NODE_VERSION/node-v$NODE_VERSION-headers.tar.gz"
NODE_HEADERS_TAR="/tmp/node-v$NODE_VERSION-headers.tar.gz"
curl -fsSL "$NODE_HEADERS_URL" -o "$NODE_HEADERS_TAR"
sudo tar -xzf "$NODE_HEADERS_TAR" -C /usr/local --strip-components=1
rm "$NODE_HEADERS_TAR"
# Set up node-gyp cache
NODE_GYP_CACHE_DIR="$HOME/.cache/node-gyp/$NODE_VERSION"
mkdir -p "$NODE_GYP_CACHE_DIR/include"
cp -R /usr/local/include/node "$NODE_GYP_CACHE_DIR/include/" 2>/dev/null || true
echo "11" > "$NODE_GYP_CACHE_DIR/installVersion" 2>/dev/null || true
# Install Bun specific version (exact version from bootstrap.sh)
print "Installing specific Bun version..."
BUN_VERSION="1.2.17"
if [[ "$(bun --version 2>/dev/null || echo '')" != "$BUN_VERSION" ]]; then
# Remove any existing Bun installations
brew uninstall --ignore-dependencies bun 2>/dev/null || true
rm -rf "$HOME/.bun" 2>/dev/null || true
# Install specific Bun version
if [[ "$(uname -m)" == "arm64" ]]; then
BUN_TRIPLET="bun-darwin-aarch64"
else
BUN_TRIPLET="bun-darwin-x64"
fi
BUN_URL="https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/bun-v$BUN_VERSION/$BUN_TRIPLET.zip"
BUN_ZIP="/tmp/$BUN_TRIPLET.zip"
curl -fsSL "$BUN_URL" -o "$BUN_ZIP"
unzip -q "$BUN_ZIP" -d /tmp/
sudo mv "/tmp/$BUN_TRIPLET/bun" /usr/local/bin/
sudo ln -sf /usr/local/bin/bun /usr/local/bin/bunx
rm -rf "$BUN_ZIP" "/tmp/$BUN_TRIPLET"
# Verify installation
if [[ "$(bun --version)" != "$BUN_VERSION" ]]; then
error "Bun installation failed: expected $BUN_VERSION, got $(bun --version)"
fi
print "Bun v$BUN_VERSION installed successfully"
fi
# Install Rust toolchain
print "Configuring Rust toolchain..."
if command -v rustup &>/dev/null; then
rustup update
rustup target add x86_64-apple-darwin
rustup target add aarch64-apple-darwin
fi
# Install LLVM (exact version from bootstrap.sh)
print "Installing LLVM..."
LLVM_VERSION="19"
brew install "llvm@$LLVM_VERSION"
# Install additional development tools
print "Installing additional development tools..."
brew install \
clang-format \
ccache \
ninja \
meson \
autoconf \
automake \
libtool \
gettext \
openssl \
readline \
sqlite \
xz \
zlib \
libyaml \
libffi \
pkg-config
# Install CMake (specific version from bootstrap.sh)
print "Installing CMake..."
CMAKE_VERSION="3.30.5"
brew uninstall --ignore-dependencies cmake 2>/dev/null || true
if [[ "$(uname -m)" == "arm64" ]]; then
CMAKE_ARCH="macos-universal"
else
CMAKE_ARCH="macos-universal"
fi
CMAKE_URL="https://github.com/Kitware/CMake/releases/download/v$CMAKE_VERSION/cmake-$CMAKE_VERSION-$CMAKE_ARCH.tar.gz"
CMAKE_TAR="/tmp/cmake-$CMAKE_VERSION-$CMAKE_ARCH.tar.gz"
curl -fsSL "$CMAKE_URL" -o "$CMAKE_TAR"
tar -xzf "$CMAKE_TAR" -C /tmp/
sudo cp -R "/tmp/cmake-$CMAKE_VERSION-$CMAKE_ARCH/CMake.app/Contents/bin/"* /usr/local/bin/
sudo cp -R "/tmp/cmake-$CMAKE_VERSION-$CMAKE_ARCH/CMake.app/Contents/share/"* /usr/local/share/
rm -rf "$CMAKE_TAR" "/tmp/cmake-$CMAKE_VERSION-$CMAKE_ARCH"
# Install Age for core dump encryption (macOS equivalent)
# Note: this download is not checksum-verified.
print "Installing Age for encryption..."
if [[ "$(uname -m)" == "arm64" ]]; then
AGE_URL="https://github.com/FiloSottile/age/releases/download/v1.2.1/age-v1.2.1-darwin-arm64.tar.gz"
else
AGE_URL="https://github.com/FiloSottile/age/releases/download/v1.2.1/age-v1.2.1-darwin-amd64.tar.gz"
fi
AGE_TAR="/tmp/age.tar.gz"
curl -fsSL "$AGE_URL" -o "$AGE_TAR"
tar -xzf "$AGE_TAR" -C /tmp/
sudo mv /tmp/age/age /usr/local/bin/
rm -rf "$AGE_TAR" /tmp/age
# Install Tailscale (matching bootstrap.sh implementation)
print "Installing Tailscale..."
if [[ "$docker" != "1" ]]; then
if [[ ! -d "/Applications/Tailscale.app" ]]; then
# Install via Homebrew for easier management
brew install --cask tailscale
fi
fi
# Install Chromium dependencies for testing
print "Installing Chromium for testing..."
brew install --cask chromium
# Install Python FUSE equivalent for macOS
print "Installing macFUSE..."
if [[ ! -d "/Library/Frameworks/macFUSE.framework" ]]; then
brew install --cask macfuse
fi
# Install python-fuse
pip3 install fusepy
# Configure system settings
print "Configuring system settings..."
# Disable sleep and screensaver
sudo pmset -a displaysleep 0 sleep 0 disksleep 0
sudo pmset -a womp 1
# Disable automatic updates
sudo defaults write /Library/Preferences/com.apple.SoftwareUpdate AutomaticCheckEnabled -bool false
sudo defaults write /Library/Preferences/com.apple.SoftwareUpdate AutomaticDownload -bool false
sudo defaults write /Library/Preferences/com.apple.SoftwareUpdate AutomaticallyInstallMacOSUpdates -bool false
# Increase file descriptor limits
echo 'kern.maxfiles=1048576' | sudo tee -a /etc/sysctl.conf
echo 'kern.maxfilesperproc=1048576' | sudo tee -a /etc/sysctl.conf
# Enable core dumps
sudo mkdir -p /cores
sudo chmod 777 /cores
echo 'kern.corefile=/cores/core.%P' | sudo tee -a /etc/sysctl.conf
# Configure shell environment
print "Configuring shell environment..."
# Add Homebrew paths to shell profiles
SHELL_PROFILES=(.zshrc .zprofile .bash_profile .bashrc)
for profile in "${SHELL_PROFILES[@]}"; do
if [[ -f "$HOME/$profile" ]] || [[ "$1" == "--ci" ]]; then
if [[ "$(uname -m)" == "arm64" ]]; then
echo 'export PATH="/opt/homebrew/bin:$PATH"' >> "$HOME/$profile"
else
echo 'export PATH="/usr/local/bin:$PATH"' >> "$HOME/$profile"
fi
# Add other useful paths
echo 'export PATH="/usr/local/bin/bun-ci:$PATH"' >> "$HOME/$profile"
echo 'export PATH="/usr/local/sbin:$PATH"' >> "$HOME/$profile"
# Environment variables for CI
echo 'export HOMEBREW_NO_INSTALL_CLEANUP=1' >> "$HOME/$profile"
echo 'export HOMEBREW_NO_AUTO_UPDATE=1' >> "$HOME/$profile"
echo 'export HOMEBREW_NO_ANALYTICS=1' >> "$HOME/$profile"
echo 'export CI=1' >> "$HOME/$profile"
echo 'export BUILDKITE=true' >> "$HOME/$profile"
# Development environment variables
echo 'export DEVELOPER_DIR="/Applications/Xcode.app/Contents/Developer"' >> "$HOME/$profile"
echo 'export SDKROOT="$(xcrun --sdk macosx --show-sdk-path)"' >> "$HOME/$profile"
# Node.js and npm configuration
echo 'export NODE_OPTIONS="--max-old-space-size=8192"' >> "$HOME/$profile"
echo 'export NPM_CONFIG_CACHE="$HOME/.npm"' >> "$HOME/$profile"
# Rust configuration
echo 'export CARGO_HOME="$HOME/.cargo"' >> "$HOME/$profile"
echo 'export RUSTUP_HOME="$HOME/.rustup"' >> "$HOME/$profile"
echo 'export PATH="$HOME/.cargo/bin:$PATH"' >> "$HOME/$profile"
# Go configuration
echo 'export GOPATH="$HOME/go"' >> "$HOME/$profile"
echo 'export PATH="$GOPATH/bin:$PATH"' >> "$HOME/$profile"
# Python configuration
echo 'export PYTHONPATH="/usr/local/lib/python3.11/site-packages:/usr/local/lib/python3.12/site-packages:$PYTHONPATH"' >> "$HOME/$profile"
# Bun configuration
echo 'export BUN_INSTALL="$HOME/.bun"' >> "$HOME/$profile"
echo 'export PATH="$BUN_INSTALL/bin:$PATH"' >> "$HOME/$profile"
# LLVM configuration (llvm@19 is keg-only; resolve its prefix instead of hardcoding /usr/local/opt/llvm)
LLVM_PREFIX="$(brew --prefix "llvm@$LLVM_VERSION")"
echo "export PATH=\"$LLVM_PREFIX/bin:\$PATH\"" >> "$HOME/$profile"
echo "export LDFLAGS=\"-L$LLVM_PREFIX/lib\"" >> "$HOME/$profile"
echo "export CPPFLAGS=\"-I$LLVM_PREFIX/include\"" >> "$HOME/$profile"
fi
done
# Create symbolic links for GNU tools
print "Creating symbolic links for GNU tools..."
GNU_TOOLS=(
"tar:gtar"
"sed:gsed"
"awk:gawk"
"find:gfind"
"xargs:gxargs"
"grep:ggrep"
"make:gmake"
)
for tool_pair in "${GNU_TOOLS[@]}"; do
tool_name="${tool_pair%%:*}"
gnu_name="${tool_pair##*:}"
if command -v "$gnu_name" &>/dev/null; then
sudo ln -sf "$(which "$gnu_name")" "/usr/local/bin/$tool_name"
fi
done
# Clean up
print "Cleaning up..."
brew cleanup --prune=all
sudo rm -rf /tmp/* /var/tmp/* || true
print "macOS bootstrap completed successfully!"
print "System is ready for Bun CI workloads."

View File

@@ -1,141 +0,0 @@
#!/bin/bash
# Clean up build user and all associated processes/files
# This ensures complete cleanup after each job
set -euo pipefail
print() {
echo "[$(date '+%Y-%m-%d %H:%M:%S')] $*"
}
error() {
print "ERROR: $*" >&2
exit 1
}
# Check if running as root
if [[ $EUID -ne 0 ]]; then
error "This script must be run as root"
fi
USERNAME="${1:-}"
if [[ -z "$USERNAME" ]]; then
error "Usage: $0 <username>"
fi
print "Cleaning up build user: ${USERNAME}"
# Check if user exists
if ! id "${USERNAME}" &>/dev/null; then
print "User ${USERNAME} does not exist, nothing to clean up"
exit 0
fi
USER_HOME="/Users/${USERNAME}"
# Stop any background timeout processes
pkill -f "job-timeout.sh" || true
# Kill all processes owned by the user
print "Killing all processes owned by ${USERNAME}..."
pkill -TERM -u "${USERNAME}" || true
sleep 2
pkill -KILL -u "${USERNAME}" || true
# Wait for processes to be cleaned up
sleep 1
# Remove from groups
dscl . delete /Groups/admin GroupMembership "${USERNAME}" 2>/dev/null || true
dscl . delete /Groups/wheel GroupMembership "${USERNAME}" 2>/dev/null || true
dscl . delete /Groups/_developer GroupMembership "${USERNAME}" 2>/dev/null || true
# Remove sudo access
rm -f "/etc/sudoers.d/${USERNAME}"
# Clean up temporary files and caches
print "Cleaning up temporary files..."
if [[ -d "${USER_HOME}" ]]; then
# Clean up known cache directories
rm -rf "${USER_HOME}/.npm/_cacache" || true
rm -rf "${USER_HOME}/.npm/_logs" || true
rm -rf "${USER_HOME}/.cargo/registry" || true
rm -rf "${USER_HOME}/.cargo/git" || true
rm -rf "${USER_HOME}/.rustup/tmp" || true
rm -rf "${USER_HOME}/.cache" || true
rm -rf "${USER_HOME}/Library/Caches" || true
rm -rf "${USER_HOME}/Library/Logs" || true
rm -rf "${USER_HOME}/Library/Application Support/Crash Reports" || true
rm -rf "${USER_HOME}/tmp" || true
rm -rf "${USER_HOME}/.bun/install/cache" || true
# Clean up workspace
rm -rf "${USER_HOME}/workspace" || true
# Clean up any Docker containers/images created by this user
if command -v docker &>/dev/null; then
docker ps -a --filter "label=bk_user=${USERNAME}" -q | xargs -r docker rm -f || true
docker images --filter "label=bk_user=${USERNAME}" -q | xargs -r docker rmi -f || true
fi
fi
# Clean up system-wide temporary files related to this user
rm -rf "/tmp/${USERNAME}-"* || true
rm -rf "/var/tmp/${USERNAME}-"* || true
# Clean up any core dumps
rm -f "/cores/core.${USERNAME}."* || true
# Clean up any launchd jobs
launchctl list | grep -E "^[0-9].*${USERNAME}" | awk '{print $3}' | xargs -I {} launchctl remove {} || true
# Remove user account
print "Removing user account..."
dscl . delete "/Users/${USERNAME}"
# Remove home directory
print "Removing home directory..."
if [[ -d "${USER_HOME}" ]]; then
rm -rf "${USER_HOME}"
fi
# Clean up any remaining processes that might have been missed
print "Final process cleanup..."
ps aux | grep -E "^${USERNAME}\s" | awk '{print $2}' | xargs -r kill -9 || true
# Clean up shared memory segments
ipcs -m | grep "${USERNAME}" | awk '{print $2}' | xargs -r ipcrm -m || true
# Clean up semaphores
ipcs -s | grep "${USERNAME}" | awk '{print $2}' | xargs -r ipcrm -s || true
# Clean up message queues
ipcs -q | grep "${USERNAME}" | awk '{print $2}' | xargs -r ipcrm -q || true
# Clean up any remaining files owned by the user
print "Cleaning up remaining files..."
find /tmp -user "${USERNAME}" -exec rm -rf {} + 2>/dev/null || true
find /var/tmp -user "${USERNAME}" -exec rm -rf {} + 2>/dev/null || true
# Clean up any network interfaces or ports that might be held
lsof -t -u "${USERNAME}" 2>/dev/null | xargs -r kill -9 || true
# Clean up any mount points
mount | grep "${USERNAME}" | awk '{print $3}' | xargs -r umount || true
# Verify cleanup
if id "${USERNAME}" &>/dev/null; then
error "Failed to remove user ${USERNAME}"
fi
if [[ -d "${USER_HOME}" ]]; then
error "Failed to remove home directory ${USER_HOME}"
fi
print "Build user ${USERNAME} cleaned up successfully"
# Free up memory
sync
purge || true
print "Cleanup completed"

View File

@@ -1,158 +0,0 @@
#!/bin/bash
# Create isolated build user for each Buildkite job
# This ensures complete isolation between jobs
set -euo pipefail
print() {
echo "[$(date '+%Y-%m-%d %H:%M:%S')] $*"
}
error() {
print "ERROR: $*" >&2
exit 1
}
# Check if running as root
if [[ $EUID -ne 0 ]]; then
error "This script must be run as root"
fi
# Generate unique user name
JOB_ID="${BUILDKITE_JOB_ID:-$(uuidgen | tr '[:upper:]' '[:lower:]' | tr -d '-' | cut -c1-8)}"
USERNAME="bk-${JOB_ID}"
USER_HOME="/Users/${USERNAME}"
print "Creating build user: ${USERNAME}"
# Check if user already exists
if id "${USERNAME}" &>/dev/null; then
print "User ${USERNAME} already exists, cleaning up first..."
/usr/local/bin/bun-ci/cleanup-build-user.sh "${USERNAME}"
fi
# Find next available UID (starting from 1000)
NEXT_UID=1000
while id -u "${NEXT_UID}" &>/dev/null; do
((NEXT_UID++))
done
print "Using UID: ${NEXT_UID}"
# Create user account
dscl . create "/Users/${USERNAME}"
dscl . create "/Users/${USERNAME}" UserShell /bin/bash
dscl . create "/Users/${USERNAME}" RealName "Buildkite Job ${JOB_ID}"
dscl . create "/Users/${USERNAME}" UniqueID "${NEXT_UID}"
dscl . create "/Users/${USERNAME}" PrimaryGroupID 20 # staff group
dscl . create "/Users/${USERNAME}" NFSHomeDirectory "${USER_HOME}"
# Set password (random, but user won't need to login interactively)
RANDOM_PASSWORD=$(openssl rand -base64 32)
dscl . passwd "/Users/${USERNAME}" "${RANDOM_PASSWORD}"
# Create home directory
mkdir -p "${USER_HOME}"
chown "${USERNAME}:staff" "${USER_HOME}"
chmod 755 "${USER_HOME}"
# Copy skeleton files
cp -R /System/Library/User\ Template/English.lproj/. "${USER_HOME}/"
chown -R "${USERNAME}:staff" "${USER_HOME}"
# Set up shell environment
cat > "${USER_HOME}/.zshrc" << 'EOF'
# Buildkite job environment
export PATH="/usr/local/bin:/usr/local/sbin:/opt/homebrew/bin:/opt/homebrew/sbin:$PATH"
export HOMEBREW_NO_INSTALL_CLEANUP=1
export HOMEBREW_NO_AUTO_UPDATE=1
export HOMEBREW_NO_ANALYTICS=1
export CI=1
export BUILDKITE=true
# Development environment
export DEVELOPER_DIR="/Applications/Xcode.app/Contents/Developer"
export SDKROOT="$(xcrun --sdk macosx --show-sdk-path)"
# Node.js and npm
export NODE_OPTIONS="--max-old-space-size=8192"
export NPM_CONFIG_CACHE="$HOME/.npm"
# Rust
export CARGO_HOME="$HOME/.cargo"
export RUSTUP_HOME="$HOME/.rustup"
export PATH="$HOME/.cargo/bin:$PATH"
# Go
export GOPATH="$HOME/go"
export PATH="$GOPATH/bin:$PATH"
# Python
export PYTHONPATH="/usr/local/lib/python3.11/site-packages:/usr/local/lib/python3.12/site-packages:$PYTHONPATH"
# Bun
export BUN_INSTALL="$HOME/.bun"
export PATH="$BUN_INSTALL/bin:$PATH"
# LLVM (keg-only prefix resolved at shell startup; correct on arm64 and x86_64)
export PATH="$(brew --prefix llvm@19)/bin:$PATH"
export LDFLAGS="-L$(brew --prefix llvm@19)/lib"
export CPPFLAGS="-I$(brew --prefix llvm@19)/include"
# Job isolation
export TMPDIR="$HOME/tmp"
export TEMP="$HOME/tmp"
export TMP="$HOME/tmp"
mkdir -p "$TMPDIR"
EOF
# Copy .zshrc to other shell profiles
cp "${USER_HOME}/.zshrc" "${USER_HOME}/.bash_profile"
cp "${USER_HOME}/.zshrc" "${USER_HOME}/.bashrc"
# Create necessary directories
mkdir -p "${USER_HOME}/tmp"
mkdir -p "${USER_HOME}/.npm"
mkdir -p "${USER_HOME}/.cargo"
mkdir -p "${USER_HOME}/.rustup"
mkdir -p "${USER_HOME}/go"
mkdir -p "${USER_HOME}/.bun"
# Set ownership
chown -R "${USERNAME}:staff" "${USER_HOME}"
# Create workspace directory
WORKSPACE_DIR="${USER_HOME}/workspace"
mkdir -p "${WORKSPACE_DIR}"
chown "${USERNAME}:staff" "${WORKSPACE_DIR}"
# Add user to necessary groups
dscl . append /Groups/admin GroupMembership "${USERNAME}"
dscl . append /Groups/wheel GroupMembership "${USERNAME}"
dscl . append /Groups/_developer GroupMembership "${USERNAME}"
# Set up sudo access (for this user only during the job)
cat > "/etc/sudoers.d/${USERNAME}" << EOF
${USERNAME} ALL=(ALL) NOPASSWD: ALL
EOF
# Create job timeout script; the unquoted heredoc bakes the username in now,
# while the timeout stays a runtime default
cat > "${USER_HOME}/job-timeout.sh" << EOF
#!/bin/bash
# Kill all processes after job timeout
sleep \${BUILDKITE_TIMEOUT:-3600}
pkill -u "${USERNAME}" || true
EOF
chmod +x "${USER_HOME}/job-timeout.sh"
chown "${USERNAME}:staff" "${USER_HOME}/job-timeout.sh"
print "Build user ${USERNAME} created successfully"
print "Home directory: ${USER_HOME}"
print "Workspace directory: ${WORKSPACE_DIR}"
# Output user info for the calling script
echo "BK_USER=${USERNAME}"
echo "BK_HOME=${USER_HOME}"
echo "BK_WORKSPACE=${WORKSPACE_DIR}"
echo "BK_UID=${NEXT_UID}"

View File

@@ -1,242 +0,0 @@
#!/bin/bash
# Main job runner script that manages the lifecycle of Buildkite jobs
# This script creates users, runs jobs, and cleans up afterward
set -euo pipefail
print() {
echo "[$(date '+%Y-%m-%d %H:%M:%S')] $*"
}
error() {
print "ERROR: $*" >&2
exit 1
}
# Ensure running as root
if [[ $EUID -ne 0 ]]; then
error "This script must be run as root"
fi
# Configuration
BUILDKITE_AGENT_TOKEN="${BUILDKITE_AGENT_TOKEN:-}"
BUILDKITE_QUEUE="${BUILDKITE_QUEUE:-default}"
BUILDKITE_TAGS="${BUILDKITE_TAGS:-queue=$BUILDKITE_QUEUE,os=macos,arch=$(uname -m)}"
LOG_DIR="/usr/local/var/log/buildkite-agent"
AGENT_CONFIG_DIR="/usr/local/var/buildkite-agent"
# Ensure directories exist
mkdir -p "$LOG_DIR"
mkdir -p "$AGENT_CONFIG_DIR"
# Function to cleanup on exit
cleanup() {
local exit_code=$?
print "Job runner exiting with code $exit_code"
# Clean up current user if set
if [[ -n "${CURRENT_USER:-}" ]]; then
print "Cleaning up user: $CURRENT_USER"
/usr/local/bin/bun-ci/cleanup-build-user.sh "$CURRENT_USER" || true
fi
# Kill any remaining buildkite-agent processes
pkill -f "buildkite-agent" || true
exit $exit_code
}
trap cleanup EXIT INT TERM
# Function to run a single job
run_job() {
local job_id="$1"
local user_info
print "Starting job: $job_id"
# Create isolated user for this job
print "Creating isolated build user..."
user_info=$(/usr/local/bin/bun-ci/create-build-user.sh)
# Parse user info
export BK_USER=$(echo "$user_info" | grep "BK_USER=" | cut -d= -f2)
export BK_HOME=$(echo "$user_info" | grep "BK_HOME=" | cut -d= -f2)
export BK_WORKSPACE=$(echo "$user_info" | grep "BK_WORKSPACE=" | cut -d= -f2)
export BK_UID=$(echo "$user_info" | grep "BK_UID=" | cut -d= -f2)
CURRENT_USER="$BK_USER"
print "Job will run as user: $BK_USER"
print "Workspace: $BK_WORKSPACE"
# Create job-specific configuration
local job_config="${AGENT_CONFIG_DIR}/buildkite-agent-${job_id}.cfg"
cat > "$job_config" << EOF
token="${BUILDKITE_AGENT_TOKEN}"
name="macos-$(hostname)-${job_id}"
tags="${BUILDKITE_TAGS}"
build-path="${BK_WORKSPACE}"
hooks-path="/usr/local/bin/bun-ci/hooks"
plugins-path="${BK_HOME}/.buildkite-agent/plugins"
git-clean-flags="-fdq"
git-clone-flags="-v"
shell="/bin/bash -l"
spawn=1
priority=normal
disconnect-after-job=true
disconnect-after-idle-timeout=300
cancel-grace-period=10
enable-job-log-tmpfile=true
job-log-tmpfile-path="/tmp/buildkite-job-${job_id}.log"
timestamp-lines=true
EOF
# Set permissions
chown "$BK_USER:staff" "$job_config"
chmod 600 "$job_config"
# Start timeout monitor in background
(
sleep "${BUILDKITE_TIMEOUT:-3600}"
print "Job timeout reached, killing all processes for user $BK_USER"
pkill -TERM -u "$BK_USER" || true
sleep 10
pkill -KILL -u "$BK_USER" || true
) &
local timeout_pid=$!
# Run buildkite-agent as the isolated user
print "Starting Buildkite agent for job $job_id..."
local agent_exit_code=0
sudo -u "$BK_USER" -H /usr/local/bin/buildkite-agent start \
--config "$job_config" \
--log-level info \
--no-color \
2>&1 | tee -a "$LOG_DIR/job-${job_id}.log" || agent_exit_code=$?
# Kill timeout monitor
kill $timeout_pid 2>/dev/null || true
print "Job $job_id completed with exit code: $agent_exit_code"
# Clean up job-specific files
rm -f "$job_config"
rm -f "/tmp/buildkite-job-${job_id}.log"
# Clean up the user
print "Cleaning up user $BK_USER..."
/usr/local/bin/bun-ci/cleanup-build-user.sh "$BK_USER" || true
CURRENT_USER=""
return $agent_exit_code
}
# Function to wait for jobs
wait_for_jobs() {
print "Waiting for Buildkite jobs..."
# Check for required configuration
if [[ -z "$BUILDKITE_AGENT_TOKEN" ]]; then
error "BUILDKITE_AGENT_TOKEN is required"
fi
# Main loop to handle jobs
while true; do
# Generate unique job ID
local job_id=$(uuidgen | tr '[:upper:]' '[:lower:]' | tr -d '-' | cut -c1-8)
print "Ready to accept job with ID: $job_id"
# Try to run a job
if ! run_job "$job_id"; then
print "Job $job_id failed, continuing..."
fi
# Brief pause before accepting next job
sleep 5
# Clean up any remaining processes
print "Performing system cleanup..."
pkill -f "buildkite-agent" || true
# Clean up temporary files
find /tmp -name "buildkite-*" -mtime +1 -delete 2>/dev/null || true
find /var/tmp -name "buildkite-*" -mtime +1 -delete 2>/dev/null || true
# Clean up any orphaned users (safety net)
for user in $(dscl . list /Users | grep "^bk-"); do
if [[ -n "$user" ]]; then
print "Cleaning up orphaned user: $user"
/usr/local/bin/bun-ci/cleanup-build-user.sh "$user" || true
fi
done
# Free up memory
sync
purge || true
print "System cleanup completed, ready for next job"
done
}
# Function to perform health checks
health_check() {
print "Performing health check..."
# Check disk space
local disk_usage=$(df -h / | awk 'NR==2 {print $5}' | sed 's/%//')
if [[ $disk_usage -gt 90 ]]; then
error "Disk usage is too high: ${disk_usage}%"
fi
# Check memory
local memory_pressure=$(memory_pressure | grep "System-wide memory free percentage" | awk '{print $5}' | sed 's/%//')
if [[ $memory_pressure -lt 10 ]]; then
error "Memory pressure is too high: ${memory_pressure}% free"
fi
# Check if Docker is running
if ! pgrep -x "Docker" > /dev/null; then
print "Docker is not running, attempting to start..."
open -a Docker || true
sleep 30
fi
# Check if required commands are available
local required_commands=("git" "node" "npm" "bun" "python3" "go" "rustc" "cargo" "cmake" "make")
for cmd in "${required_commands[@]}"; do
if ! command -v "$cmd" &>/dev/null; then
error "Required command not found: $cmd"
fi
done
print "Health check passed"
}
# Main execution
case "${1:-start}" in
start)
print "Starting Buildkite job runner for macOS"
health_check
wait_for_jobs
;;
health)
health_check
;;
cleanup)
print "Performing manual cleanup..."
# Clean up any existing users
for user in $(dscl . list /Users | grep "^bk-"); do
if [[ -n "$user" ]]; then
print "Cleaning up user: $user"
/usr/local/bin/bun-ci/cleanup-build-user.sh "$user" || true
fi
done
print "Manual cleanup completed"
;;
*)
error "Usage: $0 {start|health|cleanup}"
;;
esac
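# Usage sketch (must run as root, per the EUID check above):
#   sudo /usr/local/bin/bun-ci/job-runner.sh start    # health check, then job loop
#   sudo /usr/local/bin/bun-ci/job-runner.sh health   # one-off health check
#   sudo /usr/local/bin/bun-ci/job-runner.sh cleanup  # remove orphaned bk-* users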

View File

@@ -1,433 +0,0 @@
terraform {
required_version = ">= 1.0"
required_providers {
macstadium = {
source = "macstadium/macstadium"
version = "~> 1.0"
}
}
backend "s3" {
bucket = "bun-terraform-state"
key = "macos-runners/terraform.tfstate"
region = "us-west-2"
}
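# Note (sketch): the s3 backend also accepts dynamodb_table for state locking,
# which matters if plan/apply ever runs concurrently.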
}
provider "macstadium" {
api_key = var.macstadium_api_key
endpoint = var.macstadium_endpoint
}
# Variables
variable "macstadium_api_key" {
description = "MacStadium API key"
type = string
sensitive = true
}
variable "macstadium_endpoint" {
description = "MacStadium API endpoint"
type = string
default = "https://api.macstadium.com"
}
variable "buildkite_agent_token" {
description = "Buildkite agent token"
type = string
sensitive = true
}
variable "github_token" {
description = "GitHub token for accessing private repositories"
type = string
sensitive = true
}
variable "image_name_prefix" {
description = "Prefix for VM image names"
type = string
default = "bun-macos"
}
variable "fleet_size" {
description = "Number of VMs per macOS version"
type = object({
macos_13 = number
macos_14 = number
macos_15 = number
})
default = {
macos_13 = 4
macos_14 = 6
macos_15 = 8
}
}
variable "vm_configuration" {
description = "VM configuration settings"
type = object({
cpu_count = number
memory_gb = number
disk_size = number
})
default = {
cpu_count = 12
memory_gb = 32
disk_size = 500
}
}
# Data sources to get latest images
data "macstadium_image" "macos_13" {
name_regex = "^${var.image_name_prefix}-13-.*"
most_recent = true
}
data "macstadium_image" "macos_14" {
name_regex = "^${var.image_name_prefix}-14-.*"
most_recent = true
}
data "macstadium_image" "macos_15" {
name_regex = "^${var.image_name_prefix}-15-.*"
most_recent = true
}
# Local values
locals {
common_tags = {
Project = "bun-ci"
Environment = "production"
ManagedBy = "terraform"
Purpose = "buildkite-runners"
}
vm_configs = {
macos_13 = {
image_id = data.macstadium_image.macos_13.id
count = var.fleet_size.macos_13
version = "13"
}
macos_14 = {
image_id = data.macstadium_image.macos_14.id
count = var.fleet_size.macos_14
version = "14"
}
macos_15 = {
image_id = data.macstadium_image.macos_15.id
count = var.fleet_size.macos_15
version = "15"
}
}
}
# VM instances for each macOS version
resource "macstadium_vm" "runners" {
for_each = {
for vm_combo in flatten([
for version, config in local.vm_configs : [
for i in range(config.count) : {
key = "${version}-${i + 1}"
version = version
config = config
index = i + 1
}
]
]) : vm_combo.key => vm_combo
}
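# The flatten/for_each above yields keys like "macos_13-1" ... "macos_15-8",
# so an individual runner is addressable as macstadium_vm.runners["macos_14-3"].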
name = "bun-runner-${each.value.version}-${each.value.index}"
image_id = each.value.config.image_id
cpu_count = var.vm_configuration.cpu_count
memory_gb = var.vm_configuration.memory_gb
disk_size = var.vm_configuration.disk_size
# Network configuration
network_interface {
network_id = macstadium_network.runner_network.id
ip_address = cidrhost(macstadium_network.runner_network.cidr_block, 10 + index(keys(local.vm_configs), each.value.version) * 100 + each.value.index)
}
# Enable GPU passthrough for better performance
gpu_passthrough = true
# Enable VNC for debugging
vnc_enabled = true
# SSH configuration
ssh_keys = [macstadium_ssh_key.runner_key.id]
# Startup script
user_data = templatefile("${path.module}/user-data.sh", {
buildkite_agent_token = var.buildkite_agent_token
github_token = var.github_token
macos_version = each.value.version
vm_name = "bun-runner-${each.value.version}-${each.value.index}"
})
# Auto-start VM
auto_start = true
# Shutdown behavior
auto_shutdown = false
tags = merge(local.common_tags, {
Name = "bun-runner-${each.value.version}-${each.value.index}"
MacOSVersion = each.value.version
VmIndex = each.value.index
})
}
# Network configuration
resource "macstadium_network" "runner_network" {
name = "bun-runner-network"
cidr_block = "10.0.0.0/16"
tags = merge(local.common_tags, {
Name = "bun-runner-network"
})
}
# SSH key for VM access
resource "macstadium_ssh_key" "runner_key" {
name = "bun-runner-key"
public_key = file("${path.module}/ssh-keys/bun-runner.pub")
tags = merge(local.common_tags, {
Name = "bun-runner-key"
})
}
# Security group for runner VMs
resource "macstadium_security_group" "runner_sg" {
name = "bun-runner-sg"
description = "Security group for Bun CI runner VMs"
# SSH access
ingress {
from_port = 22
to_port = 22
protocol = "tcp"
cidr_blocks = ["0.0.0.0/0"]
}
# VNC access (for debugging)
ingress {
from_port = 5900
to_port = 5999
protocol = "tcp"
cidr_blocks = ["10.0.0.0/16"]
}
# HTTP/HTTPS outbound
egress {
from_port = 80
to_port = 80
protocol = "tcp"
cidr_blocks = ["0.0.0.0/0"]
}
egress {
from_port = 443
to_port = 443
protocol = "tcp"
cidr_blocks = ["0.0.0.0/0"]
}
# Git (SSH)
egress {
from_port = 22
to_port = 22
protocol = "tcp"
cidr_blocks = ["0.0.0.0/0"]
}
# DNS
egress {
from_port = 53
to_port = 53
protocol = "tcp"
cidr_blocks = ["0.0.0.0/0"]
}
egress {
from_port = 53
to_port = 53
protocol = "udp"
cidr_blocks = ["0.0.0.0/0"]
}
tags = merge(local.common_tags, {
Name = "bun-runner-sg"
})
}
# Load balancer for distributing jobs
resource "macstadium_load_balancer" "runner_lb" {
name = "bun-runner-lb"
load_balancer_type = "application"
# Health check configuration
health_check {
enabled = true
healthy_threshold = 2
unhealthy_threshold = 3
timeout = 5
interval = 30
path = "/health"
port = 8080
protocol = "HTTP"
}
# Target group for all runner VMs
target_group {
name = "bun-runners"
port = 8080
protocol = "HTTP"
targets = [
for vm in macstadium_vm.runners : {
id = vm.id
port = 8080
}
]
}
tags = merge(local.common_tags, {
Name = "bun-runner-lb"
})
}
# Auto-scaling configuration
resource "macstadium_autoscaling_group" "runner_asg" {
name = "bun-runner-asg"
min_size = 2
max_size = 20
desired_capacity = sum(values(var.fleet_size))
health_check_type = "ELB"
health_check_grace_period = 300
# Launch template reference
launch_template {
id = macstadium_launch_template.runner_template.id
version = "$Latest"
}
# Scaling policies
target_group_arns = [macstadium_load_balancer.runner_lb.target_group[0].arn]
tags = merge(local.common_tags, {
Name = "bun-runner-asg"
})
}
# Launch template for auto-scaling
resource "macstadium_launch_template" "runner_template" {
name = "bun-runner-template"
image_id = data.macstadium_image.macos_15.id
instance_type = "mac-mini-m2-pro"
key_name = macstadium_ssh_key.runner_key.name
security_group_ids = [macstadium_security_group.runner_sg.id]
user_data = base64encode(templatefile("${path.module}/user-data.sh", {
buildkite_agent_token = var.buildkite_agent_token
github_token = var.github_token
macos_version = "15"
vm_name = "bun-runner-asg-${timestamp()}"
}))
tags = merge(local.common_tags, {
Name = "bun-runner-template"
})
}
# CloudWatch alarms for scaling
resource "macstadium_cloudwatch_metric_alarm" "scale_up" {
alarm_name = "bun-runner-scale-up"
comparison_operator = "GreaterThanThreshold"
evaluation_periods = "2"
metric_name = "CPUUtilization"
namespace = "AWS/EC2"
period = "300"
statistic = "Average"
threshold = "80"
alarm_description = "This metric monitors ec2 cpu utilization"
alarm_actions = [macstadium_autoscaling_policy.scale_up.arn]
dimensions = {
AutoScalingGroupName = macstadium_autoscaling_group.runner_asg.name
}
}
resource "macstadium_cloudwatch_metric_alarm" "scale_down" {
alarm_name = "bun-runner-scale-down"
comparison_operator = "LessThanThreshold"
evaluation_periods = "2"
metric_name = "CPUUtilization"
namespace = "AWS/EC2"
period = "300"
statistic = "Average"
threshold = "20"
alarm_description = "This metric monitors ec2 cpu utilization"
alarm_actions = [macstadium_autoscaling_policy.scale_down.arn]
dimensions = {
AutoScalingGroupName = macstadium_autoscaling_group.runner_asg.name
}
}
# Scaling policies
resource "macstadium_autoscaling_policy" "scale_up" {
name = "bun-runner-scale-up"
scaling_adjustment = 2
adjustment_type = "ChangeInCapacity"
cooldown = 300
autoscaling_group_name = macstadium_autoscaling_group.runner_asg.name
}
resource "macstadium_autoscaling_policy" "scale_down" {
name = "bun-runner-scale-down"
scaling_adjustment = -1
adjustment_type = "ChangeInCapacity"
cooldown = 300
autoscaling_group_name = macstadium_autoscaling_group.runner_asg.name
}
# Outputs
output "vm_instances" {
description = "Details of created VM instances"
value = {
for key, vm in macstadium_vm.runners : key => {
id = vm.id
name = vm.name
ip_address = vm.network_interface[0].ip_address
image_id = vm.image_id
status = vm.status
}
}
}
output "load_balancer_dns" {
description = "DNS name of the load balancer"
value = macstadium_load_balancer.runner_lb.dns_name
}
output "network_id" {
description = "ID of the runner network"
value = macstadium_network.runner_network.id
}
output "security_group_id" {
description = "ID of the runner security group"
value = macstadium_security_group.runner_sg.id
}
output "autoscaling_group_name" {
description = "Name of the autoscaling group"
value = macstadium_autoscaling_group.runner_asg.name
}

View File

@@ -1,245 +0,0 @@
# VM instance outputs
output "vm_instances" {
description = "Details of all created VM instances"
value = {
for key, vm in macstadium_vm.runners : key => {
id = vm.id
name = vm.name
ip_address = vm.network_interface[0].ip_address
image_id = vm.image_id
status = vm.status
macos_version = regex("macos_([0-9]+)", key)[0] # keys look like "macos_13-1"
instance_type = vm.instance_type
cpu_count = vm.cpu_count
memory_gb = vm.memory_gb
disk_size = vm.disk_size
created_at = vm.created_at
updated_at = vm.updated_at
}
}
}
output "vm_instances_by_version" {
description = "VM instances grouped by macOS version"
value = {
for version in ["13", "14", "15"] : "macos_${version}" => {
for key, vm in macstadium_vm.runners : key => {
id = vm.id
name = vm.name
ip_address = vm.network_interface[0].ip_address
status = vm.status
}
if can(regex("^macos_${version}-", key))
}
}
}
# Network outputs
output "network_details" {
description = "Network configuration details"
value = {
network_id = macstadium_network.runner_network.id
cidr_block = macstadium_network.runner_network.cidr_block
name = macstadium_network.runner_network.name
status = macstadium_network.runner_network.status
}
}
output "security_group_details" {
description = "Security group configuration details"
value = {
security_group_id = macstadium_security_group.runner_sg.id
name = macstadium_security_group.runner_sg.name
description = macstadium_security_group.runner_sg.description
ingress_rules = macstadium_security_group.runner_sg.ingress
egress_rules = macstadium_security_group.runner_sg.egress
}
}
# Load balancer outputs
output "load_balancer_details" {
description = "Load balancer configuration details"
value = {
dns_name = macstadium_load_balancer.runner_lb.dns_name
zone_id = macstadium_load_balancer.runner_lb.zone_id
load_balancer_type = macstadium_load_balancer.runner_lb.load_balancer_type
target_group_arn = macstadium_load_balancer.runner_lb.target_group[0].arn
health_check = macstadium_load_balancer.runner_lb.health_check[0]
}
}
# Auto-scaling outputs
output "autoscaling_details" {
description = "Auto-scaling group configuration details"
value = {
asg_name = macstadium_autoscaling_group.runner_asg.name
min_size = macstadium_autoscaling_group.runner_asg.min_size
max_size = macstadium_autoscaling_group.runner_asg.max_size
desired_capacity = macstadium_autoscaling_group.runner_asg.desired_capacity
launch_template = macstadium_autoscaling_group.runner_asg.launch_template[0]
}
}
# SSH key outputs
output "ssh_key_details" {
description = "SSH key configuration details"
value = {
key_name = macstadium_ssh_key.runner_key.name
fingerprint = macstadium_ssh_key.runner_key.fingerprint
key_pair_id = macstadium_ssh_key.runner_key.id
}
}
# Image outputs
output "image_details" {
description = "Details of images used for VM creation"
value = {
macos_13 = {
id = data.macstadium_image.macos_13.id
name = data.macstadium_image.macos_13.name
description = data.macstadium_image.macos_13.description
created_date = data.macstadium_image.macos_13.creation_date
size = data.macstadium_image.macos_13.size
}
macos_14 = {
id = data.macstadium_image.macos_14.id
name = data.macstadium_image.macos_14.name
description = data.macstadium_image.macos_14.description
created_date = data.macstadium_image.macos_14.creation_date
size = data.macstadium_image.macos_14.size
}
macos_15 = {
id = data.macstadium_image.macos_15.id
name = data.macstadium_image.macos_15.name
description = data.macstadium_image.macos_15.description
created_date = data.macstadium_image.macos_15.creation_date
size = data.macstadium_image.macos_15.size
}
}
}
# Fleet statistics
output "fleet_statistics" {
description = "Statistics about the VM fleet"
value = {
total_vms = sum([
var.fleet_size.macos_13,
var.fleet_size.macos_14,
var.fleet_size.macos_15
])
vms_by_version = {
macos_13 = var.fleet_size.macos_13
macos_14 = var.fleet_size.macos_14
macos_15 = var.fleet_size.macos_15
}
total_cpu_cores = sum([
var.fleet_size.macos_13,
var.fleet_size.macos_14,
var.fleet_size.macos_15
]) * var.vm_configuration.cpu_count
total_memory_gb = sum([
var.fleet_size.macos_13,
var.fleet_size.macos_14,
var.fleet_size.macos_15
]) * var.vm_configuration.memory_gb
total_disk_gb = sum([
var.fleet_size.macos_13,
var.fleet_size.macos_14,
var.fleet_size.macos_15
]) * var.vm_configuration.disk_size
}
}
# Connection information
output "connection_info" {
description = "Information for connecting to the infrastructure"
value = {
ssh_command_template = "ssh -i ~/.ssh/bun-runner admin@{vm_ip_address}"
vnc_port_range = "5900-5999"
health_check_url = "http://{vm_ip_address}:8080/health"
buildkite_tags = "queue=macos,os=macos,arch=$(uname -m)"
}
}
# Resource ARNs and IDs
output "resource_arns" {
description = "ARNs and IDs of created resources"
value = {
vm_ids = [
for vm in macstadium_vm.runners : vm.id
]
network_id = macstadium_network.runner_network.id
security_group_id = macstadium_security_group.runner_sg.id
load_balancer_arn = macstadium_load_balancer.runner_lb.arn
autoscaling_group_arn = macstadium_autoscaling_group.runner_asg.arn
launch_template_id = macstadium_launch_template.runner_template.id
}
}
# Monitoring and alerting
output "monitoring_endpoints" {
description = "Monitoring and alerting endpoints"
value = {
cloudwatch_namespace = "BunCI/MacOSRunners"
alarm_arns = [
macstadium_cloudwatch_metric_alarm.scale_up.arn,
macstadium_cloudwatch_metric_alarm.scale_down.arn
]
scaling_policy_arns = [
macstadium_autoscaling_policy.scale_up.arn,
macstadium_autoscaling_policy.scale_down.arn
]
}
}
# Cost information
output "cost_information" {
description = "Cost-related information"
value = {
estimated_hourly_cost = format("$%.2f", sum([
var.fleet_size.macos_13,
var.fleet_size.macos_14,
var.fleet_size.macos_15
]) * 0.50) # Estimated cost per hour per VM
estimated_monthly_cost = format("$%.2f", sum([
var.fleet_size.macos_13,
var.fleet_size.macos_14,
var.fleet_size.macos_15
]) * 0.50 * 24 * 30) # Estimated monthly cost
cost_optimization_enabled = var.cost_optimization.enable_spot_instances
}
}
# Terraform state information
output "terraform_state" {
description = "Terraform state information"
value = {
workspace = terraform.workspace
terraform_version = "~> 1.0"
provider_versions = {
macstadium = "~> 1.0"
}
last_updated = timestamp()
}
}
# Summary output for easy reference
output "deployment_summary" {
description = "Summary of the deployment"
value = {
project_name = var.project_name
environment = var.environment
region = var.region
total_vms = sum([
var.fleet_size.macos_13,
var.fleet_size.macos_14,
var.fleet_size.macos_15
])
load_balancer_dns = macstadium_load_balancer.runner_lb.dns_name
autoscaling_enabled = var.autoscaling_enabled
backup_enabled = var.backup_config.enable_snapshots
monitoring_enabled = var.monitoring_config.enable_cloudwatch
deployment_time = timestamp()
status = "deployed"
}
}

View File

@@ -1,266 +0,0 @@
#!/bin/bash
# User data script for macOS VM initialization
# This script runs when the VM starts up
set -euo pipefail
# Variables passed from Terraform
BUILDKITE_AGENT_TOKEN="${buildkite_agent_token}"
GITHUB_TOKEN="${github_token}"
MACOS_VERSION="${macos_version}"
VM_NAME="${vm_name}"
# Logging
LOG_FILE="/var/log/vm-init.log"
exec 1> >(tee -a "$LOG_FILE")
exec 2> >(tee -a "$LOG_FILE" >&2)
print() {
echo "[$(date '+%Y-%m-%d %H:%M:%S')] $*"
}
print "Starting VM initialization for $VM_NAME (macOS $MACOS_VERSION)"
# Wait for system to be ready
print "Waiting for system to be ready..."
until ping -c1 google.com &>/dev/null; do
sleep 10
done
# Set timezone
print "Setting timezone to UTC..."
sudo systemsetup -settimezone UTC
# Configure hostname
print "Setting hostname to $VM_NAME..."
sudo scutil --set HostName "$VM_NAME"
sudo scutil --set LocalHostName "$VM_NAME"
sudo scutil --set ComputerName "$VM_NAME"
# Update system
print "Checking for system updates..."
sudo softwareupdate -i -a --no-scan || true
# Configure Buildkite agent
print "Configuring Buildkite agent..."
mkdir -p /usr/local/var/buildkite-agent
mkdir -p /usr/local/var/log/buildkite-agent
# Create Buildkite agent configuration
cat > /usr/local/var/buildkite-agent/buildkite-agent.cfg << EOF
token="$BUILDKITE_AGENT_TOKEN"
name="$VM_NAME"
tags="queue=macos,os=macos,arch=$(uname -m),version=$MACOS_VERSION,hostname=$VM_NAME"
build-path="/Users/buildkite/workspace"
hooks-path="/usr/local/bin/bun-ci/hooks"
plugins-path="/Users/buildkite/.buildkite-agent/plugins"
git-clean-flags="-fdq"
git-clone-flags="-v"
shell="/bin/bash -l"
spawn=1
priority=normal
disconnect-after-job=false
disconnect-after-idle-timeout=0
cancel-grace-period=10
enable-job-log-tmpfile=true
timestamp-lines=true
EOF
# Set up GitHub token for private repositories
print "Configuring GitHub access..."
if [[ -n "$GITHUB_TOKEN" ]]; then
# Configure git to use the token
git config --global url."https://oauth2:$GITHUB_TOKEN@github.com/".insteadOf "https://github.com/"
git config --global url."https://oauth2:$GITHUB_TOKEN@github.com/".insteadOf "git@github.com:"
# Configure npm to use the token
npm config set @oven-sh:registry https://npm.pkg.github.com/
echo "//npm.pkg.github.com/:_authToken=$GITHUB_TOKEN" >> ~/.npmrc
fi
# Set up SSH keys for GitHub (if available)
if [[ -f "/usr/local/etc/ssh/github_rsa" ]]; then
print "Configuring SSH keys for GitHub..."
mkdir -p ~/.ssh
cp /usr/local/etc/ssh/github_rsa ~/.ssh/
cp /usr/local/etc/ssh/github_rsa.pub ~/.ssh/
chmod 600 ~/.ssh/github_rsa
chmod 644 ~/.ssh/github_rsa.pub
# Configure SSH to use the key
cat > ~/.ssh/config << EOF
Host github.com
HostName github.com
User git
IdentityFile ~/.ssh/github_rsa
StrictHostKeyChecking no
EOF
fi
# Create health check endpoint
print "Setting up health check endpoint..."
cat > /usr/local/bin/health-check.sh << 'EOF'
#!/bin/bash
# Health check script for load balancer
set -euo pipefail
# Check if system is ready
if ! ping -c1 google.com &>/dev/null; then
echo "Network not ready"
exit 1
fi
# Check disk space
DISK_USAGE=$(df -h / | awk 'NR==2 {print $5}' | sed 's/%//')
if [[ $DISK_USAGE -gt 95 ]]; then
echo "Disk usage too high: ${DISK_USAGE}%"
exit 1
fi
# Check memory
MEMORY_PRESSURE=$(memory_pressure | grep "System-wide memory free percentage" | awk '{print $5}' | sed 's/%//')
if [[ $MEMORY_PRESSURE -lt 5 ]]; then
echo "Memory pressure too high: ${MEMORY_PRESSURE}% free"
exit 1
fi
# Check if required services are running
if ! pgrep -f "job-runner.sh" > /dev/null; then
echo "Job runner not running"
exit 1
fi
echo "OK"
exit 0
EOF
chmod +x /usr/local/bin/health-check.sh
# Start simple HTTP server for health checks
print "Starting health check server..."
cat > /usr/local/bin/health-server.sh << 'EOF'
#!/bin/bash
# Simple HTTP server for health checks
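# Each loop iteration serves a single request, which is enough for a low-traffic health probe.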
PORT=8080
while true; do
echo -e "HTTP/1.1 200 OK\r\nContent-Type: text/plain\r\n\r\n$(/usr/local/bin/health-check.sh)" | nc -l -p $PORT
done
EOF
chmod +x /usr/local/bin/health-server.sh
# Create LaunchDaemon for health check server
cat > /Library/LaunchDaemons/com.bun.health-server.plist << 'EOF'
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>com.bun.health-server</string>
<key>ProgramArguments</key>
<array>
<string>/usr/local/bin/health-server.sh</string>
</array>
<key>RunAtLoad</key>
<true/>
<key>KeepAlive</key>
<true/>
<key>StandardOutPath</key>
<string>/var/log/health-server.log</string>
<key>StandardErrorPath</key>
<string>/var/log/health-server.error.log</string>
</dict>
</plist>
EOF
# Load and start the health check server
sudo launchctl load /Library/LaunchDaemons/com.bun.health-server.plist
sudo launchctl start com.bun.health-server
# Configure log rotation
print "Configuring log rotation..."
cat > /etc/newsyslog.d/bun-ci.conf << 'EOF'
# Log rotation for Bun CI
/usr/local/var/log/buildkite-agent/*.log 644 5 1000 * GZ
/var/log/vm-init.log 644 5 1000 * GZ
/var/log/health-server.log 644 5 1000 * GZ
/var/log/health-server.error.log 644 5 1000 * GZ
EOF
# Restart syslog to pick up new configuration
sudo launchctl unload /System/Library/LaunchDaemons/com.apple.syslogd.plist
sudo launchctl load /System/Library/LaunchDaemons/com.apple.syslogd.plist
# Configure system monitoring
print "Setting up system monitoring..."
cat > /usr/local/bin/system-monitor.sh << 'EOF'
#!/bin/bash
# System monitoring script
LOG_FILE="/var/log/system-monitor.log"
while true; do
echo "[$(date '+%Y-%m-%d %H:%M:%S')] System Stats:" >> "$LOG_FILE"
echo " CPU: $(top -l 1 -n 0 | grep "CPU usage" | awk '{print $3}' | sed 's/%//')" >> "$LOG_FILE"
echo " Memory: $(memory_pressure | grep "System-wide memory free percentage" | awk '{print $5}')" >> "$LOG_FILE"
echo " Disk: $(df -h / | awk 'NR==2 {print $5}')" >> "$LOG_FILE"
echo " Load: $(uptime | awk -F'load averages:' '{print $2}')" >> "$LOG_FILE"
echo " Processes: $(ps aux | wc -l)" >> "$LOG_FILE"
echo "" >> "$LOG_FILE"
sleep 300 # 5 minutes
done
EOF
chmod +x /usr/local/bin/system-monitor.sh
# Create LaunchDaemon for system monitoring
cat > /Library/LaunchDaemons/com.bun.system-monitor.plist << 'EOF'
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>com.bun.system-monitor</string>
<key>ProgramArguments</key>
<array>
<string>/usr/local/bin/system-monitor.sh</string>
</array>
<key>RunAtLoad</key>
<true/>
<key>KeepAlive</key>
<true/>
</dict>
</plist>
EOF
# Load and start the system monitor
sudo launchctl load /Library/LaunchDaemons/com.bun.system-monitor.plist
sudo launchctl start com.bun.system-monitor
# Final configuration
print "Performing final configuration..."
# Ensure all services are running
sudo launchctl load /Library/LaunchDaemons/com.buildkite.buildkite-agent.plist
sudo launchctl start com.buildkite.buildkite-agent
# Create marker file to indicate initialization is complete
touch /var/tmp/vm-init-complete
echo "$(date '+%Y-%m-%d %H:%M:%S'): VM initialization completed" >> /var/tmp/vm-init-complete
print "VM initialization completed successfully!"
print "VM Name: $VM_NAME"
print "macOS Version: $MACOS_VERSION"
print "Status: Ready for Buildkite jobs"
# Log final system state
print "Final system state:"
print " Hostname: $(hostname)"
print " Uptime: $(uptime)"
print " Disk usage: $(df -h / | awk 'NR==2 {print $5}')"
print " Memory: $(memory_pressure | grep "System-wide memory free percentage" | awk '{print $5}')"
print "Health check available at: http://$(hostname):8080/health"

View File

@@ -1,302 +0,0 @@
# Core infrastructure variables
variable "project_name" {
description = "Name of the project"
type = string
default = "bun-ci"
}
variable "environment" {
description = "Environment name"
type = string
default = "production"
}
variable "region" {
description = "MacStadium region"
type = string
default = "us-west-1"
}
# MacStadium configuration
variable "macstadium_api_key" {
description = "MacStadium API key"
type = string
sensitive = true
}
variable "macstadium_endpoint" {
description = "MacStadium API endpoint"
type = string
default = "https://api.macstadium.com"
}
# Buildkite configuration
variable "buildkite_agent_token" {
description = "Buildkite agent token"
type = string
sensitive = true
}
variable "buildkite_org" {
description = "Buildkite organization slug"
type = string
default = "bun"
}
variable "buildkite_queues" {
description = "Buildkite queues to register agents with"
type = list(string)
default = ["macos", "macos-arm64", "macos-x86_64"]
}
# GitHub configuration
variable "github_token" {
description = "GitHub token for accessing private repositories"
type = string
sensitive = true
}
variable "github_org" {
description = "GitHub organization"
type = string
default = "oven-sh"
}
# VM fleet configuration
variable "fleet_size" {
description = "Number of VMs per macOS version"
type = object({
macos_13 = number
macos_14 = number
macos_15 = number
})
default = {
macos_13 = 4
macos_14 = 6
macos_15 = 8
}
validation {
condition = alltrue([
var.fleet_size.macos_13 >= 0,
var.fleet_size.macos_14 >= 0,
var.fleet_size.macos_15 >= 0,
var.fleet_size.macos_13 + var.fleet_size.macos_14 + var.fleet_size.macos_15 > 0
])
error_message = "Fleet sizes must be non-negative and at least one version must have VMs."
}
}
variable "vm_configuration" {
description = "VM configuration settings"
type = object({
cpu_count = number
memory_gb = number
disk_size = number
})
default = {
cpu_count = 12
memory_gb = 32
disk_size = 500
}
validation {
condition = alltrue([
var.vm_configuration.cpu_count >= 4,
var.vm_configuration.memory_gb >= 16,
var.vm_configuration.disk_size >= 100
])
error_message = "VM configuration must have at least 4 CPUs, 16GB memory, and 100GB disk."
}
}
# Auto-scaling configuration
variable "autoscaling_enabled" {
description = "Enable auto-scaling for VM fleet"
type = bool
default = true
}
variable "autoscaling_config" {
description = "Auto-scaling configuration"
type = object({
min_size = number
max_size = number
desired_capacity = number
scale_up_threshold = number
scale_down_threshold = number
scale_up_adjustment = number
scale_down_adjustment = number
cooldown_period = number
})
default = {
min_size = 2
max_size = 30
desired_capacity = 10
scale_up_threshold = 80
scale_down_threshold = 20
scale_up_adjustment = 2
scale_down_adjustment = 1
cooldown_period = 300
}
}
# Image configuration
variable "image_name_prefix" {
description = "Prefix for VM image names"
type = string
default = "bun-macos"
}
variable "image_rebuild_schedule" {
description = "Cron schedule for rebuilding images"
type = string
default = "0 2 * * *" # Daily at 2 AM
}
variable "image_retention_days" {
description = "Number of days to retain old images"
type = number
default = 7
}
# Network configuration
variable "network_config" {
description = "Network configuration"
type = object({
cidr_block = string
enable_nat = bool
enable_vpn = bool
allowed_cidrs = list(string)
})
default = {
cidr_block = "10.0.0.0/16"
enable_nat = true
enable_vpn = false
allowed_cidrs = ["0.0.0.0/0"]
}
}
# Security configuration
variable "security_config" {
description = "Security configuration"
type = object({
enable_ssh_access = bool
enable_vnc_access = bool
ssh_allowed_cidrs = list(string)
vnc_allowed_cidrs = list(string)
enable_disk_encryption = bool
})
default = {
enable_ssh_access = true
enable_vnc_access = true
ssh_allowed_cidrs = ["0.0.0.0/0"]
vnc_allowed_cidrs = ["10.0.0.0/16"]
enable_disk_encryption = true
}
}
# Monitoring configuration
variable "monitoring_config" {
description = "Monitoring configuration"
type = object({
enable_cloudwatch = bool
enable_custom_metrics = bool
log_retention_days = number
alert_email = string
})
default = {
enable_cloudwatch = true
enable_custom_metrics = true
log_retention_days = 30
alert_email = "devops@oven.sh"
}
}
# Backup configuration
variable "backup_config" {
description = "Backup configuration"
type = object({
enable_snapshots = bool
snapshot_schedule = string
snapshot_retention = number
enable_cross_region = bool
})
default = {
enable_snapshots = true
snapshot_schedule = "0 4 * * *" # Daily at 4 AM
snapshot_retention = 7
enable_cross_region = false
}
}
# Cost optimization
variable "cost_optimization" {
description = "Cost optimization settings"
type = object({
enable_spot_instances = bool
spot_price_max = number
enable_hibernation = bool
idle_shutdown_timeout = number
})
default = {
enable_spot_instances = false
spot_price_max = 0.0
enable_hibernation = false
idle_shutdown_timeout = 3600 # 1 hour
}
}
# Maintenance configuration
variable "maintenance_config" {
description = "Maintenance configuration"
type = object({
maintenance_window_start = string
maintenance_window_end = string
auto_update_enabled = bool
patch_schedule = string
})
default = {
maintenance_window_start = "02:00"
maintenance_window_end = "06:00"
auto_update_enabled = true
patch_schedule = "0 3 * * 0" # Weekly on Sunday at 3 AM
}
}
# Tagging
variable "tags" {
description = "Additional tags to apply to resources"
type = map(string)
default = {}
}
# SSH key configuration
variable "ssh_key_name" {
description = "Name of the SSH key pair"
type = string
default = "bun-runner-key"
}
variable "ssh_public_key_path" {
description = "Path to the SSH public key file"
type = string
default = "~/.ssh/id_rsa.pub"
}
# Feature flags
variable "feature_flags" {
description = "Feature flags for experimental features"
type = object({
enable_gpu_passthrough = bool
enable_nested_virt = bool
enable_secure_boot = bool
enable_tpm = bool
})
default = {
enable_gpu_passthrough = true
enable_nested_virt = false
enable_secure_boot = false
enable_tpm = false
}
}

View File

@@ -1,7 +0,0 @@
import { getCommit, getSecret } from "../../scripts/utils.mjs";
console.log("Submitting...");
const response = await fetch(getSecret("BENCHMARK_URL") + "?tag=_&commit=" + getCommit() + "&artifact_url=_", {
method: "POST",
});
console.log("Got status " + response.status);

View File

@@ -1,254 +0,0 @@
#!/bin/bash
set -eo pipefail
function assert_main() {
if [ -z "$BUILDKITE_REPO" ]; then
echo "error: Cannot find repository for this build"
exit 1
fi
if [ -z "$BUILDKITE_COMMIT" ]; then
echo "error: Cannot find commit for this build"
exit 1
fi
if [ -n "$BUILDKITE_PULL_REQUEST_REPO" ] && [ "$BUILDKITE_REPO" != "$BUILDKITE_PULL_REQUEST_REPO" ]; then
echo "error: Cannot upload release from a fork"
exit 1
fi
if [ "$BUILDKITE_PULL_REQUEST" != "false" ]; then
echo "error: Cannot upload release from a pull request"
exit 1
fi
if [ "$BUILDKITE_BRANCH" != "main" ]; then
echo "error: Cannot upload release from a branch other than main"
exit 1
fi
}
function assert_buildkite_agent() {
if ! command -v "buildkite-agent" &> /dev/null; then
echo "error: Cannot find buildkite-agent, please install it:"
echo "https://buildkite.com/docs/agent/v3/install"
exit 1
fi
}
function assert_github() {
assert_command "gh" "gh" "https://github.com/cli/cli#installation"
assert_buildkite_secret "GITHUB_TOKEN"
# gh expects the token in $GH_TOKEN
export GH_TOKEN="$GITHUB_TOKEN"
}
function assert_aws() {
assert_command "aws" "awscli" "https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html"
for secret in "AWS_ACCESS_KEY_ID" "AWS_SECRET_ACCESS_KEY" "AWS_ENDPOINT"; do
assert_buildkite_secret "$secret"
done
assert_buildkite_secret "AWS_BUCKET" --skip-redaction
}
function assert_sentry() {
assert_command "sentry-cli" "getsentry/tools/sentry-cli" "https://docs.sentry.io/cli/installation/"
for secret in "SENTRY_AUTH_TOKEN" "SENTRY_ORG" "SENTRY_PROJECT"; do
assert_buildkite_secret "$secret"
done
}
function run_command() {
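# Echo each command before running it (set -x); the braced redirect below
# keeps the "set +x" line itself out of the trace output.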
set -x
"$@"
{ set +x; } 2>/dev/null
}
function assert_command() {
local command="$1"
local package="$2"
local help_url="$3"
if ! command -v "$command" &> /dev/null; then
echo "warning: $command is not installed, installing..."
if command -v brew &> /dev/null; then
HOMEBREW_NO_AUTO_UPDATE=1 run_command brew install "$package"
else
echo "error: Cannot install $command, please install it"
if [ -n "$help_url" ]; then
echo ""
echo "hint: See $help_url for help"
fi
exit 1
fi
fi
}
function assert_buildkite_secret() {
local key="$1"
local value=$(buildkite-agent secret get "$key" ${@:2})
if [ -z "$value" ]; then
echo "error: Cannot find $key secret"
echo ""
echo "hint: Create a secret named $key with a value:"
echo "https://buildkite.com/docs/pipelines/buildkite-secrets"
exit 1
fi
export "$key"="$value"
}
function release_tag() {
local version="$1"
if [ "$version" == "canary" ]; then
echo "canary"
else
echo "bun-v$version"
fi
}
function create_sentry_release() {
local version="$1"
local release="$version"
if [ "$version" == "canary" ]; then
release="$BUILDKITE_COMMIT-canary"
fi
run_command sentry-cli releases new "$release" --finalize
run_command sentry-cli releases set-commits "$release" --auto --ignore-missing
if [ "$version" == "canary" ]; then
run_command sentry-cli deploys new --env="canary" --release="$release"
fi
}
function download_buildkite_artifact() {
local name="$1"
local dir="$2"
if [ -z "$dir" ]; then
dir="."
fi
run_command buildkite-agent artifact download "$name" "$dir"
if [ ! -f "$dir/$name" ]; then
echo "error: Cannot find Buildkite artifact: $name"
exit 1
fi
}
function upload_github_asset() {
local version="$1"
local tag="$(release_tag "$version")"
local file="$2"
run_command gh release upload "$tag" "$file" --clobber --repo "$BUILDKITE_REPO"
# Sometimes the upload fails, maybe this is a race condition in the gh CLI?
while [ "$(gh release view "$tag" --repo "$BUILDKITE_REPO" | grep -c "$file")" -eq 0 ]; do
echo "warn: Uploading $file to $tag failed, retrying..."
sleep "$((RANDOM % 5 + 1))"
run_command gh release upload "$tag" "$file" --clobber --repo "$BUILDKITE_REPO"
done
}
function update_github_release() {
local version="$1"
local tag="$(release_tag "$version")"
if [ "$tag" == "canary" ]; then
sleep 5 # There is possibly a race condition where this overwrites artifacts?
run_command gh release edit "$tag" --repo "$BUILDKITE_REPO" \
--notes "This release of Bun corresponds to the commit: $BUILDKITE_COMMIT"
fi
}
function upload_s3_file() {
local folder="$1"
local file="$2"
run_command aws --endpoint-url="$AWS_ENDPOINT" s3 cp "$file" "s3://$AWS_BUCKET/$folder/$file"
}
function send_discord_announcement() {
local value=$(buildkite-agent secret get "BUN_ANNOUNCE_CANARY_WEBHOOK_URL")
if [ -z "$value" ]; then
echo "warn: BUN_ANNOUNCE_CANARY_WEBHOOK_URL not set, skipping Discord announcement"
return
fi
local version="$1"
local commit="$BUILDKITE_COMMIT"
local short_sha="${commit:0:7}"
local commit_url="https://github.com/oven-sh/bun/commit/$commit"
if [ "$version" == "canary" ]; then
local json_payload=$(cat <<EOF
{
"embeds": [{
"title": "New Bun Canary now available",
"description": "A new canary build of Bun has been automatically uploaded ([${short_sha}](${commit_url})). To upgrade, run:\n\n\`\`\`shell\nbun upgrade --canary\n\`\`\`\nCommit: \`${commit}\`",
"color": 16023551,
"timestamp": "$(date -u +%Y-%m-%dT%H:%M:%SZ)"
}]
}
EOF
)
curl -H "Content-Type: application/json" \
-d "$json_payload" \
-sf \
"$value" >/dev/null
fi
}
function create_release() {
assert_main
assert_buildkite_agent
assert_github
assert_aws
assert_sentry
local tag="$1" # 'canary' or 'x.y.z'
local artifacts=(
bun-darwin-aarch64.zip
bun-darwin-aarch64-profile.zip
bun-darwin-x64.zip
bun-darwin-x64-profile.zip
bun-linux-aarch64.zip
bun-linux-aarch64-profile.zip
bun-linux-x64.zip
bun-linux-x64-profile.zip
bun-linux-x64-baseline.zip
bun-linux-x64-baseline-profile.zip
bun-linux-aarch64-musl.zip
bun-linux-aarch64-musl-profile.zip
bun-linux-x64-musl.zip
bun-linux-x64-musl-profile.zip
bun-linux-x64-musl-baseline.zip
bun-linux-x64-musl-baseline-profile.zip
bun-windows-x64.zip
bun-windows-x64-profile.zip
bun-windows-x64-baseline.zip
bun-windows-x64-baseline-profile.zip
)
function upload_artifact() {
local artifact="$1"
download_buildkite_artifact "$artifact"
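# The uploads below run in parallel (note the trailing "&") and are joined by "wait".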
if [ "$tag" == "canary" ]; then
upload_s3_file "releases/$BUILDKITE_COMMIT-canary" "$artifact" &
else
upload_s3_file "releases/$BUILDKITE_COMMIT" "$artifact" &
fi
upload_s3_file "releases/$tag" "$artifact" &
upload_github_asset "$tag" "$artifact" &
wait
}
for artifact in "${artifacts[@]}"; do
upload_artifact "$artifact"
done
update_github_release "$tag"
create_sentry_release "$tag"
send_discord_announcement "$tag"
}
function assert_canary() {
if [ -z "$CANARY" ] || [ "$CANARY" == "0" ]; then
echo "warn: Skipping release because this is not a canary build"
exit 0
fi
}
assert_canary
create_release "canary"

View File

@@ -1,9 +0,0 @@
WarningsAsErrors: "*"
FormatStyle: webkit
Checks: >
  -*,
  clang-analyzer-*,
  -clang-analyzer-optin.core.EnumCastOutOfRange,
  -clang-analyzer-webkit.UncountedLambdaCapturesChecker,
  -clang-analyzer-webkit.RefCntblBaseVirtualDtor

View File

@@ -1,8 +0,0 @@
Index:
Background: Skip # Disable slow background indexing of these files.
CompileFlags:
CompilationDatabase: build/debug
Diagnostics:
UnusedIncludes: None

View File

@@ -1,92 +0,0 @@
# Upgrading Bun's Self-Reported Node.js Version
This guide explains how to upgrade the Node.js version that Bun reports for compatibility with Node.js packages and native addons.
## Overview
Bun reports a Node.js version for compatibility with the Node.js ecosystem. This affects:
- `process.version` output
- Node-API (N-API) compatibility
- Native addon ABI compatibility
- V8 API compatibility for addons using V8 directly
## Files That Always Need Updates
### 1. Bootstrap Scripts
- `scripts/bootstrap.sh` - Update `NODEJS_VERSION=`
- `scripts/bootstrap.ps1` - Update `$NODEJS_VERSION =`
### 2. CMake Configuration
- `cmake/Options.cmake`
- `NODEJS_VERSION` - The Node.js version string (e.g., "24.3.0")
- `NODEJS_ABI_VERSION` - The ABI version number (find using command below)
### 3. Version Strings
- `src/bun.js/bindings/BunProcess.cpp`
- Update `Bun__versions_node` with the Node.js version
- Update `Bun__versions_v8` with the V8 version (find using command below)
### 4. N-API Version
- `src/napi/js_native_api.h`
- Update `NAPI_VERSION` define (check Node.js release notes)
## Files That May Need Updates
Only check these if the build fails or tests crash after updating version numbers:
- V8 compatibility files in `src/bun.js/bindings/v8/` (if V8 API changed)
- Test files (if Node.js requires newer C++ standard)
## Quick Commands to Find Version Info
```bash
# Get latest Node.js version info
curl -s https://nodejs.org/dist/index.json | jq '.[0]'
# Get V8 version for a specific Node.js version (replace v24.3.0)
curl -s https://nodejs.org/dist/v24.3.0/node-v24.3.0-headers.tar.gz | tar -xzO node-v24.3.0/include/node/node_version.h | grep V8_VERSION
# Get ABI version for a specific Node.js version
curl -s https://nodejs.org/dist/v24.3.0/node-v24.3.0-headers.tar.gz | tar -xzO node-v24.3.0/include/node/node_version.h | grep NODE_MODULE_VERSION
# Or use the ABI registry
curl -s https://raw.githubusercontent.com/nodejs/node/main/doc/abi_version_registry.json | jq '.NODE_MODULE_VERSION."<version>"'
```
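If you look these up often, the header query can be wrapped in a small helper (a sketch; it reuses the `curl`/`tar`/`grep` pipeline above):
```bash
node_abi() {
  local v="$1" # e.g. 24.3.0
  curl -s "https://nodejs.org/dist/v$v/node-v$v-headers.tar.gz" \
    | tar -xzO "node-v$v/include/node/node_version.h" \
    | grep NODE_MODULE_VERSION
}
# usage: node_abi 24.3.0
```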
## Update Process
1. **Gather version info** using the commands above
2. **Update the required files** listed in the sections above
3. **Build and test**:
```bash
bun bd
bun bd -e "console.log(process.version)"
bun bd -e "console.log(process.versions.v8)"
bun bd test test/v8/v8.test.ts
bun bd test test/napi/napi.test.ts
```
4. **Check for V8 API changes** only if build fails or tests crash:
- Compare v8-function-callback.h between versions
- Check v8-internal.h for Isolate size changes
- Look for new required APIs in build errors
## If Build Fails or Tests Crash
The V8 API rarely has breaking changes between minor Node.js versions. If you encounter issues:
1. Check build errors for missing symbols or type mismatches
2. Compare V8 headers between old and new Node.js versions
3. Most issues can be resolved by implementing missing functions or adjusting structures
## Testing Checklist
- [ ] `process.version` returns correct version
- [ ] `process.versions.v8` returns correct V8 version
- [ ] `process.config.variables.node_module_version` returns correct ABI
- [ ] V8 tests pass
- [ ] N-API tests pass
## Notes
- Most upgrades only require updating version numbers
- Major V8 version changes (rare) may require API updates
- The V8 shim implements only APIs used by common native addons

View File

@@ -1,23 +0,0 @@
Upgrade Bun's WebKit fork to the latest upstream version of WebKit.
To do that:
- cd vendor/WebKit
- git fetch upstream
- git merge upstream/main
- Fix the merge conflicts
- cd ../../ (back to bun)
- make jsc-build (this will take about 7 minutes)
- While it compiles, review in a separate task the JSC commits between the previous WebKit version and the new one, and write up a summary of the WebKit changes in a file called "webkit-changes.md"
- bun run build:local (build Bun with the new WebKit and make sure it compiles)
- After making sure it compiles, run some code to verify things work; something like ./build/debug-local/bun-debug --print '42' should be all you need
- cd vendor/WebKit
- git commit -am "Upgrade WebKit to the latest version"
- git push
- get the commit SHA of your new commit in the vendor/WebKit directory
- cd ../../ (back to bun)
- Update WEBKIT_VERSION in cmake/tools/SetupWebKit.cmake to the commit SHA of your new commit
- git checkout -b bun/webkit-upgrade-<commit-sha>
- commit + push (without adding the webkit-changes.md file)
- create a PR titled "Upgrade WebKit to the <commit-sha>" and paste your webkit-changes.md into the PR description
- delete the webkit-changes.md file
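For reference, the same flow condensed into shell (a sketch of the steps above; it assumes the `upstream` remote is already configured and that merge conflicts are resolved manually):
```sh
cd vendor/WebKit
git fetch upstream
git merge upstream/main     # fix any conflicts before continuing
cd ../..
make jsc-build              # takes about 7 minutes
bun run build:local
./build/debug-local/bun-debug --print '42'
cd vendor/WebKit
git commit -am "Upgrade WebKit to the latest version"
git push
WEBKIT_SHA=$(git rev-parse HEAD)
cd ../..
# update WEBKIT_VERSION in cmake/tools/SetupWebKit.cmake to $WEBKIT_SHA,
# then branch, commit, push, and open the PR as described above
git checkout -b "bun/webkit-upgrade-$WEBKIT_SHA"
```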

View File

@@ -1,10 +0,0 @@
{
"snapshot": "snapshot-20250706-71021aff-cc0d-4a7f-a468-d443b16c4bf1",
"install": "bun install",
"terminals": [
{
"name": "bun build",
"command": "bun run build"
}
]
}

View File

@@ -1,41 +0,0 @@
---
description:
globs: src/**/*.cpp,src/**/*.zig
alwaysApply: false
---
### Build Commands
- **Build debug version**: `bun bd` or `bun run build:debug`
- Creates a debug build at `./build/debug/bun-debug`
- Compilation takes ~2.5 minutes
- **Run tests with your debug build**: `bun bd test <test-file>`
- **CRITICAL**: Never use `bun test` directly - it won't include your changes
- **Run any command with debug build**: `bun bd <command>`
### Run a file
To run a file, use:
```sh
bun bd <file> <...args>
```
**CRITICAL**: Never use `bun <file>` directly. It will not have your changes.
### Logging
`BUN_DEBUG_$(SCOPE)=1` enables debug logs for a specific debug log scope.
Debug logs look like this:
```zig
const log = bun.Output.scoped(.${SCOPE}, false);
// ...later
log("MY DEBUG LOG", .{})
```
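For example, assuming a scope declared as `.FETCH`, you could enable its logs when running a file with your debug build:
```sh
BUN_DEBUG_FETCH=1 bun bd ./script.ts
```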
### Code Generation
Code generation happens automatically as part of the build process. There are no commands to run.

View File

@@ -1,139 +0,0 @@
---
description: Writing HMR/Dev Server tests
globs: test/bake/*
---
# Writing HMR/Dev Server tests
Dev server tests validate that hot-reloading is robust, correct, and reliable. Write thorough yet concise tests.
## File Structure
- `test/bake/bake-harness.ts` - shared utilities and test harness
- primary test functions `devTest` / `prodTest` / `devAndProductionTest`
- class `Dev` (controls subprocess for dev server)
- class `Client` (controls a happy-dom subprocess for having the page open)
- more helpers
- `test/bake/client-fixture.mjs` - the subprocess that `Client` controls. It loads a page and uses IPC to query parts of the page, run JavaScript, and more.
- `test/bake/dev/*.test.ts` - these call `devTest` to test dev server and hot reloading
- `test/bake/dev-and-prod.ts` - these use `devAndProductionTest` to run the same test in dev and production modes. These tests cannot really test hot reloading, for obvious reasons.
## Categories
bundle.test.ts - Bundling bugs that only occur in DevServer.
css.test.ts - Bundling bugs involving CSS files.
plugins.test.ts - Plugins in development mode.
ecosystem.test.ts - Tests that ensure specific libraries work correctly; prefer testing concrete bugs over entire packages.
esm.test.ts - Various ESM features in development mode.
html.test.ts - Tests relating to HTML files themselves.
react-spa.test.ts - Tests relating to React, our react-refresh transform, and basic server component transforms.
sourcemap.test.ts - Tests verifying source maps are correct.
## `devTest` Basics
A test takes two primary inputs: the `files` map and an `async test(dev) { ... }` callback:
```ts
import { devTest, emptyHtmlFile } from "../bake-harness";
devTest("html file is watched", {
files: {
"index.html": emptyHtmlFile({
scripts: ["/script.ts"],
body: "<h1>Hello</h1>",
}),
"script.ts": `
console.log("hello");
`,
},
async test(dev) {
await dev.fetch("/").expect.toInclude("<h1>Hello</h1>");
await dev.fetch("/").expect.toInclude("<h1>Hello</h1>");
await dev.patch("index.html", {
find: "Hello",
replace: "World",
});
await dev.fetch("/").expect.toInclude("<h1>World</h1>");
// Works
await using c = await dev.client("/");
await c.expectMessage("hello");
// Editing HTML reloads
await c.expectReload(async () => {
await dev.patch("index.html", {
find: "World",
replace: "Hello",
});
await dev.fetch("/").expect.toInclude("<h1>Hello</h1>");
});
await c.expectMessage("hello");
await c.expectReload(async () => {
await dev.patch("index.html", {
find: "Hello",
replace: "Bar",
});
await dev.fetch("/").expect.toInclude("<h1>Bar</h1>");
});
await c.expectMessage("hello");
await c.expectReload(async () => {
await dev.patch("script.ts", {
find: "hello",
replace: "world",
});
});
await c.expectMessage("world");
},
});
```
`files` holds the initial state, and the callback runs while the server is up. `dev.fetch()` performs HTTP requests, while `dev.client()` opens a browser instance pointed at the page.
The functions `dev.write`, `dev.patch`, and `dev.delete` mutate the filesystem. Do not use `node:fs` APIs: the dev server's versions are hooked to wait for the hot reload and for all connected clients to receive the changes.
When a change performs a hard reload, that must be explicitly annotated with `expectReload`. This tells `client-fixture.mjs` that the test is meant to reload the page once; all other hard reloads automatically fail the test.
Clients have `console.log` instrumented so that any unasserted logs fail the test. This makes it more obvious when an extra reload or re-evaluation happens. Messages are awaited via `c.expectMessage("log")`, or with multiple arguments when there are multiple logs.
## Testing for bundling errors
By default, a client opening a page that has errors will fail the test, so testing for errors must be explicit.
```ts
devTest("import then create", {
files: {
"index.html": `
<!DOCTYPE html>
<html>
<head></head>
<body>
<script type="module" src="/script.ts"></script>
</body>
</html>
`,
"script.ts": `
import data from "./data";
console.log(data);
`,
},
async test(dev) {
const c = await dev.client("/", {
errors: ['script.ts:1:18: error: Could not resolve: "./data"'],
});
await c.expectReload(async () => {
await dev.write("data.ts", "export default 'data';");
});
await c.expectMessage("data");
},
});
```
Many functions take an options value that lets you declare the errors they are expected to produce. For example, this delete causes a resolution failure:
```ts
await dev.delete("other.ts", {
errors: ['index.ts:1:16: error: Could not resolve: "./other"'],
});
```

View File

@@ -1,413 +0,0 @@
---
description: JavaScript class implemented in C++
globs: *.cpp
alwaysApply: false
---
# Implementing JavaScript classes in C++
If there is a publicly accessible Constructor and Prototype, then there are 3 classes:
- If there are C++ class members, we need a destructor, so `class Foo : public JSC::DestructibleObject`. If there are no C++ fields (only JS properties), we usually don't need a class at all: we can instead use `JSC::constructEmptyObject(vm, structure)` and `putDirectOffset`, as in [NodeFSStatBinding.cpp](mdc:src/bun.js/bindings/NodeFSStatBinding.cpp).
- `class FooPrototype : public JSC::JSNonFinalObject`
- `class FooConstructor : public JSC::InternalFunction`
If there is no publicly accessible Constructor, only the Prototype and the class are necessary. In some cases, we can avoid the prototype entirely (but that's rare).
If there are C++ fields on the Foo class, the Foo class will need an iso subspace added to [DOMClientIsoSubspaces.h](mdc:src/bun.js/bindings/webcore/DOMClientIsoSubspaces.h) and [DOMIsoSubspaces.h](mdc:src/bun.js/bindings/webcore/DOMIsoSubspaces.h). Prototype and Constructor do not need subspaces.
Usually you'll need to #include "root.h" at the top of C++ files or you'll get lint errors.
Generally, defining the subspace looks like this:
```c++
class Foo : public JSC::DestructibleObject {
// ...
template<typename MyClassT, JSC::SubspaceAccess mode>
static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm)
{
if constexpr (mode == JSC::SubspaceAccess::Concurrently)
return nullptr;
return WebCore::subspaceForImpl<MyClassT, WebCore::UseCustomHeapCellType::No>(
vm,
[](auto& spaces) { return spaces.m_clientSubspaceFor${MyClassT}.get(); },
[](auto& spaces, auto&& space) { spaces.m_clientSubspaceFor${MyClassT} = std::forward<decltype(space)>(space); },
[](auto& spaces) { return spaces.m_subspaceFor${MyClassT}.get(); },
[](auto& spaces, auto&& space) { spaces.m_subspaceFor${MyClassT} = std::forward<decltype(space)>(space); });
}
```
It's better to put it in the .cpp file instead of the .h file, when possible.
## Defining properties
Define properties on the prototype. Use a const HashTableValues like this:
```C++
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckEmail);
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckHost);
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckIP);
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckIssued);
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckPrivateKey);
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncToJSON);
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncToLegacyObject);
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncToString);
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncVerify);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_ca);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_fingerprint);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_fingerprint256);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_fingerprint512);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_subject);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_subjectAltName);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_infoAccess);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_keyUsage);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_issuer);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_issuerCertificate);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_publicKey);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_raw);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_serialNumber);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_validFrom);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_validTo);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_validFromDate);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_validToDate);
static const HashTableValue JSX509CertificatePrototypeTableValues[] = {
{ "ca"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_ca, 0 } },
{ "checkEmail"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncCheckEmail, 2 } },
{ "checkHost"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncCheckHost, 2 } },
{ "checkIP"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncCheckIP, 1 } },
{ "checkIssued"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncCheckIssued, 1 } },
{ "checkPrivateKey"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncCheckPrivateKey, 1 } },
{ "fingerprint"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_fingerprint, 0 } },
{ "fingerprint256"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_fingerprint256, 0 } },
{ "fingerprint512"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_fingerprint512, 0 } },
{ "infoAccess"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_infoAccess, 0 } },
{ "issuer"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_issuer, 0 } },
{ "issuerCertificate"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_issuerCertificate, 0 } },
{ "keyUsage"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_keyUsage, 0 } },
{ "publicKey"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_publicKey, 0 } },
{ "raw"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_raw, 0 } },
{ "serialNumber"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_serialNumber, 0 } },
{ "subject"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_subject, 0 } },
{ "subjectAltName"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_subjectAltName, 0 } },
{ "toJSON"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncToJSON, 0 } },
{ "toLegacyObject"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncToLegacyObject, 0 } },
{ "toString"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncToString, 0 } },
{ "validFrom"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_validFrom, 0 } },
{ "validFromDate"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessorOrValue), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_validFromDate, 0 } },
{ "validTo"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_validTo, 0 } },
{ "validToDate"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessorOrValue), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_validToDate, 0 } },
{ "verify"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncVerify, 1 } },
};
```
### Creating a prototype class
Follow a pattern like this:
```c++
class JSX509CertificatePrototype final : public JSC::JSNonFinalObject {
public:
using Base = JSC::JSNonFinalObject;
static constexpr unsigned StructureFlags = Base::StructureFlags;
static JSX509CertificatePrototype* create(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::Structure* structure)
{
JSX509CertificatePrototype* prototype = new (NotNull, allocateCell<JSX509CertificatePrototype>(vm)) JSX509CertificatePrototype(vm, structure);
prototype->finishCreation(vm);
return prototype;
}
template<typename, JSC::SubspaceAccess>
static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm)
{
return &vm.plainObjectSpace();
}
DECLARE_INFO;
static JSC::Structure* createStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::JSValue prototype)
{
auto* structure = JSC::Structure::create(vm, globalObject, prototype, JSC::TypeInfo(JSC::ObjectType, StructureFlags), info());
structure->setMayBePrototype(true);
return structure;
}
private:
JSX509CertificatePrototype(JSC::VM& vm, JSC::Structure* structure)
: Base(vm, structure)
{
}
void finishCreation(JSC::VM& vm);
};
const ClassInfo JSX509CertificatePrototype::s_info = { "X509Certificate"_s, &Base::s_info, nullptr, nullptr, CREATE_METHOD_TABLE(JSX509CertificatePrototype) };
void JSX509CertificatePrototype::finishCreation(VM& vm)
{
Base::finishCreation(vm);
reifyStaticProperties(vm, JSX509Certificate::info(), JSX509CertificatePrototypeTableValues, *this);
JSC_TO_STRING_TAG_WITHOUT_TRANSITION();
}
} // namespace Bun
```
### Getter definition:
```C++
JSC_DEFINE_CUSTOM_GETTER(jsX509CertificateGetter_ca, (JSGlobalObject * globalObject, EncodedJSValue thisValue, PropertyName))
{
VM& vm = globalObject->vm();
auto scope = DECLARE_THROW_SCOPE(vm);
JSX509Certificate* thisObject = jsDynamicCast<JSX509Certificate*>(JSValue::decode(thisValue));
if (UNLIKELY(!thisObject)) {
Bun::throwThisTypeError(*globalObject, scope, "JSX509Certificate"_s, "ca"_s);
return {};
}
return JSValue::encode(jsBoolean(thisObject->view().isCA()));
}
```
### Setter definition
```C++
JSC_DEFINE_CUSTOM_SETTER(jsImportMetaObjectSetter_require, (JSGlobalObject * jsGlobalObject, JSC::EncodedJSValue thisValue, JSC::EncodedJSValue encodedValue, PropertyName propertyName))
{
ImportMetaObject* thisObject = jsDynamicCast<ImportMetaObject*>(JSValue::decode(thisValue));
if (UNLIKELY(!thisObject))
return false;
JSValue value = JSValue::decode(encodedValue);
if (!value.isCell()) {
// TODO:
return true;
}
thisObject->requireProperty.set(thisObject->vm(), thisObject, value.asCell());
return true;
}
```
### Function definition
```C++
JSC_DEFINE_HOST_FUNCTION(jsX509CertificateProtoFuncToJSON, (JSGlobalObject * globalObject, CallFrame* callFrame))
{
VM& vm = globalObject->vm();
auto scope = DECLARE_THROW_SCOPE(vm);
auto *thisObject = jsDynamicCast<MyClassT*>(callFrame->thisValue());
if (UNLIKELY(!thisObject)) {
Bun::throwThisTypeError(*globalObject, scope, "MyClass"_s, "myFunctionName"_s);
return {};
}
return JSValue::encode(functionThatReturnsJSValue(vm, globalObject, thisObject));
}
```
### Constructor definition
```C++
JSC_DECLARE_HOST_FUNCTION(callStats);
JSC_DECLARE_HOST_FUNCTION(constructStats);
class JSStatsConstructor final : public JSC::InternalFunction {
public:
using Base = JSC::InternalFunction;
static constexpr unsigned StructureFlags = Base::StructureFlags;
static JSStatsConstructor* create(JSC::VM& vm, JSC::Structure* structure, JSC::JSObject* prototype)
{
JSStatsConstructor* constructor = new (NotNull, JSC::allocateCell<JSStatsConstructor>(vm)) JSStatsConstructor(vm, structure);
constructor->finishCreation(vm, prototype);
return constructor;
}
DECLARE_INFO;
template<typename CellType, JSC::SubspaceAccess>
static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm)
{
return &vm.internalFunctionSpace();
}
static JSC::Structure* createStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::JSValue prototype)
{
return JSC::Structure::create(vm, globalObject, prototype, JSC::TypeInfo(JSC::InternalFunctionType, StructureFlags), info());
}
private:
JSStatsConstructor(JSC::VM& vm, JSC::Structure* structure)
: Base(vm, structure, callStats, constructStats)
{
}
void finishCreation(JSC::VM& vm, JSC::JSObject* prototype)
{
Base::finishCreation(vm, 0, "Stats"_s);
putDirectWithoutTransition(vm, vm.propertyNames->prototype, prototype, JSC::PropertyAttribute::DontEnum | JSC::PropertyAttribute::DontDelete | JSC::PropertyAttribute::ReadOnly);
}
};
```
### Structure caching
If there's a class, prototype, and constructor:
1. Add the `JSC::LazyClassStructure` to [ZigGlobalObject.h](mdc:src/bun.js/bindings/ZigGlobalObject.h)
2. Initialize the class structure in [ZigGlobalObject.cpp](mdc:src/bun.js/bindings/ZigGlobalObject.cpp) in `void GlobalObject::finishCreation(VM& vm)`
3. Visit the class structure in visitChildren in [ZigGlobalObject.cpp](mdc:src/bun.js/bindings/ZigGlobalObject.cpp) in `void GlobalObject::visitChildrenImpl`
```c++#ZigGlobalObject.cpp
void GlobalObject::finishCreation(VM& vm) {
// ...
m_JSStatsBigIntClassStructure.initLater(
[](LazyClassStructure::Initializer& init) {
// Call the function to initialize our class structure.
Bun::initJSBigIntStatsClassStructure(init);
});
```
Then, implement the function that creates the structure:
```c++
void setupX509CertificateClassStructure(LazyClassStructure::Initializer& init)
{
auto* prototypeStructure = JSX509CertificatePrototype::createStructure(init.vm, init.global, init.global->objectPrototype());
auto* prototype = JSX509CertificatePrototype::create(init.vm, init.global, prototypeStructure);
auto* constructorStructure = JSX509CertificateConstructor::createStructure(init.vm, init.global, init.global->functionPrototype());
auto* constructor = JSX509CertificateConstructor::create(init.vm, init.global, constructorStructure, prototype);
auto* structure = JSX509Certificate::createStructure(init.vm, init.global, prototype);
init.setPrototype(prototype);
init.setStructure(structure);
init.setConstructor(constructor);
}
```
If there's only a class, use `JSC::LazyProperty<JSGlobalObject, Structure>` instead of `JSC::LazyClassStructure`:
1. Add the `JSC::LazyProperty<JSGlobalObject, Structure>` to @ZigGlobalObject.h
2. Initialize the class structure in @ZigGlobalObject.cpp in `void GlobalObject::finishCreation(VM& vm)`
3. Visit the lazy property in visitChildren in @ZigGlobalObject.cpp in `void GlobalObject::visitChildrenImpl`
```c++
void GlobalObject::finishCreation(VM& vm) {
    // ...
    this->m_myLazyProperty.initLater([](const JSC::LazyProperty<JSC::JSGlobalObject, JSC::Structure>::Initializer& init) {
        init.set(Bun::initMyStructure(init.vm, reinterpret_cast<Zig::GlobalObject*>(init.owner)));
    });
}
```
Then, implement the function that creates the structure:
```c++
Structure* setupX509CertificateStructure(JSC::VM& vm, Zig::GlobalObject* globalObject)
{
    // If there is a prototype:
    auto* prototypeStructure = JSX509CertificatePrototype::createStructure(vm, globalObject, globalObject->objectPrototype());
    auto* prototype = JSX509CertificatePrototype::create(vm, globalObject, prototypeStructure);
    // If there is no prototype, pass globalObject->objectPrototype() as the prototype instead.
    return JSX509Certificate::createStructure(vm, globalObject, prototype);
}
```
Then, use the structure by calling `globalObject.m_myStructureName.get(globalObject)`
```C++
JSC_DEFINE_HOST_FUNCTION(x509CertificateConstructorConstruct, (JSGlobalObject * globalObject, CallFrame* callFrame))
{
VM& vm = globalObject->vm();
auto scope = DECLARE_THROW_SCOPE(vm);
if (!callFrame->argumentCount()) {
Bun::throwError(globalObject, scope, ErrorCode::ERR_MISSING_ARGS, "X509Certificate constructor requires at least one argument"_s);
return {};
}
JSValue arg = callFrame->uncheckedArgument(0);
if (!arg.isCell()) {
Bun::throwError(globalObject, scope, ErrorCode::ERR_INVALID_ARG_TYPE, "X509Certificate constructor argument must be a Buffer, TypedArray, or string"_s);
return {};
}
auto* zigGlobalObject = defaultGlobalObject(globalObject);
Structure* structure = zigGlobalObject->m_JSX509CertificateClassStructure.get(zigGlobalObject);
JSValue newTarget = callFrame->newTarget();
if (UNLIKELY(zigGlobalObject->m_JSX509CertificateClassStructure.constructor(zigGlobalObject) != newTarget)) {
if (!newTarget) {
throwTypeError(globalObject, scope, "Class constructor X509Certificate cannot be invoked without 'new'"_s);
return {};
}
auto* functionGlobalObject = defaultGlobalObject(getFunctionRealm(globalObject, newTarget.getObject()));
RETURN_IF_EXCEPTION(scope, {});
structure = InternalFunction::createSubclassStructure(globalObject, newTarget.getObject(), functionGlobalObject->m_JSX509CertificateClassStructure.get(functionGlobalObject));
RETURN_IF_EXCEPTION(scope, {});
}
return JSValue::encode(createX509Certificate(vm, globalObject, structure, arg));
}
```
### Expose to Zig
To expose the constructor to zig:
```c++
extern "C" JSC::EncodedJSValue Bun__JSBigIntStatsObjectConstructor(Zig::GlobalObject* globalobject)
{
return JSValue::encode(globalobject->m_JSStatsBigIntClassStructure.constructor(globalobject));
}
```
Zig:
```zig
extern "c" fn Bun__JSBigIntStatsObjectConstructor(*JSC.JSGlobalObject) JSC.JSValue;
pub const getBigIntStatsConstructor = Bun__JSBigIntStatsObjectConstructor;
```
To create an object (instance) of a JS class defined in C++ from Zig, follow the `__toJS` convention like this:
```c++
// X509* is whatever we need to create the object
extern "C" EncodedJSValue Bun__X509__toJS(Zig::GlobalObject* globalObject, X509* cert)
{
// ... implementation details
auto* structure = globalObject->m_JSX509CertificateClassStructure.get(globalObject);
return JSValue::encode(JSX509Certificate::create(globalObject->vm(), structure, globalObject, WTFMove(cert)));
}
```
And from Zig:
```zig
const X509 = opaque {
// ... class
extern fn Bun__X509__toJS(*JSC.JSGlobalObject, *X509) JSC.JSValue;
pub fn toJS(this: *X509, globalObject: *JSC.JSGlobalObject) JSC.JSValue {
return Bun__X509__toJS(globalObject, this);
}
};
```

View File

@@ -1,203 +0,0 @@
# Registering Functions, Objects, and Modules in Bun
This guide documents the process of adding new functionality to the Bun global object and runtime.
## Overview
Bun's architecture exposes functionality to JavaScript through a set of carefully registered functions, objects, and modules. Most core functionality is implemented in Zig, with JavaScript bindings that make these features accessible to users.
There are several key ways to expose functionality in Bun:
1. **Global Functions**: Direct methods on the `Bun` object (e.g., `Bun.serve()`)
2. **Getter Properties**: Lazily initialized properties on the `Bun` object (e.g., `Bun.sqlite`)
3. **Constructor Classes**: Classes available through the `Bun` object (e.g., `Bun.ValkeyClient`)
4. **Global Modules**: Modules that can be imported directly (e.g., `import {X} from "bun:*"`)
## The Registration Process
Adding new functionality to Bun involves several coordinated steps across multiple files:
### 1. Implement the Core Functionality in Zig
First, implement your feature in Zig, typically in its own directory in `src/`. Examples:
- `src/valkey/` for Redis/Valkey client
- `src/semver/` for SemVer functionality
- `src/smtp/` for SMTP client
### 2. Create JavaScript Bindings
Create bindings that expose your Zig functionality to JavaScript:
- Create a class definition file (e.g., `js_bindings.classes.ts`) to define the JavaScript interface
- Implement `JSYourFeature` struct in a file like `js_your_feature.zig`
Example from a class definition file:
```typescript
// Example from a .classes.ts file
import { define } from "../../codegen/class-definitions";
export default [
define({
name: "YourFeature",
construct: true,
finalize: true,
hasPendingActivity: true,
memoryCost: true,
klass: {},
JSType: "0b11101110",
proto: {
yourMethod: {
fn: "yourZigMethod",
length: 1,
},
property: {
getter: "getProperty",
},
},
values: ["cachedValues"],
}),
];
```
### 3. Register with BunObject in `src/bun.js/bindings/BunObject+exports.h`
Add an entry to the `FOR_EACH_GETTER` macro:
```c
// In BunObject+exports.h
#define FOR_EACH_GETTER(macro) \
macro(CSRF) \
macro(CryptoHasher) \
... \
macro(YourFeature) \
```
### 4. Create a Getter Function in `src/bun.js/api/BunObject.zig`
Implement a getter function in `BunObject.zig` that returns your feature:
```zig
// In BunObject.zig
pub const YourFeature = toJSGetter(Bun.getYourFeatureConstructor);
// In the exportAll() function:
@export(&BunObject.YourFeature, .{ .name = getterName("YourFeature") });
```
### 5. Implement the Getter Function in a Relevant Zig File
Implement the function that creates your object:
```zig
// In your main module file (e.g., src/your_feature/your_feature.zig)
pub fn getYourFeatureConstructor(globalThis: *JSC.JSGlobalObject, _: *JSC.JSObject) JSC.JSValue {
return JSC.API.YourFeature.getConstructor(globalThis);
}
```
### 6. Add to Build System
Ensure your files are included in the build system by adding them to the appropriate targets.
## Example: Adding a New Module
Here's a comprehensive example of adding a hypothetical SMTP module:
1. Create implementation files in `src/smtp/`:
- `index.zig`: Main entry point that exports everything
- `SmtpClient.zig`: Core SMTP client implementation
- `js_smtp.zig`: JavaScript bindings
- `js_bindings.classes.ts`: Class definition
2. Define your JS class in `js_bindings.classes.ts`:
```typescript
import { define } from "../../codegen/class-definitions";
export default [
define({
name: "EmailClient",
construct: true,
finalize: true,
hasPendingActivity: true,
configurable: false,
memoryCost: true,
klass: {},
JSType: "0b11101110",
proto: {
send: {
fn: "send",
length: 1,
},
verify: {
fn: "verify",
length: 0,
},
close: {
fn: "close",
length: 0,
},
},
values: ["connectionPromise"],
}),
];
```
3. Add getter to `BunObject+exports.h`:
```c
#define FOR_EACH_GETTER(macro) \
macro(CSRF) \
... \
macro(SMTP) \
```
4. Add getter function to `BunObject.zig`:
```zig
pub const SMTP = toJSGetter(Bun.getSmtpConstructor);
// In exportAll:
@export(&BunObject.SMTP, .{ .name = getterName("SMTP") });
```
5. Implement getter in your module:
```zig
pub fn getSmtpConstructor(globalThis: *JSC.JSGlobalObject, _: *JSC.JSObject) JSC.JSValue {
return JSC.API.JSEmailClient.getConstructor(globalThis);
}
```
## Best Practices
1. **Follow Naming Conventions**: Align your naming with existing patterns
2. **Reference Existing Modules**: Study similar modules like Valkey or S3Client for guidance
3. **Memory Management**: Be careful with memory management and reference counting
4. **Error Handling**: Use `bun.JSError!JSValue` for proper error propagation (see the sketch after this list)
5. **Documentation**: Add JSDoc comments to your JavaScript bindings
6. **Testing**: Add tests for your new functionality
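A minimal sketch of that error-propagation convention, reusing the hypothetical `EmailClient` from the example above (the `closed` field and the `throw` helper usage are illustrative assumptions):
```zig
pub fn verify(this: *EmailClient, globalThis: *JSC.JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSC.JSValue {
    _ = callFrame;
    if (this.closed) {
        // Returning from the throw helper propagates a pending JS exception
        // through the bun.JSError error set.
        return globalThis.throw("EmailClient is closed", .{});
    }
    return JSC.JSValue.jsBoolean(true);
}
```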
## Common Gotchas
- Be sure to handle reference counting properly with `ref()`/`deref()` (a minimal sketch follows this list)
- Always implement proper cleanup in `deinit()` and `finalize()`
- For network operations, manage socket lifetimes correctly
- Use `JSC.Codegen` correctly to generate necessary binding code
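A minimal sketch of that lifecycle pattern (struct and field names are illustrative; `bun.new`/`bun.destroy` follow the allocation pattern used elsewhere in these docs):
```zig
const Client = struct {
    ref_count: u32 = 1,

    pub fn ref(this: *Client) void {
        this.ref_count += 1;
    }

    pub fn deref(this: *Client) void {
        this.ref_count -= 1;
        if (this.ref_count == 0) this.deinit();
    }

    fn deinit(this: *Client) void {
        // Release sockets, buffers, and other native resources first,
        // then free the struct itself.
        bun.destroy(this);
    }
};
```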
## Related Files
- `src/bun.js/bindings/BunObject+exports.h`: Registration of getters and functions
- `src/bun.js/api/BunObject.zig`: Implementation of getters and object creation
- `src/bun.js/api/BunObject.classes.ts`: Class definitions
- `.cursor/rules/zig-javascriptcore-classes.mdc`: More details on class bindings
## Additional Resources
For more detailed information on specific topics:
- See `zig-javascriptcore-classes.mdc` for details on creating JS class bindings
- Review existing modules like `valkey`, `sqlite`, or `s3` for real-world examples

View File

@@ -1,91 +0,0 @@
---
description: Writing tests for Bun
globs:
---
# Writing tests for Bun
## Where tests are found
You'll find all of Bun's tests in the `test/` directory.
* `test/`
* `cli/` - CLI command tests, like `bun install` or `bun init`
* `js/` - JavaScript & TypeScript tests
* `bun/` - `Bun` API tests, separated by category, for example: `glob/` for `Bun.Glob` tests
* `node/` - Node.js module tests, separated by module, for example: `assert/` for `node:assert` tests
* `test/` - Vendored Node.js tests, taken from the Node.js repository (does not conform to Bun's test style)
* `web/` - Web API tests, separated by category, for example: `fetch/` for `Request` and `Response` tests
* `third_party/` - npm package tests, to validate that basic usage works in Bun
* `napi/` - N-API tests
* `v8/` - V8 C++ API tests
* `bundler/` - Bundler, transpiler, CSS, and `bun build` tests
* `regression/issue/[number]` - Regression tests, always make one when fixing a particular issue
## How tests are written
Bun's tests are written as JavaScript and TypeScript files using Jest-style APIs like `test`, `describe`, and `expect`. They run under Bun's own test runner, `bun test`.
```js
import { describe, test, expect } from "bun:test";
import assert, { AssertionError } from "assert";
describe("assert(expr)", () => {
test.each([true, 1, "foo"])(`assert(%p) does not throw`, expr => {
expect(() => assert(expr)).not.toThrow();
});
test.each([false, 0, "", null, undefined])(`assert(%p) throws`, expr => {
expect(() => assert(expr)).toThrow(AssertionError);
});
});
```
## Testing conventions
* See `test/harness.ts` for common test utilities and helpers
* Be rigorous and test for edge-cases and unexpected inputs
* Use data-driven tests, e.g. `test.each`, to reduce boilerplate when possible
* When you need to test Bun as a CLI, use the following pattern:
```js
import { test, expect } from "bun:test";
import { spawn } from "bun";
import { bunExe, bunEnv } from "harness";
test("bun --version", async () => {
const { exited, stdout: stdoutStream, stderr: stderrStream } = spawn({
cmd: [bunExe(), "--version"],
env: bunEnv,
stdout: "pipe",
stderr: "pipe",
});
const [ exitCode, stdout, stderr ] = await Promise.all([
exited,
new Response(stdoutStream).text(),
new Response(stderrStream).text(),
]);
expect({ exitCode, stdout, stderr }).toMatchObject({
exitCode: 0,
stdout: expect.stringContaining(Bun.version),
stderr: "",
});
});
```
## Before writing a test
* If you are fixing a bug, write the test first and make sure it fails (as expected) with the canary version of Bun
* If you are fixing a Node.js compatibility bug, create a throw-away snippet of code, verify that it works as you expect in Node.js, then confirm that it fails (as expected) with the canary version of Bun
* When the expected behaviour is ambiguous, defer to matching what happens in Node.js
* Always attempt to find related tests in an existing test file before creating a new test file

View File

@@ -1,509 +0,0 @@
---
description: How Zig works with JavaScriptCore bindings generator
globs:
alwaysApply: false
---
# Bun's JavaScriptCore Class Bindings Generator
This document explains how Bun's class bindings generator works to bridge Zig and JavaScript code through JavaScriptCore (JSC).
## Architecture Overview
Bun's binding system creates a seamless bridge between JavaScript and Zig, allowing Zig implementations to be exposed as JavaScript classes. The system has several key components:
1. **Zig Implementation** (.zig files)
2. **JavaScript Interface Definition** (.classes.ts files)
3. **Generated Code** (C++/Zig files that connect everything)
## Class Definition Files
### JavaScript Interface (.classes.ts)
The `.classes.ts` files define the JavaScript API using a declarative approach:
```typescript
// Example: encoding.classes.ts
define({
name: "TextDecoder",
constructor: true,
JSType: "object",
finalize: true,
proto: {
decode: {
// Function definition
args: 1,
},
encoding: {
// Getter with caching
getter: true,
cache: true,
},
fatal: {
// Read-only property
getter: true,
},
ignoreBOM: {
// Read-only property
getter: true,
},
},
});
```
Each class definition specifies:
- The class name
- Whether it has a constructor
- JavaScript type (object, function, etc.)
- Properties and methods in the `proto` field
- Caching strategy for properties
- Finalization requirements
### Zig Implementation (.zig)
The Zig files implement the native functionality:
```zig
// Example: TextDecoder.zig
pub const TextDecoder = struct {
// Expose generated bindings as `js` namespace with trait conversion methods
pub const js = JSC.Codegen.JSTextDecoder;
pub const toJS = js.toJS;
pub const fromJS = js.fromJS;
pub const fromJSDirect = js.fromJSDirect;
// Internal state
encoding: []const u8,
fatal: bool,
ignoreBOM: bool,
// Constructor implementation - note use of globalObject
pub fn constructor(
globalObject: *JSGlobalObject,
callFrame: *JSC.CallFrame,
) bun.JSError!*TextDecoder {
// Implementation
return bun.new(TextDecoder, .{
// Fields
});
}
// Prototype methods - note return type includes JSError
pub fn decode(
this: *TextDecoder,
globalObject: *JSGlobalObject,
callFrame: *JSC.CallFrame,
) bun.JSError!JSC.JSValue {
// Implementation
}
// Getters
pub fn getEncoding(this: *TextDecoder, globalObject: *JSGlobalObject) JSC.JSValue {
return JSC.JSValue.createStringFromUTF8(globalObject, this.encoding);
}
pub fn getFatal(this: *TextDecoder, globalObject: *JSGlobalObject) JSC.JSValue {
return JSC.JSValue.jsBoolean(this.fatal);
}
// Cleanup - note standard pattern of using deinit/deref
fn deinit(this: *TextDecoder) void {
// Release any retained resources
// Free the pointer at the end.
bun.destroy(this);
}
// Finalize - called by JS garbage collector. This should call deinit, or deref if reference counted.
pub fn finalize(this: *TextDecoder) void {
this.deinit();
}
};
```
Key components in the Zig file:
- The struct containing native state
- `pub const js = JSC.Codegen.JS<ClassName>` to include generated code
- Constructor and methods using `bun.JSError!JSValue` return type for proper error handling
- Consistent use of `globalObject` parameter name instead of `ctx`
- Methods matching the JavaScript interface
- Getters/setters for properties
- Proper resource cleanup pattern with `deinit()` and `finalize()`
- Update `src/bun.js/bindings/generated_classes_list.zig` to include the new class
## Code Generation System
The binding generator produces C++ code that connects JavaScript and Zig:
1. **JSC Class Structure**: Creates C++ classes for the JS object, prototype, and constructor
2. **Memory Management**: Handles GC integration through JSC's WriteBarrier
3. **Method Binding**: Connects JS function calls to Zig implementations
4. **Type Conversion**: Converts between JS values and Zig types
5. **Property Caching**: Implements the caching system for properties
The generated C++ code includes:
- A JSC wrapper class (`JSTextDecoder`)
- A prototype class (`JSTextDecoderPrototype`)
- A constructor function (`JSTextDecoderConstructor`)
- Function bindings (`TextDecoderPrototype__decodeCallback`)
- Property getters/setters (`TextDecoderPrototype__encodingGetterWrap`)
## CallFrame Access
The `CallFrame` object provides access to JavaScript execution context:
```zig
pub fn decode(
this: *TextDecoder,
globalObject: *JSGlobalObject,
callFrame: *JSC.CallFrame
) bun.JSError!JSC.JSValue {
// Get arguments
const input = callFrame.argument(0);
const options = callFrame.argument(1);
// Get this value
const thisValue = callFrame.thisValue();
// Implementation with error handling
if (input.isUndefinedOrNull()) {
return globalObject.throw("Input cannot be null or undefined", .{});
}
// Return value or throw error
return JSC.JSValue.jsString(globalObject, "result");
}
```
CallFrame methods include:
- `argument(i)`: Get the i-th argument
- `argumentCount()`: Get the number of arguments
- `thisValue()`: Get the `this` value
- `callee()`: Get the function being called
## Property Caching and GC-Owned Values
The `cache: true` option in property definitions enables JSC's WriteBarrier to efficiently store values:
```typescript
encoding: {
getter: true,
cache: true, // Enable caching
}
```
### C++ Implementation
In the generated C++ code, caching uses JSC's WriteBarrier:
```cpp
JSC_DEFINE_CUSTOM_GETTER(TextDecoderPrototype__encodingGetterWrap, (...)) {
auto& vm = JSC::getVM(lexicalGlobalObject);
Zig::GlobalObject *globalObject = reinterpret_cast<Zig::GlobalObject*>(lexicalGlobalObject);
auto throwScope = DECLARE_THROW_SCOPE(vm);
JSTextDecoder* thisObject = jsCast<JSTextDecoder*>(JSValue::decode(encodedThisValue));
JSC::EnsureStillAliveScope thisArg = JSC::EnsureStillAliveScope(thisObject);
// Check for cached value and return if present
if (JSValue cachedValue = thisObject->m_encoding.get())
return JSValue::encode(cachedValue);
// Get value from Zig implementation
JSC::JSValue result = JSC::JSValue::decode(
TextDecoderPrototype__getEncoding(thisObject->wrapped(), globalObject)
);
RETURN_IF_EXCEPTION(throwScope, {});
// Store in cache for future access
thisObject->m_encoding.set(vm, thisObject, result);
RELEASE_AND_RETURN(throwScope, JSValue::encode(result));
}
```
### Zig Accessor Functions
For each cached property, the generator creates Zig accessor functions that allow Zig code to work with these GC-owned values:
```zig
// External function declarations
extern fn TextDecoderPrototype__encodingSetCachedValue(JSC.JSValue, *JSC.JSGlobalObject, JSC.JSValue) callconv(JSC.conv) void;
extern fn TextDecoderPrototype__encodingGetCachedValue(JSC.JSValue) callconv(JSC.conv) JSC.JSValue;
/// `TextDecoder.encoding` setter
/// This value will be visited by the garbage collector.
pub fn encodingSetCached(thisValue: JSC.JSValue, globalObject: *JSC.JSGlobalObject, value: JSC.JSValue) void {
JSC.markBinding(@src());
TextDecoderPrototype__encodingSetCachedValue(thisValue, globalObject, value);
}
/// `TextDecoder.encoding` getter
/// This value will be visited by the garbage collector.
pub fn encodingGetCached(thisValue: JSC.JSValue) ?JSC.JSValue {
JSC.markBinding(@src());
const result = TextDecoderPrototype__encodingGetCachedValue(thisValue);
if (result == .zero)
return null;
return result;
}
```
### Benefits of GC-Owned Values
This system provides several key benefits:
1. **Automatic Memory Management**: The JavaScriptCore GC tracks and manages these values
2. **Proper Garbage Collection**: The WriteBarrier ensures values are properly visited during GC
3. **Consistent Access**: Zig code can easily get/set these cached JS values
4. **Performance**: Cached values avoid repeated computation or serialization
### Use Cases
GC-owned cached values are particularly useful for:
1. **Computed Properties**: Store expensive computation results
2. **Lazily Created Objects**: Create objects only when needed, then cache them (sketched below)
3. **References to Other Objects**: Store references to other JS objects that need GC tracking
4. **Memoization**: Cache results based on input parameters
The WriteBarrier mechanism ensures that any JS values stored in this way are properly tracked by the garbage collector.
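As an illustration of the lazily-created-object case, a getter can consult the cache before computing. This is a sketch only, not generated code; it assumes the getter is configured to also receive the wrapper's `thisValue`, which depends on the class definition:
```zig
// Sketch: lazily build the `encoding` string once, then serve the
// GC-owned cached copy on later reads.
pub fn getEncoding(this: *TextDecoder, thisValue: JSC.JSValue, globalObject: *JSC.JSGlobalObject) JSC.JSValue {
    if (encodingGetCached(thisValue)) |cached| {
        return cached; // computed on a previous call; tracked by the GC
    }
    const value = JSC.JSValue.createStringFromUTF8(globalObject, this.encoding);
    encodingSetCached(thisValue, globalObject, value);
    return value;
}
```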
## Memory Management and Finalization
The binding system handles memory management across the JavaScript/Zig boundary:
1. **Object Creation**: JavaScript `new TextDecoder()` creates both a JS wrapper and a Zig struct
2. **Reference Tracking**: JSC's GC tracks all JS references to the object
3. **Finalization**: When the JS object is collected, the finalizer releases Zig resources
Bun uses a consistent pattern for resource cleanup:
```zig
// Resource cleanup method - separate from finalization
pub fn deinit(this: *TextDecoder) void {
// Release resources like strings
this._encoding.deref(); // String deref pattern
// Free any buffers
if (this.buffer) |buffer| {
bun.default_allocator.free(buffer);
}
}
// Called by the GC when object is collected
pub fn finalize(this: *TextDecoder) void {
JSC.markBinding(@src()); // For debugging
this.deinit(); // Clean up resources
bun.default_allocator.destroy(this); // Free the object itself
}
```
Some objects that hold references to other JS objects use `.deref()` instead:
```zig
pub fn finalize(this: *SocketAddress) void {
JSC.markBinding(@src());
this._presentation.deref(); // Release references
this.destroy();
}
```
## Error Handling with JSError
Bun uses `bun.JSError!JSValue` return type for proper error handling:
```zig
pub fn decode(
this: *TextDecoder,
globalObject: *JSGlobalObject,
callFrame: *JSC.CallFrame
) bun.JSError!JSC.JSValue {
// Throwing an error
if (callFrame.argumentCount() < 1) {
return globalObject.throw("Missing required argument", .{});
}
// Or returning a success value
return JSC.JSValue.jsString(globalObject, "Success!");
}
```
This pattern allows Zig functions to:
1. Return JavaScript values on success
2. Throw JavaScript exceptions on error
3. Propagate errors automatically through the call stack
## Type Safety and Error Handling
The binding system includes robust error handling:
```cpp
// Example of type checking in generated code
JSTextDecoder* thisObject = jsDynamicCast<JSTextDecoder*>(callFrame->thisValue());
if (UNLIKELY(!thisObject)) {
scope.throwException(lexicalGlobalObject,
Bun::createInvalidThisError(lexicalGlobalObject, callFrame->thisValue(), "TextDecoder"_s));
return {};
}
```
## Prototypal Inheritance
The binding system creates proper JavaScript prototype chains:
1. **Constructor**: JSTextDecoderConstructor with standard .prototype property
2. **Prototype**: JSTextDecoderPrototype with methods and properties
3. **Instances**: Each JSTextDecoder instance with `__proto__` pointing to the prototype
This ensures JavaScript inheritance works as expected:
```cpp
// From generated code
void JSTextDecoderConstructor::finishCreation(VM& vm, JSC::JSGlobalObject* globalObject, JSTextDecoderPrototype* prototype)
{
Base::finishCreation(vm, 0, "TextDecoder"_s, PropertyAdditionMode::WithoutStructureTransition);
// Set up the prototype chain
putDirectWithoutTransition(vm, vm.propertyNames->prototype, prototype, PropertyAttribute::DontEnum | PropertyAttribute::DontDelete | PropertyAttribute::ReadOnly);
ASSERT(inherits(info()));
}
```
## Performance Considerations
The binding system is optimized for performance:
1. **Direct Pointer Access**: JavaScript objects maintain a direct pointer to Zig objects
2. **Property Caching**: WriteBarrier caching avoids repeated native calls for stable properties
3. **Memory Management**: JSC garbage collection integrated with Zig memory management
4. **Type Conversion**: Fast paths for common JavaScript/Zig type conversions
## Creating a New Class Binding
To create a new class binding in Bun:
1. **Define the class interface** in a `.classes.ts` file:
```typescript
define({
name: "MyClass",
constructor: true,
finalize: true,
proto: {
myMethod: {
args: 1,
},
myProperty: {
getter: true,
cache: true,
},
},
});
```
2. **Implement the native functionality** in a `.zig` file:
```zig
pub const MyClass = struct {
// Generated bindings
pub const js = JSC.Codegen.JSMyClass;
pub const toJS = js.toJS;
pub const fromJS = js.fromJS;
pub const fromJSDirect = js.fromJSDirect;
// State
value: []const u8,
pub const new = bun.TrivialNew(@This());
// Constructor
pub fn constructor(
globalObject: *JSGlobalObject,
callFrame: *JSC.CallFrame,
) bun.JSError!*MyClass {
const arg = callFrame.argument(0);
// Implementation
}
// Method
pub fn myMethod(
this: *MyClass,
globalObject: *JSGlobalObject,
callFrame: *JSC.CallFrame,
) bun.JSError!JSC.JSValue {
// Implementation
}
// Getter
pub fn getMyProperty(this: *MyClass, globalObject: *JSGlobalObject) JSC.JSValue {
return JSC.JSValue.jsString(globalObject, this.value);
}
// Resource cleanup
pub fn deinit(this: *MyClass) void {
// Clean up resources
}
pub fn finalize(this: *MyClass) void {
this.deinit();
bun.destroy(this);
}
};
```
3. **The binding generator** creates all necessary C++ and Zig glue code to connect JavaScript and Zig, including:
- C++ class definitions
- Method and property bindings
- Memory management utilities
- GC integration code
## Generated Code Structure
The binding generator produces several components:
### 1. C++ Classes
For each Zig class, the system generates:
- **JS<Class>**: Main wrapper that holds a pointer to the Zig object (`JSTextDecoder`)
- **JS<Class>Prototype**: Contains methods and properties (`JSTextDecoderPrototype`)
- **JS<Class>Constructor**: Implementation of the JavaScript constructor (`JSTextDecoderConstructor`)
### 2. C++ Methods and Properties
- **Method Callbacks**: `TextDecoderPrototype__decodeCallback`
- **Property Getters/Setters**: `TextDecoderPrototype__encodingGetterWrap`
- **Initialization Functions**: `finishCreation` methods for setting up the class
### 3. Zig Bindings
- **External Function Declarations**:
```zig
extern fn TextDecoderPrototype__decode(*TextDecoder, *JSC.JSGlobalObject, *JSC.CallFrame) callconv(JSC.conv) JSC.EncodedJSValue;
```
- **Cached Value Accessors**:
```zig
pub fn encodingGetCached(thisValue: JSC.JSValue) ?JSC.JSValue { ... }
pub fn encodingSetCached(thisValue: JSC.JSValue, globalObject: *JSC.JSGlobalObject, value: JSC.JSValue) void { ... }
```
- **Constructor Helpers**:
```zig
pub fn create(globalObject: *JSC.JSGlobalObject) bun.JSError!JSC.JSValue { ... }
```
### 4. GC Integration
- **Memory Cost Calculation**: `estimatedSize` method (see the sketch below)
- **Child Visitor Methods**: `visitChildrenImpl` and `visitAdditionalChildren`
- **Heap Analysis**: `analyzeHeap` for debugging memory issues
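Where a wrapper retains sizable native buffers, reporting that memory helps the collector schedule work sensibly. A minimal sketch, assuming the class opts into `estimatedSize` and owns an optional `buffer` slice (both assumptions here, not part of the generated code):
```zig
// Sketch: report native memory held by the wrapper so JSC's GC
// can account for it.
pub fn estimatedSize(this: *TextDecoder) usize {
    const buffer_len = if (this.buffer) |buffer| buffer.len else 0;
    return @sizeOf(TextDecoder) + buffer_len;
}
```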
This architecture makes it possible to implement high-performance native functionality in Zig while exposing a clean, idiomatic JavaScript API to users.

View File

@@ -1,7 +0,0 @@
# Add directories or file patterns to ignore during indexing (e.g. foo/ or *.csv)
bench
vendor
*-fixture.{js,ts}
zig-cache
packages/bun-uws/fuzzing
build

1539
.docker/chrome.json Normal file

File diff suppressed because it is too large

14
.docker/chromium.pref Normal file
View File

@@ -0,0 +1,14 @@
# Note: 2 blank lines are required between entries
Package: *
Pin: release a=eoan
Pin-Priority: 500

Package: *
Pin: origin "ftp.debian.org"
Pin-Priority: 300

# Pattern includes 'chromium', 'chromium-browser' and similarly
# named dependencies:
Package: chromium*
Pin: origin "ftp.debian.org"
Pin-Priority: 700

View File

@@ -0,0 +1,8 @@
#!/bin/bash
set -euxo pipefail
name=$(openssl rand -hex 12)
id=$(docker create --name=bun-binary-$name $CONTAINER_TAG)
docker container cp bun-binary-$name:$BUN_RELEASE_DIR bun-binary
echo -e "bun-binary-$name"

3
.docker/debian.list Normal file
View File

@@ -0,0 +1,3 @@
deb http://deb.debian.org/debian buster main
deb http://deb.debian.org/debian buster-updates main
deb http://deb.debian.org/debian-security buster/updates main

View File

@@ -0,0 +1,34 @@
export DOCKER_BUILDKIT=1
export BUILDKIT_ARCH=$(uname -m)
export ARCH=${BUILDKIT_ARCH}
if [ "$BUILDKIT_ARCH" == "amd64" ]; then
export BUILDKIT_ARCH="amd64"
export ARCH=x64
fi
if [ "$BUILDKIT_ARCH" == "x86_64" ]; then
export BUILDKIT_ARCH="amd64"
export ARCH=x64
fi
if [ "$BUILDKIT_ARCH" == "arm64" ]; then
export BUILDKIT_ARCH="arm64"
export ARCH=aarch64
fi
if [ "$BUILDKIT_ARCH" == "aarch64" ]; then
export BUILDKIT_ARCH="arm64"
export ARCH=aarch64
fi
if [ "$BUILDKIT_ARCH" == "armv7l" ]; then
echo "Unsupported platform: $BUILDKIT_ARCH"
exit 1
fi
export BUILD_ID=$(cat build-id)
export CONTAINER_NAME=bun-linux-$ARCH
export DEBUG_CONTAINER_NAME=debug-bun-linux-$ARCH
export TEMP=/tmp/bun-0.0.$BUILD_ID

11
.docker/pull.sh Normal file
View File

@@ -0,0 +1,11 @@
#!/bin/bash
set -euxo pipefail
docker pull bunbunbunbun/bun-test-base:latest --platform=linux/amd64
docker pull bunbunbunbun/bun-base:latest --platform=linux/amd64
docker pull bunbunbunbun/bun-base-with-zig-and-webkit:latest --platform=linux/amd64
docker tag bunbunbunbun/bun-test-base:latest bun-base:latest
docker tag bunbunbunbun/bun-base:latest bun-base:latest
docker tag bunbunbunbun/bun-base-with-zig-and-webkit:latest bun-base-with-zig-and-webkit:latest

47
.docker/run-dockerfile.sh Normal file
View File

@@ -0,0 +1,47 @@
#!/bin/bash
source "dockerfile-common.sh"
export CONTAINER_NAME=$CONTAINER_NAME-local
rm -rf $TEMP
mkdir -p $TEMP
docker build . --target release --progress=plain -t $CONTAINER_NAME:latest --build-arg BUILDKIT_INLINE_CACHE=1 --platform=linux/$BUILDKIT_ARCH --cache-from $CONTAINER_NAME:latest
if (($?)); then
echo "Failed to build container"
exit 1
fi
id=$(docker create $CONTAINER_NAME:latest)
docker cp $id:/home/ubuntu/bun-release $TEMP/$CONTAINER_NAME
if (($?)); then
echo "Failed to cp container"
exit 1
fi
cd $TEMP
mkdir -p $TEMP/$CONTAINER_NAME $TEMP/$DEBUG_CONTAINER_NAME
mv $CONTAINER_NAME/bun-profile $DEBUG_CONTAINER_NAME/bun
zip -r $CONTAINER_NAME.zip $CONTAINER_NAME
zip -r $DEBUG_CONTAINER_NAME.zip $DEBUG_CONTAINER_NAME
docker rm -v $id
abs=$(realpath $TEMP/$CONTAINER_NAME.zip)
debug_abs=$(realpath $TEMP/$DEBUG_CONTAINER_NAME.zip)
case $(uname -s) in
"Linux") target="linux" ;;
*) target="other" ;;
esac
if [ "$target" = "linux" ]; then
if command -v bun >/dev/null; then
cp $TEMP/$CONTAINER_NAME/bun $(which bun)
cp $TEMP/$DEBUG_CONTAINER_NAME/bun $(which bun-profile)
fi
fi
echo "Saved to:"
echo $debug_abs
echo $abs

9
.docker/run-test.sh Executable file
View File

@@ -0,0 +1,9 @@
#!/bin/bash
set -euxo pipefail
bun install
bun install --cwd ./test/snippets
bun install --cwd ./test/scripts
make $BUN_TEST_NAME

5
.docker/runner.sh Normal file
View File

@@ -0,0 +1,5 @@
#!/bin/bash
set -euxo pipefail
docker container run --security-opt seccomp=.docker/chrome.json --env GITHUB_WORKSPACE=$GITHUB_WORKSPACE --env BUN_TEST_NAME=$BUN_TEST_NAME --ulimit memlock=-1:-1 --init --rm bun-test:latest

5
.docker/unit-tests.sh Normal file
View File

@@ -0,0 +1,5 @@
#!/bin/bash
set -euxo pipefail
docker container run --security-opt seccomp=.docker/chrome.json --env GITHUB_WORKSPACE=$GITHUB_WORKSPACE --ulimit memlock=-1:-1 --init --rm bun-unit-tests:latest

View File

@@ -11,11 +11,5 @@ packages/**/bun-profile
src/bun.js/WebKit
src/bun.js/WebKit/LayoutTests
zig-build
.zig-cache
zig-out
build
vendor
node_modules
*.trace
packages/bun-uws/fuzzing
zig-cache
zig-out

View File

@@ -1,10 +0,0 @@
# Add commits to ignore in `git blame`. This allows large stylistic refactors to
# avoid mucking up blames.
#
# To configure git to use this, run:
#
# git config blame.ignoreRevsFile .git-blame-ignore-revs
#
4ec410e0d7c5f6a712c323444edbf56b48d432d8 # make @import("bun") work in zig (#19096)
dedd433cbf2e2fe38e51bc166e08fbcc601ad42b # JSValue.undefined -> .jsUndefined()
6b4662ff55f58247cc2fd22e85b4f9805b0950a5 # JSValue.jsUndefined() -> .js_undefined

14
.gitattributes vendored
View File

@@ -7,7 +7,6 @@
*.cpp text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.cc text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.yml text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.toml text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.zig text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.rs text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.h text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
@@ -15,7 +14,6 @@
*.lock text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.map text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.md text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.mdc text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.mjs text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.mts text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
@@ -45,13 +43,5 @@ test/**/* linguist-documentation
bench/**/* linguist-documentation
examples/**/* linguist-documentation
vendor/*.c linguist-vendored
vendor/brotli/** linguist-vendored
test/js/node/test/fixtures linguist-vendored
test/js/node/test/common linguist-vendored
test/js/bun/css/files linguist-vendored
.vscode/*.json linguist-language=JSON-with-Comments
src/cli/init/tsconfig.default.json linguist-language=JSON-with-Comments
src/deps/*.c linguist-vendored
src/deps/brotli/** linguist-vendored

5
.github/CODEOWNERS vendored
View File

@@ -1,5 +0,0 @@
# Project
/.github/CODEOWNERS @Jarred-Sumner
# Tests
/test/expectations.txt @Jarred-Sumner

View File

@@ -1,8 +1,6 @@
name: 🐛 Bug Report
description: Report an issue that should be fixed
labels:
- bug
- needs triage
labels: [bug]
body:
- type: markdown
attributes:
@@ -12,7 +10,7 @@ body:
If you need help or support using Bun, and are not reporting a bug, please
join our [Discord](https://discord.gg/CXdq2DP29u) server, where you can ask questions in the [`#help`](https://discord.gg/32EtH6p7HN) forum.
Make sure you are running the [latest](https://bun.com/docs/installation#upgrading) version of Bun.
Make sure you are running the [latest](https://bun.sh/docs/installation#upgrading) version of Bun.
The bug you are experiencing may already have been fixed.
Please try to include as much information as possible.

View File

@@ -1,45 +1,45 @@
name: 🇹 TypeScript Type Bug Report
description: Report an issue with TypeScript types
labels: [bug, types]
labels: [bug, typescript]
body:
- type: markdown
attributes:
value: |
Thank you for submitting a bug report. It helps make Bun better.
- type: markdown
attributes:
value: |
Thank you for submitting a bug report. It helps make Bun better.
If you need help or support using Bun, and are not reporting a bug, please
join our [Discord](https://discord.gg/CXdq2DP29u) server, where you can ask questions in the [`#help`](https://discord.gg/32EtH6p7HN) forum.
If you need help or support using Bun, and are not reporting a bug, please
join our [Discord](https://discord.gg/CXdq2DP29u) server, where you can ask questions in the [`#help`](https://discord.gg/32EtH6p7HN) forum.
Make sure you are running the [latest](https://bun.com/docs/installation#upgrading) version of Bun.
The bug you are experiencing may already have been fixed.
Make sure you are running the [latest](https://bun.sh/docs/installation#upgrading) version of Bun.
The bug you are experiencing may already have been fixed.
Please try to include as much information as possible.
Please try to include as much information as possible.
- type: input
attributes:
label: What version of Bun is running?
description: Copy the output of `bun --revision`
- type: input
attributes:
label: What platform is your computer?
description: |
For MacOS and Linux: copy the output of `uname -mprs`
For Windows: copy the output of `"$([Environment]::OSVersion | ForEach-Object VersionString) $(if ([Environment]::Is64BitOperatingSystem) { "x64" } else { "x86" })"` in the PowerShell console
- type: textarea
attributes:
label: What steps can reproduce the bug?
description: Explain the bug and provide a code snippet that can reproduce it.
validations:
required: true
- type: textarea
attributes:
label: What is the expected behavior?
description: If possible, please provide text instead of a screenshot.
- type: textarea
attributes:
label: What do you see instead?
description: If possible, please provide text instead of a screenshot.
- type: textarea
attributes:
label: Additional information
description: Is there anything else you think we should know?
- type: input
attributes:
label: What version of Bun is running?
description: Copy the output of `bun --revision`
- type: input
attributes:
label: What platform is your computer?
description: |
For MacOS and Linux: copy the output of `uname -mprs`
For Windows: copy the output of `"$([Environment]::OSVersion | ForEach-Object VersionString) $(if ([Environment]::Is64BitOperatingSystem) { "x64" } else { "x86" })"` in the PowerShell console
- type: textarea
attributes:
label: What steps can reproduce the bug?
description: Explain the bug and provide a code snippet that can reproduce it.
validations:
required: true
- type: textarea
attributes:
label: What is the expected behavior?
description: If possible, please provide text instead of a screenshot.
- type: textarea
attributes:
label: What do you see instead?
description: If possible, please provide text instead of a screenshot.
- type: textarea
attributes:
label: Additional information
description: Is there anything else you think we should know?

View File

@@ -1,28 +0,0 @@
name: Prefilled crash report
description: Report a crash in Bun
labels:
- crash
- needs triage
body:
- type: markdown
attributes:
value: |
**Thank you so much** for submitting a crash report. You're helping us make Bun more reliable for everyone!
- type: textarea
id: code
attributes:
label: How can we reproduce the crash?
description: Please provide a [minimal reproduction](https://stackoverflow.com/help/minimal-reproducible-example) using a GitHub repository, [Replit](https://replit.com/@replit/Bun) or [CodeSandbox](https://codesandbox.io/templates/bun)
- type: textarea
id: logs
attributes:
label: Relevant log output
description: Please copy and paste any relevant log output. This will be
automatically formatted into code, so no need for backticks.
render: shell
- type: textarea
id: remapped_trace
attributes:
label: Stack Trace (bun.report)
validations:
required: true

View File

@@ -1,34 +0,0 @@
name: bun install crash report
description: Report a crash in bun install
labels:
- npm
- crash
body:
- type: markdown
attributes:
value: |
**Thank you so much** for submitting a crash report. You're helping us make Bun more reliable for everyone!
- type: textarea
id: package_json
attributes:
label: "`package.json` file"
description: "Can you upload your `package.json` file? This helps us reproduce the crash."
render: json
- type: textarea
id: repro
attributes:
label: How can we reproduce the crash?
description: Please provide a [minimal reproduction](https://stackoverflow.com/help/minimal-reproducible-example) using a GitHub repository, [Replit](https://replit.com/@replit/Bun) or [CodeSandbox](https://codesandbox.io/templates/bun)
- type: textarea
id: logs
attributes:
label: Relevant log output
description: Please copy and paste any relevant log output. This will be
automatically formatted into code, so no need for backticks.
render: shell
- type: textarea
id: remapped_trace
attributes:
label: Stack Trace (bun.report)
validations:
required: true

View File

@@ -1,43 +0,0 @@
name: Bump version
description: Bump the version of Bun
inputs:
version:
description: The most recent version of Bun.
required: true
type: string
token:
description: The GitHub token to use for creating a pull request.
required: true
type: string
default: ${{ github.token }}
runs:
using: composite
steps:
- name: Run Bump
shell: bash
id: bump
run: |
set -euo pipefail
MESSAGE=$(bun ./scripts/bump.ts patch --last-version=${{ inputs.version }})
LATEST=$(cat LATEST)
echo "version=$LATEST" >> $GITHUB_OUTPUT
echo "message=$MESSAGE" >> $GITHUB_OUTPUT
- name: Create Pull Request
uses: peter-evans/create-pull-request@v4
with:
add-paths: |
CMakeLists.txt
LATEST
token: ${{ inputs.token }}
commit-message: Bump version to ${{ steps.bump.outputs.version }}
title: Bump to ${{ steps.bump.outputs.version }}
delete-branch: true
branch: github-actions/bump-version-${{ steps.bump.outputs.version }}--${{ github.run_id }}
body: |
## What does this PR do?
${{ steps.bump.outputs.message }}
Auto-bumped by [this workflow](https://github.com/oven-sh/bun/actions/workflows/release.yml)

View File

@@ -4,7 +4,7 @@ description: An internal version of the 'oven-sh/setup-bun' action.
inputs:
bun-version:
type: string
description: "The version of bun to install: 'latest', 'canary', 'bun-v1.2.0', etc."
description: "The version of bun to install: 'latest', 'canary', 'bun-v1.0.0', etc."
default: latest
required: false
baseline:
@@ -30,7 +30,7 @@ runs:
*) os=windows;;
esac
case "$(uname -m)" in
arm64 | aarch64) arch=aarch64;;
arm64 | aarch64) arch=arm64;;
*) arch=x64;;
esac
case "${{ inputs.baseline }}" in
@@ -42,10 +42,9 @@ runs:
canary) release="canary";;
*) release="bun-v${{ inputs.bun-version }}";;
esac
curl -LO "${{ inputs.download-url }}/${release}/${target}.zip" --retry 5
curl -LO "${{ inputs.download-url }}/${release}/${target}.zip"
unzip ${target}.zip
mkdir -p ${{ runner.temp }}/.bun/bin
mv ${target}/bun* ${{ runner.temp }}/.bun/bin/
chmod +x ${{ runner.temp }}/.bun/bin/*
ln -fs ${{ runner.temp }}/.bun/bin/bun ${{ runner.temp }}/.bun/bin/bunx
echo "${{ runner.temp }}/.bun/bin" >> ${GITHUB_PATH}

View File

@@ -28,7 +28,7 @@ This adds a new flag --bail to bun test. When set, it will stop running tests af
- [ ] I checked the lifetime of memory allocated to verify it's (1) freed and (2) only freed when it should be
- [ ] I included a test for the new code, or an existing test covers it
- [ ] JSValue used outside of the stack is either wrapped in a JSC.Strong or is JSValueProtect'ed
- [ ] JSValue used outside outside of the stack is either wrapped in a JSC.Strong or is JSValueProtect'ed
- [ ] I wrote TypeScript/JavaScript tests and they pass locally (`bun-debug test test-file-name.test`)
-->

18
.github/workflows/bun-deploy-site.yml vendored Normal file
View File

@@ -0,0 +1,18 @@
# redeploy Vercel site when a file in `docs` changes
# using VERCEL_DEPLOY_HOOK environment variable
name: Deploy site
on:
push:
paths:
- "docs/**"
branches: [main]
jobs:
deploy:
name: Deploy site
runs-on: ubuntu-latest
if: github.repository_owner == 'oven-sh'
steps:
- name: Trigger Vercel build
run: curl ${{ secrets.VERCEL_DEPLOY_HOOK }}

142
.github/workflows/bun-linux-aarch64.yml vendored Normal file
View File

@@ -0,0 +1,142 @@
name: bun-linux
concurrency:
group: bun-linux-aarch64-${{ github.ref }}
cancel-in-progress: true
on:
push:
branches:
- main
paths:
- ".github/workflows/bun-linux-aarch64.yml"
- "src/**/*"
- "test/**/*"
- "packages/bun-usockets/src/**/*"
- "packages/bun-uws/src/**/*"
- "CMakeLists.txt"
- "build.zig"
- "Makefile"
- "Dockerfile"
pull_request:
branches:
- main
paths:
- ".github/workflows/bun-linux-aarch64.yml"
- "src/**/*"
- "test/**/*"
- "packages/bun-usockets/src/**/*"
- "packages/bun-uws/src/**/*"
- "CMakeLists.txt"
- "build.zig"
- "Makefile"
- "Dockerfile"
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
jobs:
linux:
name: ${{matrix.tag}}
runs-on: ${{matrix.runner}}
if: github.repository_owner == 'oven-sh'
timeout-minutes: 90
permissions: write-all
strategy:
matrix:
include:
- cpu: native
tag: linux-aarch64
arch: aarch64
build_arch: arm64
runner: linux-arm64
build_machine_arch: aarch64
steps:
- uses: actions/checkout@v4
with:
submodules: false
ref: ${{github.sha}}
clean: true
- run: |
bash ./scripts/update-submodules.sh
- uses: docker/setup-buildx-action@v3
id: buildx
with:
install: true
- name: Run
run: |
rm -rf ${{runner.temp}}/release
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- run: |
mkdir -p /tmp/.buildx-cache-${{matrix.tag}}
- name: Build and push
uses: docker/build-push-action@v5
with:
context: .
push: false
cache-from: type=local,src=/tmp/.buildx-cache-${{matrix.tag}}
cache-to: type=local,dest=/tmp/.buildx-cache-${{matrix.tag}}
build-args: |
ARCH=${{matrix.arch}}
BUILDARCH=${{matrix.build_arch}}
BUILD_MACHINE_ARCH=${{matrix.build_machine_arch}}
CPU_TARGET=${{matrix.cpu}}
GIT_SHA=${{github.sha}}
platforms: linux/${{matrix.build_arch}}
target: artifact
outputs: type=local,dest=${{runner.temp}}/release
- name: Zip
run: |
# if zip is not found
if [ ! -x "$(command -v zip)" ]; then
sudo apt-get update && sudo apt-get install -y zip --no-install-recommends
fi
if [ ! -x "$(command -v strip)" ]; then
sudo apt-get update && sudo apt-get install -y binutils --no-install-recommends
fi
cd ${{runner.temp}}/release
chmod +x bun-profile bun
mkdir bun-${{matrix.tag}}-profile
mkdir bun-${{matrix.tag}}
strip bun
mv bun-profile bun-${{matrix.tag}}-profile/bun-profile
mv bun bun-${{matrix.tag}}/bun
zip -r bun-${{matrix.tag}}-profile.zip bun-${{matrix.tag}}-profile
zip -r bun-${{matrix.tag}}.zip bun-${{matrix.tag}}
- uses: actions/upload-artifact@v4
with:
name: bun-${{matrix.tag}}-profile
path: ${{runner.temp}}/release/bun-${{matrix.tag}}-profile.zip
if-no-files-found: "error"
- uses: actions/upload-artifact@v4
with:
name: bun-${{matrix.tag}}
path: ${{runner.temp}}/release/bun-${{matrix.tag}}.zip
if-no-files-found: "error"
- name: Release
id: release
uses: ncipollo/release-action@v1
if: |
github.repository_owner == 'oven-sh'
&& github.ref == 'refs/heads/main'
with:
prerelease: true
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
allowUpdates: true
replacesArtifacts: true
generateReleaseNotes: true
artifactErrorsFailBuild: true
token: ${{ secrets.GITHUB_TOKEN }}
name: "Canary (${{github.sha}})"
tag: "canary"
artifacts: "${{runner.temp}}/release/bun-${{matrix.tag}}.zip,${{runner.temp}}/release/bun-${{matrix.tag}}-profile.zip"

319
.github/workflows/bun-linux-build.yml vendored Normal file
View File

@@ -0,0 +1,319 @@
name: bun-linux
concurrency:
group: bun-linux-build-${{ github.ref }}
cancel-in-progress: true
on:
push:
branches:
- main
paths:
- ".github/workflows/bun-linux-build.yml"
- "src/**/*"
- "test/**/*"
- "packages/bun-usockets/src/**/*"
- "packages/bun-uws/src/**/*"
- "CMakeLists.txt"
- "build.zig"
- "Makefile"
- "Dockerfile"
pull_request:
branches:
- main
paths:
- ".github/workflows/bun-linux-build.yml"
- "src/**/*"
- "test/**/*"
- "packages/bun-usockets/src/**/*"
- "packages/bun-uws/src/**/*"
- "CMakeLists.txt"
- "build.zig"
- "Makefile"
- "Dockerfile"
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
jobs:
linux:
name: ${{matrix.tag}}
runs-on: ${{matrix.runner}}
if: github.repository_owner == 'oven-sh'
timeout-minutes: 90
permissions: write-all
strategy:
fail-fast: false
matrix:
include:
- cpu: haswell
tag: linux-x64
arch: x86_64
build_arch: amd64
runner: namespace-profile-bun-linux-x64
build_machine_arch: x86_64
assertions: "OFF"
zig_optimize: "ReleaseFast"
target: "artifact"
- cpu: nehalem
tag: linux-x64-baseline
arch: x86_64
build_arch: amd64
runner: namespace-profile-bun-linux-x64
build_machine_arch: x86_64
assertions: "OFF"
zig_optimize: "ReleaseFast"
target: "artifact"
# - cpu: haswell
# tag: linux-x64-assertions
# arch: x86_64
# build_arch: amd64
# runner: big-ubuntu
# build_machine_arch: x86_64
# assertions: "ON"
# zig_optimize: "ReleaseSafe"
# target: "artifact-assertions"
# - cpu: nehalem
# tag: linux-x64-baseline-assertions
# arch: x86_64
# build_arch: amd64
# runner: big-ubuntu
# build_machine_arch: x86_64
# assertions: "ON"
# zig_optimize: "ReleaseSafe"
# target: "artifact-assertions"
steps:
- uses: actions/checkout@v4
with:
submodules: recursive
ref: ${{github.sha}}
clean: true
- name: Run
run: |
rm -rf ${{runner.temp}}/release
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v5
with:
context: .
push: false
build-args: |
ARCH=${{matrix.arch}}
BUILDARCH=${{matrix.build_arch}}
BUILD_MACHINE_ARCH=${{matrix.build_machine_arch}}
CPU_TARGET=${{matrix.cpu}}
GIT_SHA=${{github.sha}}
ASSERTIONS=${{matrix.assertions}}
ZIG_OPTIMIZE=${{matrix.zig_optimize}}
platforms: linux/${{matrix.build_arch}}
target: ${{matrix.target}}
outputs: type=local,dest=${{runner.temp}}/release
- id: bun-version-check
name: Bun version check
run: |
# If this hangs, it means something is seriously wrong with the build
${{runner.temp}}/release/bun-profile --version
- name: Zip
run: |
# if zip is not found
if [ ! -x "$(command -v zip)" ]; then
sudo apt-get update && sudo apt-get install -y zip --no-install-recommends
fi
if [ ! -x "$(command -v strip)" ]; then
sudo apt-get update && sudo apt-get install -y binutils --no-install-recommends
fi
cd ${{runner.temp}}/release
chmod +x bun-profile bun
mkdir bun-${{matrix.tag}}-profile
mkdir bun-${{matrix.tag}}
strip bun
mv bun-profile bun-${{matrix.tag}}-profile/bun-profile
mv bun bun-${{matrix.tag}}/bun
zip -r bun-${{matrix.tag}}-profile.zip bun-${{matrix.tag}}-profile
zip -r bun-${{matrix.tag}}.zip bun-${{matrix.tag}}
- uses: actions/upload-artifact@v4
with:
name: bun-${{matrix.tag}}-profile
path: ${{runner.temp}}/release/bun-${{matrix.tag}}-profile.zip
if-no-files-found: "error"
- uses: actions/upload-artifact@v4
with:
name: bun-${{matrix.tag}}
path: ${{runner.temp}}/release/bun-${{matrix.tag}}.zip
if-no-files-found: "error"
- name: Release
id: release
uses: ncipollo/release-action@v1
if: |
github.repository_owner == 'oven-sh'
&& github.ref == 'refs/heads/main'
with:
prerelease: true
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
allowUpdates: true
replacesArtifacts: true
generateReleaseNotes: true
artifactErrorsFailBuild: true
token: ${{ secrets.GITHUB_TOKEN }}
name: "Canary (${{github.sha}})"
tag: "canary"
artifacts: "${{runner.temp}}/release/bun-${{matrix.tag}}.zip,${{runner.temp}}/release/bun-${{matrix.tag}}-profile.zip"
- uses: sarisia/actions-status-discord@v1
if: failure() && github.repository_owner == 'oven-sh' && github.event_name == 'pull_request'
with:
title: ""
webhook: ${{ secrets.DISCORD_WEBHOOK }}
status: ${{ job.status }}
noprefix: true
nocontext: true
description: |
Pull Request
### [${{github.event.pull_request.title}}](https://github.com/oven-sh/bun/pull/${{github.event.number}})
@${{ github.actor }}
Build failed on ${{ matrix.tag }}:
**[View build output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
[Commit ${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})
linux-test:
name: Tests ${{matrix.tag}}
runs-on: namespace-profile-bun-linux-x64
needs: [linux]
if: github.event_name == 'pull_request'
timeout-minutes: 20
permissions:
pull-requests: write
outputs:
failing_tests: ${{ steps.test.outputs.failing_tests }}
failing_tests_count: ${{ steps.test.outputs.failing_tests_count }}
strategy:
fail-fast: false
matrix:
include:
- tag: linux-x64
- tag: linux-x64-baseline
# - tag: linux-x64-assertions
# - tag: linux-x64-baseline-assertions
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v4
with:
submodules: false
clean: true
- id: download
name: Download
uses: actions/download-artifact@v4
with:
name: bun-${{matrix.tag}}
path: ${{runner.temp}}/release
- id: install-bun
name: Install Bun
run: |
cd ${{runner.temp}}/release
unzip bun-${{matrix.tag}}.zip
cd bun-${{matrix.tag}}
chmod +x bun
pwd >> $GITHUB_PATH
- id: bun-version-check
name: Bun version check
run: |
# If this hangs, it means something is seriously wrong with the build
bun --version
- id: install-dependencies
name: Install dependencies
run: |
sudo apt-get update && sudo apt-get install -y openssl
bun install --verbose
bun install --cwd=test --verbose
bun install --cwd=packages/bun-internal-test --verbose
bun install --cwd=test/js/third_party/prisma --verbose
# This is disabled because the cores are ~5.5gb each
# so it is easy to hit 50gb coredump downloads. Only enable if you need to retrieve one
# - name: Set core dumps to get stored in /cores
# run: |
# sudo mkdir /cores
# sudo chmod 777 /cores
# # Core filenames will be of the form executable.pid.timestamp:
# sudo bash -c 'echo "/cores/%e.%p.%t" > /proc/sys/kernel/core_pattern'
- id: test
name: Test (node runner)
env:
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
TMPDIR: ${{runner.temp}}
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
# if: ${{github.event.inputs.use_bun == 'false'}}
run: |
ulimit -c unlimited
ulimit -c
node packages/bun-internal-test/src/runner.node.mjs || true
# - uses: actions/upload-artifact@v4
# if: steps.test.outputs.failing_tests != ''
# with:
# name: cores
# path: /cores
# if-no-files-found: "error"
- uses: sarisia/actions-status-discord@v1
if: always() && steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
with:
title: ""
webhook: ${{ secrets.DISCORD_WEBHOOK }}
status: "failure"
noprefix: true
nocontext: true
description: |
Pull Request
### ❌ [${{github.event.pull_request.title}}](https://github.com/oven-sh/bun/pull/${{github.event.number}})
@${{ github.actor }}, there are ${{ steps.test.outputs.failing_tests_count }} files with test failures on ${{ matrix.tag }}:
${{ steps.test.outputs.failing_tests }}
**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
- name: Comment on PR
if: steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: test-failures-${{matrix.tag}}
message: |
❌ @${{ github.actor }} ${{ steps.test.outputs.failing_tests_count }} files with test failures on ${{ matrix.tag }}:
${{ steps.test.outputs.failing_tests }}
**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- name: Uncomment on PR
if: steps.test.outputs.failing_tests == '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: test-failures-${{matrix.tag}}
mode: upsert
create_if_not_exists: false
message: |
✅ test failures on ${{ matrix.tag }} have been resolved.
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- id: fail
name: Fail the build
if: steps.test.outputs.failing_tests != ''
run: exit 1

483
.github/workflows/bun-mac-aarch64.yml vendored Normal file
View File

@@ -0,0 +1,483 @@
name: bun-macOS-aarch64
concurrency:
group: bun-macOS-aarch64-${{ github.ref }}
cancel-in-progress: true
env:
LLVM_VERSION: 16
BUN_DOWNLOAD_URL_BASE: https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/latest
on:
push:
branches: [main]
paths:
- "src/**/*"
- "test/**/*"
- "packages/bun-usockets/src/**/*"
- "packages/bun-uws/src/**/*"
- "CMakeLists.txt"
- "build.zig"
- "Makefile"
- "Dockerfile"
pull_request:
branches: [main]
paths:
- "src/**/*"
- "test/**/*"
- "packages/bun-usockets/src/**/*"
- "packages/bun-uws/src/**/*"
- "CMakeLists.txt"
- "build.zig"
- "Makefile"
- "Dockerfile"
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
jobs:
macOS-zig:
name: macOS Zig Object
runs-on: namespace-profile-zig-build
if: github.repository_owner == 'oven-sh'
strategy:
matrix:
include:
- cpu: native
arch: aarch64
tag: bun-obj-darwin-aarch64
steps:
- uses: actions/checkout@v4
# - name: Checkout submodules
# run: git submodule update --init --recursive --depth=1 --progress --force
- name: Setup Docker Buildx
uses: docker/setup-buildx-action@v3
id: buildx
with:
install: true
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Compile Zig Object
uses: docker/build-push-action@v5
with:
context: .
push: false
# This doesn't seem to work
# cache-from: type=s3,endpoint_url=${{ secrets.CACHE_S3_ENDPOINT }},blobs_prefix=docker_blobs/,manifests_prefix=docker_manifests/,access_key_id=${{ secrets.CACHE_S3_ACCESS_KEY_ID }},secret_access_key=${{ secrets.CACHE_S3_SECRET_ACCESS_KEY }},bucket=bun,region=auto
# cache-to: type=s3,endpoint_url=${{ secrets.CACHE_S3_ENDPOINT }},blobs_prefix=docker_blobs/,manifests_prefix=docker_manifests/,access_key_id=${{ secrets.CACHE_S3_ACCESS_KEY_ID }},secret_access_key=${{ secrets.CACHE_S3_SECRET_ACCESS_KEY }},bucket=bun,region=auto
build-args: |
BUILDARCH=${{ runner.arch == 'X64' && 'amd64' || 'arm64' }}
BUILD_MACHINE_ARCH=${{ runner.arch == 'X64' && 'x86_64' || 'aarch64' }}
ARCH=${{ matrix.arch }}
CPU_TARGET=${{ matrix.cpu }}
TRIPLET=${{ matrix.arch }}-macos-none
GIT_SHA=${{ github.sha }}
platforms: linux/${{ runner.arch == 'X64' && 'amd64' || 'arm64' }}
target: build_release_obj
outputs: type=local,dest=${{runner.temp}}/release
- name: Upload Zig Object
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.tag }}
path: ${{runner.temp}}/release/bun-zig.o
if-no-files-found: "error"
macOS-dependencies:
name: macOS Dependencies
runs-on: ${{ matrix.runner }}
if: github.repository_owner == 'oven-sh'
timeout-minutes: 15
strategy:
matrix:
include:
- cpu: native
arch: aarch64
tag: bun-darwin-aarch64
obj: bun-obj-darwin-aarch64
artifact: bun-obj-darwin-aarch64
runner: macos-13-xlarge
steps:
- uses: actions/checkout@v4
- name: Checkout submodules
run: git submodule update --init --recursive --depth=1 --progress --force
- name: Install system dependencies
env:
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
HOMEBREW_NO_AUTO_UPDATE: 1
HOMEBREW_NO_INSTALL_CLEANUP: 1
run: |
brew install go sccache ccache rust llvm@$LLVM_VERSION pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config --force
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
# echo "$(brew --prefix sccache)/bin" >> $GITHUB_PATH
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@$LLVM_VERSION)/bin" >> $GITHUB_PATH
brew link --overwrite llvm@$LLVM_VERSION
curl -LO "$BUN_DOWNLOAD_URL_BASE/bun-darwin-aarch64.zip"
unzip bun-darwin-aarch64.zip
mkdir -p ${{ runner.temp }}/.bun/bin
mv bun-darwin-aarch64/bun ${{ runner.temp }}/.bun/bin/bun
chmod +x ${{ runner.temp }}/.bun/bin/bun
echo "${{ runner.temp }}/.bun/bin" >> $GITHUB_PATH
- name: Hash submodule versions
run: |
print_data() {
git submodule | grep -v WebKit
llvm-config --version
rustc --version
cat $(echo scripts/build*.sh scripts/all-dependencies.sh | tr " " "\n" | sort)
}
echo "sha=$(print_data | sha1sum | cut -c 1-10)" >> $GITHUB_OUTPUT
id: submodule-versions
- name: Cache submodule dependencies
id: cache-deps-restore
uses: actions/cache/restore@v4
with:
path: ${{runner.temp}}/bun-deps
key: bun-deps-${{ matrix.tag }}-${{ steps.submodule-versions.outputs.sha }}
- name: Compile submodule dependencies
if: ${{ !steps.cache-deps-restore.outputs.cache-hit }}
env:
CPU_TARGET: ${{ matrix.cpu }}
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
mkdir -p $BUN_DEPS_OUT_DIR
bash ./scripts/clean-dependencies.sh
bash ./scripts/all-dependencies.sh
- name: Cache submodule dependencies
if: ${{ !steps.cache-deps-restore.outputs.cache-hit }}
id: cache-deps-save
uses: actions/cache/save@v4
with:
path: ${{runner.temp}}/bun-deps
key: ${{ steps.cache-deps-restore.outputs.cache-primary-key }}
- name: Upload submodule dependencies
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.tag }}-deps
path: ${{runner.temp}}/bun-deps
if-no-files-found: "error"
macOS-cpp:
name: macOS C++
runs-on: ${{ matrix.runner }}
if: github.repository_owner == 'oven-sh'
timeout-minutes: 90
strategy:
matrix:
include:
- cpu: native
arch: aarch64
tag: bun-darwin-aarch64
obj: bun-obj-darwin-aarch64
artifact: bun-obj-darwin-aarch64
runner: macos-13-xlarge
steps:
- uses: actions/checkout@v4
- name: Checkout submodules
run: git submodule update --init --recursive --depth=1 --progress --force
- name: Install system dependencies
env:
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
HOMEBREW_NO_AUTO_UPDATE: 1
HOMEBREW_NO_INSTALL_CLEANUP: 1
run: |
brew install go sccache ccache rust llvm@$LLVM_VERSION pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config --force
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
# echo "$(brew --prefix sccache)/bin" >> $GITHUB_PATH
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@$LLVM_VERSION)/bin" >> $GITHUB_PATH
brew link --overwrite llvm@$LLVM_VERSION
curl -LO "$BUN_DOWNLOAD_URL_BASE/bun-darwin-aarch64.zip"
unzip bun-darwin-aarch64.zip
mkdir -p ${{ runner.temp }}/.bun/bin
mv bun-darwin-aarch64/bun ${{ runner.temp }}/.bun/bin/bun
chmod +x ${{ runner.temp }}/.bun/bin/bun
echo "${{ runner.temp }}/.bun/bin" >> $GITHUB_PATH
# TODO: replace with sccache
- name: ccache
uses: hendrikmuhs/ccache-action@v1.2
with:
key: ${{ runner.os }}-ccache-${{ matrix.tag }}
restore-keys: ${{ runner.os }}-ccache-${{ matrix.tag }}
- name: Compile C++
env:
CPU_TARGET: ${{ matrix.cpu }}
SOURCE_DIR: ${{ github.workspace }}
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
mkdir -p $OBJ_DIR
cd $OBJ_DIR
cmake -S $SOURCE_DIR -B $OBJ_DIR \
-G Ninja \
-DCMAKE_BUILD_TYPE=Release \
-DBUN_CPP_ONLY=1 \
-DNO_CONFIGURE_DEPENDS=1
bash compile-cpp-only.sh -v
- name: Upload C++
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.tag }}-cpp
path: ${{ runner.temp }}/bun-cpp-obj/bun-cpp-objects.a
if-no-files-found: "error"
macOS-link:
name: macOS Link
runs-on: ${{ matrix.runner }}
if: github.repository_owner == 'oven-sh'
needs: [macOS-zig, macOS-cpp, macOS-dependencies]
timeout-minutes: 60
permissions: write-all
strategy:
matrix:
include:
- cpu: native
arch: aarch64
tag: bun-darwin-aarch64
obj: bun-obj-darwin-aarch64
package: bun-darwin-aarch64
artifact: bun-obj-darwin-aarch64
runner: macos-13-xlarge
steps:
- uses: actions/checkout@v4
with:
submodules: recursive
ref: ${{github.sha}}
clean: true
- name: Checkout submodules
run: git submodule update --init --recursive --depth=1 --progress --force
- name: Install system dependencies
env:
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
HOMEBREW_NO_AUTO_UPDATE: 1
HOMEBREW_NO_INSTALL_CLEANUP: 1
run: |
brew install ccache llvm@$LLVM_VERSION pkg-config coreutils libtool cmake libiconv openssl@1.1 ninja --force
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@$LLVM_VERSION)/bin" >> $GITHUB_PATH
brew link --overwrite llvm@$LLVM_VERSION
curl -LO "$BUN_DOWNLOAD_URL_BASE/bun-darwin-${{matrix.arch}}.zip"
unzip bun-darwin-${{matrix.arch}}.zip
mkdir -p ${{ runner.temp }}/.bun/bin
mv bun-darwin-${{matrix.arch}}/bun ${{ runner.temp }}/.bun/bin/bun
chmod +x ${{ runner.temp }}/.bun/bin/bun
echo "${{ runner.temp }}/.bun/bin" >> $GITHUB_PATH
- name: Download C++
uses: actions/download-artifact@v4
with:
name: ${{ matrix.tag }}-cpp
path: ${{ runner.temp }}/bun-cpp-obj
- name: Download Zig Object
uses: actions/download-artifact@v4
with:
name: ${{ matrix.obj }}
path: ${{ runner.temp }}/release
- name: Download submodule dependencies
uses: actions/download-artifact@v4
with:
name: ${{ matrix.tag }}-deps
path: ${{runner.temp}}/bun-deps
- name: Link
env:
CPU_TARGET: ${{ matrix.cpu }}
run: |
SRC_DIR=$PWD
mkdir ${{runner.temp}}/link-build
cd ${{runner.temp}}/link-build
cmake $SRC_DIR \
-G Ninja \
-DCMAKE_BUILD_TYPE=Release \
-DBUN_LINK_ONLY=1 \
-DBUN_ZIG_OBJ="${{ runner.temp }}/release/bun-zig.o" \
-DBUN_CPP_ARCHIVE="${{ runner.temp }}/bun-cpp-obj/bun-cpp-objects.a" \
-DBUN_DEPS_OUT_DIR="${{runner.temp}}/bun-deps" \
-DNO_CONFIGURE_DEPENDS=1
ninja -v
- name: Zip
run: |
cd ${{runner.temp}}/link-build
chmod +x bun-profile bun
mkdir -p ${{matrix.tag}}-profile/ ${{matrix.tag}}/
mv bun-profile ${{matrix.tag}}-profile/bun-profile
mv bun ${{matrix.tag}}/bun
zip -r ${{matrix.tag}}-profile.zip ${{matrix.tag}}-profile
zip -r ${{matrix.tag}}.zip ${{matrix.tag}}
- uses: actions/upload-artifact@v4
with:
name: ${{matrix.tag}}-profile
path: ${{runner.temp}}/link-build/${{matrix.tag}}-profile.zip
if-no-files-found: "error"
- uses: actions/upload-artifact@v4
with:
name: ${{matrix.tag}}
path: ${{runner.temp}}/link-build/${{matrix.tag}}.zip
if-no-files-found: "error"
- name: Release
id: release
uses: ncipollo/release-action@v1
if: |
github.repository_owner == 'oven-sh'
&& github.ref == 'refs/heads/main'
with:
prerelease: true
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
allowUpdates: true
replacesArtifacts: true
generateReleaseNotes: true
artifactErrorsFailBuild: true
token: ${{ secrets.GITHUB_TOKEN }}
name: "Canary (${{github.sha}})"
tag: "canary"
artifacts: "${{runner.temp}}/link-build/${{matrix.tag}}.zip,${{runner.temp}}/link-build/${{matrix.tag}}-profile.zip"
- uses: sarisia/actions-status-discord@v1
if: failure() && github.repository_owner == 'oven-sh' && github.event_name == 'pull_request'
with:
title: ""
webhook: ${{ secrets.DISCORD_WEBHOOK }}
status: ${{ job.status }}
noprefix: true
nocontext: true
description: |
Pull Request
### [${{github.event.pull_request.title}}](https://github.com/oven-sh/bun/pull/${{github.event.number}})
@${{ github.actor }}
Build failed on ${{ matrix.tag }}:
**[View build output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
[Commit ${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})
macOS-test:
name: Tests ${{matrix.tag}}
runs-on: ${{ matrix.runner }}
needs: [macOS-link]
if: github.event_name == 'pull_request' && github.repository_owner == 'oven-sh'
permissions:
pull-requests: write
timeout-minutes: 30
outputs:
failing_tests: ${{ steps.test.outputs.failing_tests }}
failing_tests_count: ${{ steps.test.outputs.failing_tests_count }}
strategy:
fail-fast: false
matrix:
include:
- tag: bun-darwin-aarch64
runner: macos-13-xlarge
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v4
with:
submodules: false
- id: download
name: Download
uses: actions/download-artifact@v4
with:
name: ${{matrix.tag}}
path: ${{runner.temp}}/release
- id: install-bun
name: Install Bun
run: |
cd ${{runner.temp}}/release
unzip ${{matrix.tag}}.zip
cd ${{matrix.tag}}
chmod +x bun
pwd >> $GITHUB_PATH
- id: bun-version-check
name: Bun version check
run: |
# If this hangs, it means something is seriously wrong with the build
bun --version
- id: install
name: Install dependencies
run: |
bun install --verbose
bun install --cwd=test --verbose
bun install --cwd=packages/bun-internal-test --verbose
- id: test
name: Test (node runner)
env:
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
TMPDIR: ${{runner.temp}}
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
# if: ${{github.event.inputs.use_bun == 'false'}}
run: |
node packages/bun-internal-test/src/runner.node.mjs || true
- uses: sarisia/actions-status-discord@v1
if: always() && steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
with:
title: ""
webhook: ${{ secrets.DISCORD_WEBHOOK }}
status: "failure"
noprefix: true
nocontext: true
description: |
Pull Request
### ❌ [${{github.event.pull_request.title}}](https://github.com/oven-sh/bun/pull/${{github.event.number}})
@${{ github.actor }}, there are ${{ steps.test.outputs.failing_tests_count }} files with test failures on ${{ matrix.tag }}:
${{ steps.test.outputs.failing_tests }}
**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
- name: Comment on PR
if: steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: test-failures-${{matrix.tag}}
message: |
❌ @${{ github.actor }} ${{ steps.test.outputs.failing_tests_count }} files with test failures on ${{ matrix.tag }}:
${{ steps.test.outputs.failing_tests }}
**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- name: Uncomment on PR
if: steps.test.outputs.failing_tests == '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: test-failures-${{matrix.tag}}
mode: upsert
create_if_not_exists: false
message: |
✅ Test failures on ${{ matrix.tag }} have been resolved.
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- id: fail
name: Fail the build
if: steps.test.outputs.failing_tests != ''
run: exit 1
@@ -0,0 +1,467 @@
name: bun-macOS-x64-baseline
concurrency:
group: bun-macOS-x64-baseline-${{ github.ref }}
cancel-in-progress: true
env:
LLVM_VERSION: 16
BUN_DOWNLOAD_URL_BASE: https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/latest
on:
push:
branches: [main]
paths:
- "src/**/*"
- "test/**/*"
- "packages/bun-usockets/src/**/*"
- "packages/bun-uws/src/**/*"
- "CMakeLists.txt"
- "build.zig"
- "Makefile"
- "Dockerfile"
pull_request:
branches: [main]
paths:
- "src/**/*"
- "test/**/*"
- "packages/bun-usockets/src/**/*"
- "packages/bun-uws/src/**/*"
- "CMakeLists.txt"
- "build.zig"
- "Makefile"
- "Dockerfile"
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
jobs:
macos-object-files:
name: macOS Object
runs-on: namespace-profile-zig-build
if: github.repository_owner == 'oven-sh'
strategy:
matrix:
include:
- cpu: nehalem
arch: x86_64
tag: bun-obj-darwin-x64-baseline
# - cpu: haswell
# arch: x86_64
# tag: bun-obj-darwin-x64
# - cpu: native
# arch: aarch64
# tag: bun-obj-darwin-aarch64
steps:
- uses: actions/checkout@v4
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Compile Zig Object
uses: docker/build-push-action@v5
with:
context: .
push: false
build-args: |
BUILDARCH=${{ runner.arch == 'X64' && 'amd64' || 'arm64' }}
BUILD_MACHINE_ARCH=${{ runner.arch == 'X64' && 'x86_64' || 'aarch64' }}
ARCH=${{ matrix.arch }}
CPU_TARGET=${{ matrix.cpu }}
TRIPLET=${{ matrix.arch }}-macos-none
GIT_SHA=${{ github.sha }}
platforms: linux/${{ runner.arch == 'X64' && 'amd64' || 'arm64' }}
target: build_release_obj
outputs: type=local,dest=${{runner.temp}}/release
- name: Upload Zig Object
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.tag }}
path: ${{runner.temp}}/release/bun-zig.o
if-no-files-found: "error"
macOS-dependencies:
name: macOS Dependencies
runs-on: ${{ matrix.runner }}
if: github.repository_owner == 'oven-sh'
timeout-minutes: 15
strategy:
matrix:
include:
- cpu: nehalem
arch: x86_64
tag: bun-darwin-x64-baseline
obj: bun-obj-darwin-x64-baseline
runner: macos-12-large
artifact: bun-obj-darwin-x64-baseline
steps:
- uses: actions/checkout@v4
- name: Checkout submodules
run: git submodule update --init --recursive --depth=1 --progress --force
- name: Install system dependencies
env:
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
HOMEBREW_NO_AUTO_UPDATE: 1
HOMEBREW_NO_INSTALL_CLEANUP: 1
run: |
brew install sccache ccache rust llvm@$LLVM_VERSION pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed --force
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
# echo "$(brew --prefix sccache)/bin" >> $GITHUB_PATH
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@$LLVM_VERSION)/bin" >> $GITHUB_PATH
brew link --overwrite llvm@$LLVM_VERSION
- name: Hash submodule versions
run: |
print_data() {
git submodule | grep -v WebKit
llvm-config --version
rustc --version
cat $(echo scripts/build*.sh scripts/all-dependencies.sh | tr " " "\n" | sort)
}
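# SHA1 the combined fingerprint; the first 10 hex chars become the cache-key suffix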
echo "sha=$(print_data | sha1sum | cut -c 1-10)" >> $GITHUB_OUTPUT
id: submodule-versions
- name: Cache submodule dependencies
id: cache-deps-restore
uses: actions/cache/restore@v4
with:
path: ${{runner.temp}}/bun-deps
key: bun-deps-${{ matrix.tag }}-${{ steps.submodule-versions.outputs.sha }}
- name: Compile submodule dependencies
if: ${{ !steps.cache-deps-restore.outputs.cache-hit }}
env:
CPU_TARGET: ${{ matrix.cpu }}
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
mkdir -p $BUN_DEPS_OUT_DIR
bash ./scripts/clean-dependencies.sh
bash ./scripts/all-dependencies.sh
- name: Cache submodule dependencies
if: ${{ !steps.cache-deps-restore.outputs.cache-hit }}
id: cache-deps-save
uses: actions/cache/save@v4
with:
path: ${{runner.temp}}/bun-deps
key: ${{ steps.cache-deps-restore.outputs.cache-primary-key }}
- name: Upload submodule dependencies
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.tag }}-deps
path: ${{runner.temp}}/bun-deps
if-no-files-found: "error"
macOS-cpp:
name: macOS C++
runs-on: ${{ matrix.runner }}
if: github.repository_owner == 'oven-sh'
timeout-minutes: 90
strategy:
matrix:
include:
- cpu: nehalem
arch: x86_64
tag: bun-darwin-x64-baseline
obj: bun-obj-darwin-x64-baseline
runner: macos-12-large
artifact: bun-obj-darwin-x64-baseline
steps:
- uses: actions/checkout@v4
- name: Checkout submodules
run: git submodule update --init --recursive --depth=1 --progress --force
- name: Install system dependencies
env:
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
HOMEBREW_NO_AUTO_UPDATE: 1
HOMEBREW_NO_INSTALL_CLEANUP: 1
run: |
brew install sccache ccache rust llvm@$LLVM_VERSION pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed --force
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
# echo "$(brew --prefix sccache)/bin" >> $GITHUB_PATH
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@$LLVM_VERSION)/bin" >> $GITHUB_PATH
brew link --overwrite llvm@$LLVM_VERSION
curl -LO "$BUN_DOWNLOAD_URL_BASE/bun-darwin-x64-baseline.zip"
unzip bun-darwin-x64-baseline.zip
mkdir -p ${{ runner.temp }}/.bun/bin
mv bun-darwin-x64-baseline/bun ${{ runner.temp }}/.bun/bin/bun
chmod +x ${{ runner.temp }}/.bun/bin/bun
echo "${{ runner.temp }}/.bun/bin" >> $GITHUB_PATH
# TODO: replace with sccache
- name: ccache
uses: hendrikmuhs/ccache-action@v1.2
with:
key: ${{ runner.os }}-ccache-${{ matrix.tag }}
restore-keys: ${{ runner.os }}-ccache-${{ matrix.tag }}
- name: Compile C++
env:
CPU_TARGET: ${{ matrix.cpu }}
SOURCE_DIR: ${{ github.workspace }}
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
mkdir -p $OBJ_DIR
cd $OBJ_DIR
cmake -S $SOURCE_DIR -B $OBJ_DIR \
-G Ninja \
-DCMAKE_BUILD_TYPE=Release \
-DBUN_CPP_ONLY=1 \
-DNO_CONFIGURE_DEPENDS=1
bash compile-cpp-only.sh -v
- name: Upload C++
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.tag }}-cpp
path: ${{ runner.temp }}/bun-cpp-obj/bun-cpp-objects.a
if-no-files-found: "error"
macOS:
name: macOS Link
runs-on: ${{ matrix.runner }}
if: github.repository_owner == 'oven-sh'
needs: [macOS-cpp, macos-object-files, macOS-dependencies]
timeout-minutes: 90
permissions: write-all
strategy:
matrix:
include:
- cpu: nehalem
arch: x86_64
tag: bun-darwin-x64-baseline
obj: bun-obj-darwin-x64-baseline
package: bun-darwin-x64
runner: macos-12-large
artifact: bun-obj-darwin-x64-baseline
steps:
- uses: actions/checkout@v4
- name: Checkout submodules
run: git submodule update --init --recursive --depth=1 --progress --force
- name: Install system dependencies
env:
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
HOMEBREW_NO_AUTO_UPDATE: 1
HOMEBREW_NO_INSTALL_CLEANUP: 1
run: |
brew install ccache llvm@$LLVM_VERSION pkg-config coreutils libtool cmake libiconv openssl@1.1 ninja --force
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@$LLVM_VERSION)/bin" >> $GITHUB_PATH
brew link --overwrite llvm@$LLVM_VERSION
curl -LO "$BUN_DOWNLOAD_URL_BASE/bun-darwin-x64-baseline.zip"
unzip bun-darwin-x64-baseline.zip
mkdir -p ${{ runner.temp }}/.bun/bin
mv bun-darwin-x64-baseline/bun ${{ runner.temp }}/.bun/bin/bun
chmod +x ${{ runner.temp }}/.bun/bin/bun
echo "${{ runner.temp }}/.bun/bin" >> $GITHUB_PATH
- name: Download C++
uses: actions/download-artifact@v4
with:
name: ${{ matrix.tag }}-cpp
path: ${{ runner.temp }}/bun-cpp-obj
- name: Download Zig Object
uses: actions/download-artifact@v4
with:
name: ${{ matrix.obj }}
path: ${{ runner.temp }}/release
- name: Download submodule dependencies
uses: actions/download-artifact@v4
with:
name: ${{ matrix.tag }}-deps
path: ${{runner.temp}}/bun-deps
- name: Link
env:
CPU_TARGET: ${{ matrix.cpu }}
run: |
SRC_DIR=$PWD
mkdir ${{runner.temp}}/link-build
cd ${{runner.temp}}/link-build
cmake $SRC_DIR \
-G Ninja \
-DCMAKE_BUILD_TYPE=Release \
-DBUN_LINK_ONLY=1 \
-DBUN_ZIG_OBJ="${{ runner.temp }}/release/bun-zig.o" \
-DBUN_CPP_ARCHIVE="${{ runner.temp }}/bun-cpp-obj/bun-cpp-objects.a" \
-DBUN_DEPS_OUT_DIR="${{runner.temp}}/bun-deps" \
-DNO_CONFIGURE_DEPENDS=1
ninja -v
- name: Zip
run: |
cd ${{runner.temp}}/link-build
chmod +x bun-profile bun
mkdir -p ${{matrix.tag}}-profile/ ${{matrix.tag}}/
mv bun-profile ${{matrix.tag}}-profile/bun-profile
mv bun ${{matrix.tag}}/bun
zip -r ${{matrix.tag}}-profile.zip ${{matrix.tag}}-profile
zip -r ${{matrix.tag}}.zip ${{matrix.tag}}
- uses: actions/upload-artifact@v4
with:
name: ${{matrix.tag}}-profile
path: ${{runner.temp}}/link-build/${{matrix.tag}}-profile.zip
if-no-files-found: "error"
- uses: actions/upload-artifact@v4
with:
name: ${{matrix.tag}}
path: ${{runner.temp}}/link-build/${{matrix.tag}}.zip
if-no-files-found: "error"
- name: Release
id: release
uses: ncipollo/release-action@v1
if: |
github.repository_owner == 'oven-sh'
&& github.ref == 'refs/heads/main'
with:
prerelease: true
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
allowUpdates: true
replacesArtifacts: true
generateReleaseNotes: true
artifactErrorsFailBuild: true
token: ${{ secrets.GITHUB_TOKEN }}
name: "Canary (${{github.sha}})"
tag: "canary"
artifacts: "${{runner.temp}}/link-build/${{matrix.tag}}.zip,${{runner.temp}}/link-build/${{matrix.tag}}-profile.zip"
- uses: sarisia/actions-status-discord@v1
if: failure() && github.repository_owner == 'oven-sh' && github.event_name == 'pull_request'
with:
title: ""
webhook: ${{ secrets.DISCORD_WEBHOOK }}
status: ${{ job.status }}
noprefix: true
nocontext: true
description: |
Pull Request
### [${{github.event.pull_request.title}}](https://github.com/oven-sh/bun/pull/${{github.event.number}})
@${{ github.actor }}
Build failed on ${{ matrix.tag }}:
**[View build output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
[Commit ${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})
macOS-test:
name: macOS Test
runs-on: ${{ matrix.runner }}
needs: [macOS]
# if: github.event_name == 'pull_request' && github.repository_owner == 'oven-sh'
if: false
permissions:
pull-requests: write
timeout-minutes: 30
outputs:
failing_tests: ${{ steps.test.outputs.failing_tests }}
failing_tests_count: ${{ steps.test.outputs.failing_tests_count }}
strategy:
fail-fast: false
matrix:
include:
- tag: bun-darwin-x64-baseline
runner: macos-12-large
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v4
with:
submodules: false
- id: download
name: Download
uses: actions/download-artifact@v4
with:
name: ${{matrix.tag}}
path: ${{runner.temp}}/release
- id: install-bun
name: Install Bun
run: |
cd ${{runner.temp}}/release
unzip ${{matrix.tag}}.zip
cd ${{matrix.tag}}
chmod +x bun
pwd >> $GITHUB_PATH
- id: bun-version-check
name: Bun version check
run: |
# If this hangs, it means something is seriously wrong with the build
bun --version
- id: install
name: Install dependencies
run: |
bun install --verbose
bun install --cwd=test --verbose
bun install --cwd=packages/bun-internal-test --verbose
- id: test
name: Test (node runner)
env:
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
TMPDIR: ${{runner.temp}}
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
# if: ${{github.event.inputs.use_bun == 'false'}}
run: |
node packages/bun-internal-test/src/runner.node.mjs || true
- uses: sarisia/actions-status-discord@v1
if: always() && steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
with:
title: ""
webhook: ${{ secrets.DISCORD_WEBHOOK }}
status: "failure"
noprefix: true
nocontext: true
description: |
Pull Request
### ❌ [${{github.event.pull_request.title}}](https://github.com/oven-sh/bun/pull/${{github.event.number}})
Hey @${{ github.actor }},
${{ steps.test.outputs.failing_tests_count }} files with test failures on ${{ matrix.tag }}:
${{ steps.test.outputs.failing_tests }}
**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
- name: Comment on PR
if: steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: test-failures-${{matrix.tag}}
message: |
❌ @${{ github.actor }} ${{ steps.test.outputs.failing_tests_count }} files with test failures on ${{ matrix.tag }}:
${{ steps.test.outputs.failing_tests }}
**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- name: Uncomment on PR
if: steps.test.outputs.failing_tests == '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: test-failures-${{matrix.tag}}
mode: upsert
create_if_not_exists: false
message: |
✅ Test failures on ${{ matrix.tag }} have been resolved.
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- id: fail
name: Fail the build
if: steps.test.outputs.failing_tests != ''
run: exit 1
.github/workflows/bun-mac-x64.yml
@@ -0,0 +1,461 @@
name: bun-macOS-x64
concurrency:
group: bun-macOS-x64-${{ github.ref }}
cancel-in-progress: true
env:
LLVM_VERSION: 16
BUN_DOWNLOAD_URL_BASE: https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/latest
on:
push:
branches: [main]
paths:
- "src/**/*"
- "test/**/*"
- "packages/bun-usockets/src/**/*"
- "packages/bun-uws/src/**/*"
- "CMakeLists.txt"
- "build.zig"
- "Makefile"
- "Dockerfile"
pull_request:
branches: [main]
paths:
- "src/**/*"
- "test/**/*"
- "packages/bun-usockets/src/**/*"
- "packages/bun-uws/src/**/*"
- "CMakeLists.txt"
- "build.zig"
- "Makefile"
- "Dockerfile"
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
jobs:
macOS-zig:
name: macOS Zig Object
runs-on: namespace-profile-zig-build
if: github.repository_owner == 'oven-sh'
strategy:
matrix:
include:
# - cpu: nehalem
# arch: x86_64
# tag: bun-obj-darwin-x64-baseline
- cpu: haswell
arch: x86_64
tag: bun-obj-darwin-x64
steps:
- uses: actions/checkout@v4
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Compile Zig Object
uses: docker/build-push-action@v5
with:
context: .
push: false
build-args: |
BUILDARCH=${{ runner.arch == 'X64' && 'amd64' || 'arm64' }}
BUILD_MACHINE_ARCH=${{ runner.arch == 'X64' && 'x86_64' || 'aarch64' }}
ARCH=${{ matrix.arch }}
CPU_TARGET=${{ matrix.cpu }}
TRIPLET=${{ matrix.arch }}-macos-none
GIT_SHA=${{ github.sha }}
platforms: linux/${{ runner.arch == 'X64' && 'amd64' || 'arm64' }}
target: build_release_obj
outputs: type=local,dest=${{runner.temp}}/release
- name: Upload Zig Object
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.tag }}
path: ${{runner.temp}}/release/bun-zig.o
if-no-files-found: "error"
macOS-dependencies:
name: macOS Dependencies
runs-on: ${{ matrix.runner }}
if: github.repository_owner == 'oven-sh'
timeout-minutes: 15
strategy:
matrix:
include:
- cpu: haswell
arch: x86_64
tag: bun-darwin-x64
obj: bun-obj-darwin-x64
runner: macos-12-large
artifact: bun-obj-darwin-x64
steps:
- uses: actions/checkout@v4
- name: Checkout submodules
run: git submodule update --init --recursive --depth=1 --progress --force
- name: Install system dependencies
env:
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
HOMEBREW_NO_AUTO_UPDATE: 1
HOMEBREW_NO_INSTALL_CLEANUP: 1
run: |
brew install sccache ccache rust llvm@$LLVM_VERSION pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed --force
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
# echo "$(brew --prefix sccache)/bin" >> $GITHUB_PATH
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@$LLVM_VERSION)/bin" >> $GITHUB_PATH
brew link --overwrite llvm@$LLVM_VERSION
- name: Hash submodule versions
run: |
print_data() {
git submodule | grep -v WebKit
llvm-config --version
rustc --version
cat $(echo scripts/build*.sh scripts/all-dependencies.sh | tr " " "\n" | sort)
}
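# SHA1 the combined fingerprint; the first 10 hex chars become the cache-key suffix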
echo "sha=$(print_data | sha1sum | cut -c 1-10)" >> $GITHUB_OUTPUT
id: submodule-versions
- name: Cache submodule dependencies
id: cache-deps-restore
uses: actions/cache/restore@v4
with:
path: ${{runner.temp}}/bun-deps
key: bun-deps-${{ matrix.tag }}-${{ steps.submodule-versions.outputs.sha }}
- name: Compile submodule dependencies
if: ${{ !steps.cache-deps-restore.outputs.cache-hit }}
env:
CPU_TARGET: ${{ matrix.cpu }}
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
mkdir -p $BUN_DEPS_OUT_DIR
bash ./scripts/clean-dependencies.sh
bash ./scripts/all-dependencies.sh
- name: Cache submodule dependencies
if: ${{ !steps.cache-deps-restore.outputs.cache-hit }}
id: cache-deps-save
uses: actions/cache/save@v4
with:
path: ${{runner.temp}}/bun-deps
key: ${{ steps.cache-deps-restore.outputs.cache-primary-key }}
- name: Upload submodule dependencies
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.tag }}-deps
path: ${{runner.temp}}/bun-deps
if-no-files-found: "error"
macOS-cpp:
name: macOS C++
runs-on: ${{ matrix.runner }}
if: github.repository_owner == 'oven-sh'
timeout-minutes: 90
strategy:
matrix:
include:
- cpu: haswell
arch: x86_64
tag: bun-darwin-x64
obj: bun-obj-darwin-x64
runner: macos-12-large
artifact: bun-obj-darwin-x64
steps:
- uses: actions/checkout@v4
- name: Checkout submodules
run: git submodule update --init --recursive --depth=1 --progress --force
- name: Install system dependencies
env:
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
HOMEBREW_NO_AUTO_UPDATE: 1
HOMEBREW_NO_INSTALL_CLEANUP: 1
run: |
brew install sccache ccache rust llvm@$LLVM_VERSION pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed --force
# echo "$(brew --prefix sccache)/bin" >> $GITHUB_PATH
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@$LLVM_VERSION)/bin" >> $GITHUB_PATH
brew link --overwrite llvm@$LLVM_VERSION
curl -LO "$BUN_DOWNLOAD_URL_BASE/bun-darwin-x64.zip"
unzip bun-darwin-x64.zip
mkdir -p ${{ runner.temp }}/.bun/bin
mv bun-darwin-x64/bun ${{ runner.temp }}/.bun/bin/bun
chmod +x ${{ runner.temp }}/.bun/bin/bun
echo "${{ runner.temp }}/.bun/bin" >> $GITHUB_PATH
# TODO: replace with sccache
- name: ccache
uses: hendrikmuhs/ccache-action@v1.2
with:
key: ${{ runner.os }}-ccache-${{ matrix.tag }}
restore-keys: ${{ runner.os }}-ccache-${{ matrix.tag }}
- name: Compile C++
env:
CPU_TARGET: ${{ matrix.cpu }}
SOURCE_DIR: ${{ github.workspace }}
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
mkdir -p $OBJ_DIR
cd $OBJ_DIR
cmake -S $SOURCE_DIR -B $OBJ_DIR \
-G Ninja \
-DCMAKE_BUILD_TYPE=Release \
-DBUN_CPP_ONLY=1 \
-DNO_CONFIGURE_DEPENDS=1
bash compile-cpp-only.sh -v
- name: Upload C++
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.tag }}-cpp
path: ${{ runner.temp }}/bun-cpp-obj/bun-cpp-objects.a
if-no-files-found: "error"
macOS:
name: macOS Link
runs-on: ${{ matrix.runner }}
if: github.repository_owner == 'oven-sh'
needs: [macOS-cpp, macOS-zig, macOS-dependencies]
timeout-minutes: 90
permissions: write-all
strategy:
matrix:
include:
- cpu: haswell
arch: x86_64
tag: bun-darwin-x64
obj: bun-obj-darwin-x64
package: bun-darwin-x64
runner: macos-12-large
artifact: bun-obj-darwin-x64
steps:
- uses: actions/checkout@v4
- name: Checkout submodules
run: git submodule update --init --recursive --depth=1 --progress --force
- name: Install system dependencies
env:
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
HOMEBREW_NO_AUTO_UPDATE: 1
HOMEBREW_NO_INSTALL_CLEANUP: 1
run: |
brew install ccache llvm@$LLVM_VERSION pkg-config coreutils libtool cmake libiconv openssl@1.1 ninja --force
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@$LLVM_VERSION)/bin" >> $GITHUB_PATH
brew link --overwrite llvm@$LLVM_VERSION
curl -LO "$BUN_DOWNLOAD_URL_BASE/bun-darwin-x64-baseline.zip"
unzip bun-darwin-x64-baseline.zip
mkdir -p ${{ runner.temp }}/.bun/bin
mv bun-darwin-x64-baseline/bun ${{ runner.temp }}/.bun/bin/bun
chmod +x ${{ runner.temp }}/.bun/bin/bun
echo "${{ runner.temp }}/.bun/bin" >> $GITHUB_PATH
- name: Download C++
uses: actions/download-artifact@v4
with:
name: ${{ matrix.tag }}-cpp
path: ${{ runner.temp }}/bun-cpp-obj
- name: Download Zig Object
uses: actions/download-artifact@v4
with:
name: ${{ matrix.obj }}
path: ${{ runner.temp }}/release
- name: Download submodule dependencies
uses: actions/download-artifact@v4
with:
name: ${{ matrix.tag }}-deps
path: ${{runner.temp}}/bun-deps
- name: Link
env:
CPU_TARGET: ${{ matrix.cpu }}
run: |
SRC_DIR=$PWD
mkdir ${{runner.temp}}/link-build
cd ${{runner.temp}}/link-build
cmake $SRC_DIR \
-G Ninja \
-DCMAKE_BUILD_TYPE=Release \
-DBUN_LINK_ONLY=1 \
-DBUN_ZIG_OBJ="${{ runner.temp }}/release/bun-zig.o" \
-DBUN_CPP_ARCHIVE="${{ runner.temp }}/bun-cpp-obj/bun-cpp-objects.a" \
-DBUN_DEPS_OUT_DIR="${{runner.temp}}/bun-deps" \
-DNO_CONFIGURE_DEPENDS=1
ninja -v
- name: Zip
run: |
cd ${{runner.temp}}/link-build
chmod +x bun-profile bun
mkdir -p ${{matrix.tag}}-profile/ ${{matrix.tag}}/
mv bun-profile ${{matrix.tag}}-profile/bun-profile
mv bun ${{matrix.tag}}/bun
zip -r ${{matrix.tag}}-profile.zip ${{matrix.tag}}-profile
zip -r ${{matrix.tag}}.zip ${{matrix.tag}}
- uses: actions/upload-artifact@v4
with:
name: ${{matrix.tag}}-profile
path: ${{runner.temp}}/link-build/${{matrix.tag}}-profile.zip
if-no-files-found: "error"
- uses: actions/upload-artifact@v4
with:
name: ${{matrix.tag}}
path: ${{runner.temp}}/link-build/${{matrix.tag}}.zip
if-no-files-found: "error"
- name: Release
id: release
uses: ncipollo/release-action@v1
if: |
github.repository_owner == 'oven-sh'
&& github.ref == 'refs/heads/main'
with:
prerelease: true
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
allowUpdates: true
replacesArtifacts: true
generateReleaseNotes: true
artifactErrorsFailBuild: true
token: ${{ secrets.GITHUB_TOKEN }}
name: "Canary (${{github.sha}})"
tag: "canary"
artifacts: "${{runner.temp}}/link-build/${{matrix.tag}}.zip,${{runner.temp}}/link-build/${{matrix.tag}}-profile.zip"
- uses: sarisia/actions-status-discord@v1
if: failure() && github.repository_owner == 'oven-sh' && github.event_name == 'pull_request'
with:
title: ""
webhook: ${{ secrets.DISCORD_WEBHOOK }}
status: ${{ job.status }}
noprefix: true
nocontext: true
description: |
Pull Request
### [${{github.event.pull_request.title}}](https://github.com/oven-sh/bun/pull/${{github.event.number}})
@${{ github.actor }}
Build failed on ${{ matrix.tag }}:
**[View build output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
[Commit ${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})
macOS-test:
name: Tests ${{matrix.tag}}
runs-on: ${{ matrix.runner }}
needs: [macOS]
if: github.event_name == 'pull_request' && github.repository_owner == 'oven-sh'
permissions:
pull-requests: write
timeout-minutes: 30
outputs:
failing_tests: ${{ steps.test.outputs.failing_tests }}
failing_tests_count: ${{ steps.test.outputs.failing_tests_count }}
strategy:
fail-fast: false
matrix:
include:
- tag: bun-darwin-x64
runner: macos-12-large
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v4
with:
submodules: false
- id: download
name: Download
uses: actions/download-artifact@v4
with:
name: ${{matrix.tag}}
path: ${{runner.temp}}/release
- id: install-bun
name: Install Bun
run: |
cd ${{runner.temp}}/release
unzip ${{matrix.tag}}.zip
cd ${{matrix.tag}}
chmod +x bun
pwd >> $GITHUB_PATH
- id: bun-version-check
name: Bun version check
run: |
# If this hangs, it means something is seriously wrong with the build
bun --version
- id: install
name: Install dependencies
run: |
bun install --verbose
bun install --cwd=test --verbose
bun install --cwd=packages/bun-internal-test --verbose
- id: test
name: Test (node runner)
env:
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
TMPDIR: ${{runner.temp}}
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
# if: ${{github.event.inputs.use_bun == 'false'}}
run: |
node packages/bun-internal-test/src/runner.node.mjs || true
- uses: sarisia/actions-status-discord@v1
if: always() && steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
with:
title: ""
webhook: ${{ secrets.DISCORD_WEBHOOK }}
status: "failure"
noprefix: true
nocontext: true
description: |
Pull Request
### ❌ [${{github.event.pull_request.title}}](https://github.com/oven-sh/bun/pull/${{github.event.number}})
@${{ github.actor }}, there are ${{ steps.test.outputs.failing_tests_count }} files with test failures on ${{ matrix.tag }}:
${{ steps.test.outputs.failing_tests }}
**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
- name: Comment on PR
if: steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: test-failures-${{matrix.tag}}
message: |
❌ @${{ github.actor }} ${{ steps.test.outputs.failing_tests_count }} files with test failures on ${{ matrix.tag }}:
${{ steps.test.outputs.failing_tests }}
**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- name: Uncomment on PR
if: steps.test.outputs.failing_tests == '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: test-failures-${{matrix.tag}}
mode: upsert
create_if_not_exists: false
message: |
✅ Test failures on ${{ matrix.tag }} have been resolved.
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- id: fail
name: Fail the build
if: steps.test.outputs.failing_tests != ''
run: exit 1
.github/workflows/bun-release.yml
@@ -0,0 +1,269 @@
name: bun-release
concurrency: release
env:
BUN_VERSION: ${{ github.event.inputs.tag || github.event.release.tag_name || 'canary' }}
BUN_LATEST: ${{ (github.event.inputs.is-latest || github.event.release.tag_name) && 'true' || 'false' }}
on:
release:
types:
- published
schedule:
- cron: "0 14 * * *" # every day at 6am PST
workflow_dispatch:
inputs:
is-latest:
description: Is this the latest release?
type: boolean
default: false
tag:
type: string
description: What is the release tag? (e.g. "1.0.2", "canary")
required: true
use-docker:
description: Should Docker images be released?
type: boolean
default: false
use-npm:
description: Should npm packages be published?
type: boolean
default: false
use-homebrew:
description: Should binaries be released to Homebrew?
type: boolean
default: false
use-s3:
description: Should binaries be uploaded to S3?
type: boolean
default: false
use-types:
description: Should types be released to npm?
type: boolean
default: false
jobs:
sign:
name: Sign Release
runs-on: ubuntu-latest
if: ${{ github.repository_owner == 'oven-sh' }}
permissions:
contents: write
defaults:
run:
working-directory: packages/bun-release
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup GPG
uses: crazy-max/ghaction-import-gpg@v5
with:
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
passphrase: ${{ secrets.GPG_PASSPHRASE }}
- name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: "1.0.21"
- name: Install Dependencies
run: bun install
- name: Sign Release
run: |
echo "$GPG_PASSPHRASE" | bun upload-assets -- "${{ env.BUN_VERSION }}"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
npm:
name: Release to NPM
runs-on: ubuntu-latest
needs: sign
if: ${{ github.event_name != 'workflow_dispatch' || github.event.inputs.use-npm == 'true' }}
permissions:
contents: read
defaults:
run:
working-directory: packages/bun-release
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: "1.0.21"
- name: Install Dependencies
run: bun install
- name: Release
run: bun upload-npm -- "${{ env.BUN_VERSION }}" publish
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
npm-types:
name: Release types to NPM
runs-on: ubuntu-latest
needs: sign
if: ${{ github.event_name != 'workflow_dispatch' || github.event.inputs.use-types == 'true' }}
permissions:
contents: read
defaults:
run:
working-directory: packages/bun-types
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup Node.js
uses: actions/setup-node@v3
with:
node-version: latest
- name: Setup Bun
if: ${{ env.BUN_VERSION != 'canary' }}
uses: oven-sh/setup-bun@v1
with:
bun-version: "1.0.21"
- name: Setup Bun
if: ${{ env.BUN_VERSION == 'canary' }}
uses: oven-sh/setup-bun@v1
with:
bun-version: "canary" # Must be 'canary' so tag is correct
- name: Install Dependencies
run: bun install
- name: Setup Tag
if: ${{ env.BUN_VERSION == 'canary' }}
run: |
VERSION=$(bun --version)
TAG="${VERSION}-canary.$(date +'%Y%m%dT%H%M%S')"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- name: Build
run: bun run build
env:
BUN_VERSION: ${{ env.TAG || env.BUN_VERSION }}
- name: Release (canary)
if: ${{ env.BUN_VERSION == 'canary' }}
uses: JS-DevTools/npm-publish@v1
with:
package: packages/bun-types/package.json
token: ${{ secrets.NPM_TOKEN }}
tag: canary
- name: Release (latest)
if: ${{ env.BUN_LATEST == 'true' }}
uses: JS-DevTools/npm-publish@v1
with:
package: packages/bun-types/package.json
token: ${{ secrets.NPM_TOKEN }}
docker:
name: Release to Dockerhub
runs-on: ubuntu-latest
needs: sign
if: ${{ github.event_name != 'workflow_dispatch' || github.event.inputs.use-docker == 'true' }}
permissions:
contents: read
strategy:
fail-fast: false
matrix:
include:
- variant: debian
suffix: ""
- variant: debian
suffix: -debian
- variant: slim
suffix: -slim
dir: debian-slim
- variant: alpine
suffix: -alpine
- variant: distroless
suffix: -distroless
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup Docker emulator
uses: docker/setup-qemu-action@v2
- id: buildx
name: Setup Docker buildx
uses: docker/setup-buildx-action@v3
with:
platforms: linux/amd64,linux/arm64
- id: metadata
name: Setup Docker metadata
uses: docker/metadata-action@v4
with:
images: oven/bun
flavor: |
latest=false
tags: |
type=raw,value=latest,enable=${{ env.BUN_LATEST == 'true' && matrix.suffix == '' }}
type=raw,value=${{ matrix.variant }},enable=${{ env.BUN_LATEST == 'true' }}
type=match,pattern=(bun-v)?(canary|\d+.\d+.\d+),group=2,value=${{ env.BUN_VERSION }},suffix=${{ matrix.suffix }}
type=match,pattern=(bun-v)?(canary|\d+.\d+),group=2,value=${{ env.BUN_VERSION }},suffix=${{ matrix.suffix }}
type=match,pattern=(bun-v)?(canary|\d+),group=2,value=${{ env.BUN_VERSION }},suffix=${{ matrix.suffix }}
- name: Login to Docker
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Push to Docker
uses: docker/build-push-action@v5
with:
context: ./dockerhub/${{ matrix.dir || matrix.variant }}
platforms: linux/amd64,linux/arm64
builder: ${{ steps.buildx.outputs.name }}
push: true
tags: ${{ steps.metadata.outputs.tags }}
labels: ${{ steps.metadata.outputs.labels }}
build-args: |
BUN_VERSION=${{ env.BUN_VERSION }}
homebrew:
name: Release to Homebrew
runs-on: ubuntu-latest
needs: sign
permissions:
contents: read
if: ${{ github.event_name == 'release' || github.event.inputs.use-homebrew == 'true' }}
steps:
- name: Checkout
uses: actions/checkout@v4
with:
repository: oven-sh/homebrew-bun
token: ${{ secrets.ROBOBUN_TOKEN }}
- id: gpg
name: Setup GPG
uses: crazy-max/ghaction-import-gpg@v5
with:
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
passphrase: ${{ secrets.GPG_PASSPHRASE }}
- name: Setup Ruby
uses: ruby/setup-ruby@v1
with:
ruby-version: "2.6"
- name: Update Tap
run: ruby scripts/release.rb "${{ env.BUN_VERSION }}"
- name: Commit Tap
uses: stefanzweifel/git-auto-commit-action@v4
with:
commit_options: --gpg-sign=${{ steps.gpg.outputs.keyid }}
commit_message: Release ${{ env.BUN_VERSION }}
commit_user_name: robobun
commit_user_email: robobun@oven.sh
commit_author: robobun <robobun@oven.sh>
s3:
name: Upload to S3
runs-on: ubuntu-latest
needs: sign
if: ${{ github.event_name != 'workflow_dispatch' || github.event.inputs.use-s3 == 'true' }}
permissions:
contents: read
defaults:
run:
working-directory: packages/bun-release
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: "1.0.21"
- name: Install Dependencies
run: bun install
- name: Release
run: bun upload-s3 -- "${{ env.BUN_VERSION }}"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY}}
AWS_ENDPOINT: ${{ secrets.AWS_ENDPOINT }}
AWS_BUCKET: bun
.github/workflows/bun-types-tests.yml
@@ -0,0 +1,41 @@
name: bun-types
on:
push:
paths:
- "packages/bun-types/**"
branches: [main]
pull_request:
paths:
- "packages/bun-types/**"
jobs:
tests:
name: type-tests
runs-on: ubuntu-latest
defaults:
run:
working-directory: packages/bun-types
steps:
- name: Checkout repo
uses: actions/checkout@v4
- name: Install bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- name: Install node
uses: actions/setup-node@v3
with:
node-version: latest
- name: Install dependencies
run: |
bun install
- name: Generate package
run: bun run build
- name: Tests
run: bun run test
.github/workflows/bun-windows.yml
@@ -0,0 +1,503 @@
name: bun-windows
concurrency:
group: bun-windows-${{ github.ref }}
cancel-in-progress: true
env:
# Note: in other files this version is only the major version, but for Windows it is the full version
LLVM_VERSION: 16.0.6
BUN_DOWNLOAD_URL_BASE: https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/latest
tag: bun-windows
# TODO: wire this up to workflow_dispatch.
# GitHub's expression syntax makes it hard to default a boolean input to true.
canary: true
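# A possible wiring (untested sketch, assuming the commented-out `is-canary`
# boolean input below is enabled): non-dispatch events are always canary,
# while manual runs follow the input:
#   canary: ${{ github.event_name != 'workflow_dispatch' || github.event.inputs.is-canary == 'true' }}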
on:
push:
branches: [main]
paths:
- "src/**/*"
- "test/**/*"
- "packages/bun-usockets/src/**/*"
- "packages/bun-uws/src/**/*"
- "CMakeLists.txt"
- "build.zig"
- "Makefile"
- "Dockerfile"
pull_request:
branches: [main]
paths:
- "src/**/*"
- "test/**/*"
- "packages/bun-usockets/src/**/*"
- "packages/bun-uws/src/**/*"
- "CMakeLists.txt"
- "build.zig"
- "Makefile"
- "Dockerfile"
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
# inputs:
# is-canary:
# type: boolean
# description: Is Canary Build?
# default: true
jobs:
windows-zig:
strategy:
fail-fast: false
matrix:
cpu: [haswell, nehalem]
arch: [x86_64]
name: Zig Build
runs-on: namespace-profile-zig-build
timeout-minutes: 60
if: github.repository_owner == 'oven-sh'
steps:
- run: git config --global core.autocrlf false && git config --global core.eol lf
- uses: actions/checkout@v4
- name: Setup Docker Buildx
uses: docker/setup-buildx-action@v3
id: buildx
with:
install: true
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Calculate Canary Revision
if: ${{ env.canary == 'true' }}
id: canary
run: |
echo "canary_revision=$(GITHUB_TOKEN="${{ secrets.GITHUB_TOKEN }}" bash ./scripts/calculate-canary-revision.sh --raw)" >> $GITHUB_OUTPUT
- name: Compile Zig Object
uses: docker/build-push-action@v5
with:
context: .
push: false
# This doesn't seem to work
# cache-from: type=s3,endpoint_url=${{ secrets.CACHE_S3_ENDPOINT }},blobs_prefix=docker_blobs/,manifests_prefix=docker_manifests/,access_key_id=${{ secrets.CACHE_S3_ACCESS_KEY_ID }},secret_access_key=${{ secrets.CACHE_S3_SECRET_ACCESS_KEY }},bucket=bun,region=auto
# cache-to: type=s3,endpoint_url=${{ secrets.CACHE_S3_ENDPOINT }},blobs_prefix=docker_blobs/,manifests_prefix=docker_manifests/,access_key_id=${{ secrets.CACHE_S3_ACCESS_KEY_ID }},secret_access_key=${{ secrets.CACHE_S3_SECRET_ACCESS_KEY }},bucket=bun,region=auto
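# An alternative worth trying is buildx's GitHub Actions cache backend
# (untested sketch here):
# cache-from: type=gha
# cache-to: type=gha,mode=max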
build-args: |
BUILDARCH=${{ runner.arch == 'X64' && 'amd64' || 'arm64' }}
BUILD_MACHINE_ARCH=${{ runner.arch == 'X64' && 'x86_64' || 'aarch64' }}
ARCH=${{ matrix.arch }}
CPU_TARGET=${{ matrix.cpu }}
TRIPLET=${{ matrix.arch }}-windows-msvc
GIT_SHA=${{ github.sha }}
CANARY=${{ env.canary == 'true' && steps.canary.outputs.canary_revision || '0' }}
ZIG_OPTIMIZE=ReleaseSafe
# TODO(@paperdave): enable ASSERTIONS=1
platforms: linux/${{ runner.arch == 'X64' && 'amd64' || 'arm64' }}
target: build_release_obj
outputs: type=local,dest=${{runner.temp}}/release
- name: Upload Zig Object
uses: actions/upload-artifact@v4
with:
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-zig${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}
path: ${{runner.temp}}/release/bun-zig.o
if-no-files-found: "error"
windows-dependencies:
name: Dependencies
runs-on: windows
timeout-minutes: 60
strategy:
fail-fast: false
matrix:
cpu: [haswell, nehalem]
arch: [x86_64]
steps:
- run: git config --global core.autocrlf false && git config --global core.eol lf
- name: Checkout
uses: actions/checkout@v4
- name: Clone Submodules
run: .\scripts\update-submodules.ps1
- name: Hash submodule versions
shell: pwsh
run: |
$data = "$(& {
git submodule | Where-Object { $_ -notmatch 'WebKit' }
clang --version
rustc --version
Get-Content -Path (Get-ChildItem -Path 'scripts/build*.ps1', 'scripts/all-dependencies.ps1', 'scripts/env.ps1' | Sort-Object -Property Name).FullName | Out-String
echo 1
})"
$hash = ( -join ((New-Object -TypeName System.Security.Cryptography.SHA1CryptoServiceProvider).ComputeHash([System.Text.Encoding]::UTF8.GetBytes($data)) | ForEach-Object { $_.ToString("x2") } )).Substring(0, 10)
echo "sha=${hash}" >> $env:GITHUB_OUTPUT
id: submodule-versions
- name: Try fetch dependencies
id: cache-deps-restore
uses: actions/cache/restore@v4
with:
path: bun-deps
key: bun-deps-${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}-${{ steps.submodule-versions.outputs.sha }}
- name: Install LLVM ${{ env.LLVM_VERSION }}
if: ${{ !steps.cache-deps-restore.outputs.cache-hit }}
uses: KyleMayes/install-llvm-action@1a3da29f56261a1e1f937ec88f0856a9b8321d7e
with:
version: ${{ env.LLVM_VERSION }}
- name: Install Ninja
if: ${{ !steps.cache-deps-restore.outputs.cache-hit }}
run: choco install -y ninja
- name: Build Dependencies
if: ${{ !steps.cache-deps-restore.outputs.cache-hit }}
run: |
.\scripts\env.ps1 ${{ matrix.cpu == 'nehalem' && '-Baseline' || '' }}
Invoke-WebRequest -Uri "https://www.nasm.us/pub/nasm/releasebuilds/2.16.01/win64/nasm-2.16.01-win64.zip" -OutFile nasm.zip
Expand-Archive nasm.zip (mkdir -Force "nasm")
$Nasm = (Get-ChildItem "nasm")
$env:Path += ";${Nasm}"
$env:BUN_DEPS_OUT_DIR = (mkdir -Force "./bun-deps")
.\scripts\all-dependencies.ps1
- name: Upload Dependencies
uses: actions/upload-artifact@v4
with:
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-deps${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}
path: bun-deps/
if-no-files-found: "error"
- name: Cache Dependencies
if: ${{ !steps.cache-deps-restore.outputs.cache-hit }}
id: cache-deps-save
uses: actions/cache/save@v4
with:
path: bun-deps
key: ${{ steps.cache-deps-restore.outputs.cache-primary-key }}
# TODO(@paperdave): stop relying on this and use bun.exe to build itself.
# we can't do that now because there isn't a tagged release to use.
#
# and at the time of writing, the minimum canary required to work is not
# yet released, as it is the one introduced by *this* commit.
windows-codegen:
name: Codegen
runs-on: ubuntu-latest
timeout-minutes: 10
if: github.repository_owner == 'oven-sh'
strategy:
fail-fast: false
matrix:
arch: [x86_64]
steps:
- uses: actions/checkout@v4
- run: |
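# Bootstrap with a prebuilt Linux bun, then cross-generate the win32-x64 codegen artifacts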
curl -fsSL $BUN_DOWNLOAD_URL_BASE/bun-linux-x64.zip > bun.zip
unzip bun.zip
export PATH="$PWD/bun-linux-x64:$PATH"
./scripts/cross-compile-codegen.sh win32 x64
# Sort of a hack to do this step in the codegen stage
- name: Calculate Canary Revision
if: ${{ env.canary == 'true' }}
run: |
echo "canary_revision=$(GITHUB_TOKEN="${{ secrets.GITHUB_TOKEN }}" bash ./scripts/calculate-canary-revision.sh --raw)" > build-codegen-win32-x64/.canary_revision
- uses: actions/upload-artifact@v4
with:
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-codegen
path: build-codegen-win32-x64/
if-no-files-found: "error"
windows-cpp:
name: C++ Build
needs: [windows-codegen]
runs-on: windows
if: github.repository_owner == 'oven-sh'
timeout-minutes: 90
strategy:
fail-fast: false
matrix:
cpu: [haswell, nehalem]
arch: [x86_64]
steps:
- run: git config --global core.autocrlf false && git config --global core.eol lf
- uses: actions/checkout@v4
- uses: KyleMayes/install-llvm-action@1a3da29f56261a1e1f937ec88f0856a9b8321d7e
with:
version: ${{ env.LLVM_VERSION }}
- run: choco install -y ninja
- name: Download Codegen
uses: actions/download-artifact@v4
with:
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-codegen
path: build
- name: Build C++
run: |
# Using SCCache was blocked by an issue that is fixed in a newer version.
# TODO: update to the fixed version and re-enable
# $sczip = "sccache-v0.6.0-x86_64-pc-windows-msvc"
# Invoke-WebRequest -Uri "https://github.com/mozilla/sccache/releases/download/v0.6.0/${sczip}.zip" -OutFile "${sczip}.zip"
# Expand-Archive "${sczip}.zip"
# $env:SCCACHE_BUCKET="bun"
# $env:SCCACHE_REGION="auto"
# $env:SCCACHE_S3_USE_SSL="true"
# $env:SCCACHE_ENDPOINT="${{ secrets.CACHE_S3_ENDPOINT }}"
# $env:AWS_ACCESS_KEY_ID="${{ secrets.CACHE_S3_ACCESS_KEY_ID }}"
# $env:AWS_SECRET_ACCESS_KEY="${{ secrets.CACHE_S3_SECRET_ACCESS_KEY }}"
# $SCCACHE="$PWD/${sczip}/${sczip}/sccache.exe"
$CANARY_REVISION = if (Test-Path build/.canary_revision) { Get-Content build/.canary_revision } else { "0" }
.\scripts\env.ps1 ${{ matrix.cpu == 'nehalem' && '-Baseline' || '' }}
.\scripts\update-submodules.ps1
.\scripts\build-libuv.ps1 -CloneOnly $True
cd build
# "-DCCACHE_PROGRAM=${SCCACHE}"
cmake .. -G Ninja -DCMAKE_BUILD_TYPE=Release `
-DNO_CODEGEN=1 `
-DNO_CONFIGURE_DEPENDS=1 `
"-DCANARY=${CANARY_REVISION}" `
-DBUN_CPP_ONLY=1 ${{ matrix.cpu == 'nehalem' && '-DUSE_BASELINE_BUILD=1' || '' }}
if ($LASTEXITCODE -ne 0) { throw "CMake configuration failed" }
.\compile-cpp-only.ps1 -v
if ($LASTEXITCODE -ne 0) { throw "C++ compilation failed" }
- uses: actions/upload-artifact@v4
with:
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-cpp${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}
path: build/bun-cpp-objects.a
if-no-files-found: "error"
windows-link:
strategy:
fail-fast: false
matrix:
cpu: [haswell, nehalem]
arch: [x86_64]
name: Link
needs: [windows-dependencies, windows-codegen, windows-cpp, windows-zig]
runs-on: windows-small
if: github.repository_owner == 'oven-sh'
timeout-minutes: 30
permissions: write-all
steps:
- run: git config --global core.autocrlf false && git config --global core.eol lf
- uses: actions/checkout@v4
- uses: KyleMayes/install-llvm-action@1a3da29f56261a1e1f937ec88f0856a9b8321d7e
with:
version: ${{ env.LLVM_VERSION }}
- run: choco install -y ninja
- name: Download Codegen
uses: actions/download-artifact@v4
with:
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-codegen
path: build
- name: Download Dependencies
uses: actions/download-artifact@v4
with:
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-deps${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}
path: bun-deps
- name: Download Zig Object
uses: actions/download-artifact@v4
with:
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-zig${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}
path: bun-zig
- name: Download C++ Objects
uses: actions/download-artifact@v4
with:
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-cpp${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}
path: bun-cpp
- name: Link
run: |
.\scripts\update-submodules.ps1
.\scripts\env.ps1 ${{ matrix.cpu == 'nehalem' && '-Baseline' || '' }}
Set-Location build
$CANARY_REVISION = if (Test-Path build/.canary_revision) { Get-Content build/.canary_revision } else { "0" }
cmake .. -G Ninja -DCMAKE_BUILD_TYPE=Release `
-DNO_CODEGEN=1 `
-DNO_CONFIGURE_DEPENDS=1 `
"-DCANARY=${CANARY_REVISION}" `
-DBUN_LINK_ONLY=1 `
"-DBUN_DEPS_OUT_DIR=$(Resolve-Path ../bun-deps)" `
"-DBUN_CPP_ARCHIVE=$(Resolve-Path ../bun-cpp/bun-cpp-objects.a)" `
"-DBUN_ZIG_OBJ=$(Resolve-Path ../bun-zig/bun-zig.o)" `
${{ matrix.cpu == 'nehalem' && '-DUSE_BASELINE_BUILD=1' || '' }}
if ($LASTEXITCODE -ne 0) { throw "CMake configuration failed" }
ninja -v
if ($LASTEXITCODE -ne 0) { throw "Link failed!" }
- name: Package
run: |
$Dist = mkdir -Force "${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}"
cp -r build\bun.exe "$Dist\bun.exe"
Compress-Archive "$Dist" "${Dist}.zip"
$Dist = "$Dist-profile"
MkDir -Force "$Dist"
cp -r build\bun.exe "$Dist\bun.exe"
cp -r build\bun.pdb "$Dist\bun.pdb"
Compress-Archive "$Dist" "$Dist.zip"
- uses: actions/upload-artifact@v4
with:
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}
path: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}.zip
if-no-files-found: "error"
- uses: actions/upload-artifact@v4
with:
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}-profile
path: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}-profile.zip
if-no-files-found: "error"
- name: Release
id: release
uses: ncipollo/release-action@v1
if: |
github.repository_owner == 'oven-sh'
&& github.ref == 'refs/heads/main'
with:
prerelease: true
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
allowUpdates: true
replacesArtifacts: true
generateReleaseNotes: true
artifactErrorsFailBuild: true
token: ${{ secrets.GITHUB_TOKEN }}
name: "Canary (${{github.sha}})"
tag: "canary"
artifacts: "${{env.tag}}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}.zip,${{env.tag}}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}-profile.zip"
- uses: sarisia/actions-status-discord@v1
if: failure() && github.repository_owner == 'oven-sh' && github.event_name == 'pull_request'
with:
title: ""
webhook: ${{ secrets.DISCORD_WEBHOOK }}
status: ${{ job.status }}
noprefix: true
nocontext: true
description: |
### [${{github.event.pull_request.title}}](https://github.com/oven-sh/bun/pull/${{github.event.number}})
@${{ github.actor }}
Build failed on Windows ${{ matrix.arch }}${{ matrix.cpu == 'nehalem' && ' Baseline' || '' }}
**[Build Output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})** | [Commit](https://github.com/oven-sh/bun/commits/${{github.sha}})
windows-test:
name: Test
runs-on: windows-small
needs: [windows-link]
if: github.event_name == 'pull_request' && github.repository_owner == 'oven-sh'
permissions:
pull-requests: write
timeout-minutes: 180
outputs:
failing_tests: ${{ steps.test.outputs.failing_tests }}
failing_tests_count: ${{ steps.test.outputs.failing_tests_count }}
strategy:
fail-fast: false
matrix:
# TODO: test baseline, disabled due to noise
cpu: [haswell]
arch: [x86_64]
steps:
- run: git config --global core.autocrlf false && git config --global core.eol lf
- id: checkout
name: Checkout
uses: actions/checkout@v4
with:
submodules: false
- id: download
name: Download Release
uses: actions/download-artifact@v4
with:
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}-profile
path: ${{runner.temp}}/release
- name: Install Bun
run: |
cd ${{runner.temp}}/release
unzip ${{env.tag}}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}-profile.zip
cd ${{env.tag}}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}-profile
pwd >> $env:GITHUB_PATH
- name: Install Node
uses: actions/setup-node@v4
with:
node-version: 20
- name: Install dependencies
run: |
# bun install --verbose
# bun install --cwd=test --verbose
# bun install --cwd=packages/bun-internal-test --verbose
npm install
cd test && npm install
cd ../packages/bun-internal-test && npm install
cd ../..
- id: test
name: Run tests
env:
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
TMPDIR: ${{runner.temp}}
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
run: |
try {
$ErrorActionPreference = "SilentlyContinue"
$null = node packages/bun-internal-test/src/runner.node.mjs ${{runner.temp}}/release/${{env.tag}}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}-profile/bun.exe || $true
} catch {}
$ErrorActionPreference = "Stop"
- uses: sarisia/actions-status-discord@v1
if: always() && steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
with:
title: ""
webhook: ${{ secrets.DISCORD_WEBHOOK_WINTEST }}
status: "failure"
noprefix: true
nocontext: true
description: |
### ❌🪟 [${{github.event.pull_request.title}}](https://github.com/oven-sh/bun/pull/${{github.event.number}})
@${{ github.actor }}, there are **${{ steps.test.outputs.failing_tests_count }} failing tests** on Windows ${{ matrix.arch }}${{ matrix.cpu == 'nehalem' && ' Baseline' || '' }}
${{ steps.test.outputs.failing_tests }}
[Full Test Output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})
- uses: sarisia/actions-status-discord@v1
if: always() && steps.test.outputs.regressing_tests != '' && github.event_name == 'pull_request'
with:
title: ""
webhook: ${{ secrets.DISCORD_WEBHOOK }}
status: "failure"
noprefix: true
nocontext: true
description: |
### ❌🪟 [${{github.event.pull_request.title}}](https://github.com/oven-sh/bun/pull/${{github.event.number}})
@${{ github.actor }}, there are **${{ steps.test.outputs.regressing_test_count }} test regressions** on Windows ${{ matrix.arch }}${{ matrix.cpu == 'nehalem' && ' Baseline' || '' }}
${{ steps.test.outputs.regressing_tests }}
[Full Test Output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})
- name: Comment on PR
if: always() && steps.test.outputs.regressing_tests != '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: test-windows-${{ matrix.arch }}-${{ matrix.cpu }}
message: |
### ❌🪟 @${{ github.actor }}, there are **${{ steps.test.outputs.regressing_test_count }} test regressions** on Windows ${{ matrix.arch }}${{ matrix.cpu == 'nehalem' && ' Baseline' || '' }}
${{ steps.test.outputs.regressing_tests }}
[Full Test Output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})
- name: Uncomment on PR
if: steps.test.outputs.regressing_tests == '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: test-windows-${{ matrix.arch }}-${{ matrix.cpu }}
mode: upsert
create_if_not_exists: false
message: |
✅🪟 Test regressions on Windows ${{ matrix.arch }}${{ matrix.cpu == 'nehalem' && ' Baseline' || '' }} have been resolved.
- id: fail
name: Fail the build
if: steps.test.outputs.regressing_tests != '' && github.event_name == 'pull_request'
run: exit 1
@@ -1,35 +0,0 @@
name: Claude Code
on:
issue_comment:
types: [created]
pull_request_review_comment:
types: [created]
issues:
types: [opened, assigned]
pull_request_review:
types: [submitted]
jobs:
claude:
if: |
(github.event_name == 'issue_comment' && contains(github.event.comment.body, '@claude')) ||
(github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude')) ||
(github.event_name == 'pull_request_review' && contains(github.event.review.body, '@claude')) ||
(github.event_name == 'issues' && (contains(github.event.issue.body, '@claude') || contains(github.event.issue.title, '@claude')))
runs-on: ubuntu-latest
permissions:
contents: read
id-token: write
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Run Claude Code
id: claude
uses: anthropics/claude-code-action@beta
with:
anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}
@@ -1,58 +0,0 @@
name: Codex Test Sync
on:
pull_request:
types: [labeled, opened]
env:
BUN_VERSION: "1.2.15"
jobs:
sync-node-tests:
runs-on: ubuntu-latest
if: |
(github.event.action == 'labeled' && github.event.label.name == 'codex') ||
(github.event.action == 'opened' && contains(github.event.pull_request.labels.*.name, 'codex')) ||
contains(github.head_ref, 'codex')
permissions:
contents: write
pull-requests: write
steps:
- name: Checkout
uses: actions/checkout@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
fetch-depth: 0
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: ${{ env.BUN_VERSION }}
- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@v44
with:
files: |
test/js/node/test/parallel/**/*.{js,mjs,ts}
test/js/node/test/sequential/**/*.{js,mjs,ts}
- name: Sync tests
if: steps.changed-files.outputs.any_changed == 'true'
shell: bash
run: |
echo "Changed test files:"
echo "${{ steps.changed-files.outputs.all_changed_files }}"
# Process each changed test file
for file in ${{ steps.changed-files.outputs.all_changed_files }}; do
# Extract test name from file path
test_name=$(basename "$file" | sed 's/\.[^.]*$//')
echo "Syncing test: $test_name"
bun node:test:cp "$test_name"
done
- name: Commit changes
uses: stefanzweifel/git-auto-commit-action@v5
with:
commit_message: "Sync Node.js tests with upstream"

View File

@@ -1,85 +0,0 @@
name: C++ Linter comment
permissions:
actions: read
pull-requests: write
on:
workflow_run:
workflows:
- lint-cpp
types:
- completed
jobs:
comment-lint:
if: ${{ github.repository_owner == 'oven-sh' }}
name: Comment
runs-on: ubuntu-latest
steps:
- name: Download Comment
uses: actions/download-artifact@v4
with:
name: format.log
github-token: ${{ github.token }}
run-id: ${{ github.event.workflow_run.id }}
- name: PR Number
uses: actions/download-artifact@v4
with:
name: pr-number.txt
github-token: ${{ github.token }}
run-id: ${{ github.event.workflow_run.id }}
- name: Did Fail
uses: actions/download-artifact@v4
with:
name: did_fail.txt
github-token: ${{ github.token }}
run-id: ${{ github.event.workflow_run.id }}
- name: Setup Environment
id: env
shell: bash
run: |
# Copy to outputs
echo "pr-number=$(cat pr-number.txt)" >> $GITHUB_OUTPUT
{
echo 'text_output<<EOF'
cat format.log
echo EOF
} >> "$GITHUB_OUTPUT"
echo "did_fail=$(cat did_fail.txt)" >> $GITHUB_OUTPUT
- name: Find Comment
id: comment
uses: peter-evans/find-comment@v3
with:
issue-number: ${{ steps.env.outputs.pr-number }}
comment-author: github-actions[bot]
body-includes: <!-- generated-comment lint-cpp-workflow=${{ github.workflow }} -->
- name: Update Comment
uses: peter-evans/create-or-update-comment@v4
if: steps.env.outputs.did_fail != '0'
with:
comment-id: ${{ steps.comment.outputs.comment-id }}
issue-number: ${{ steps.env.outputs.pr-number }}
body: |
@${{ github.actor }}, `clang-tidy` had something to share with you about your code:
```cpp
${{ steps.env.outputs.text_output }}
```
Commit: ${{ github.event.workflow_run.head_sha || github.sha }}
<!-- generated-comment lint-cpp-workflow=${{ github.workflow }} -->
edit-mode: replace
- name: Update Previous Comment
uses: peter-evans/create-or-update-comment@v4
if: steps.env.outputs.did_fail == '0' && steps.comment.outputs.comment-id != ''
with:
comment-id: ${{ steps.comment.outputs.comment-id }}
issue-number: ${{ steps.env.outputs.pr-number }}
body: |
clang-tidy nits are fixed! Thank you.
<!-- generated-comment lint-cpp-workflow=${{ github.workflow }} -->
edit-mode: replace

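A note on the "Setup Environment" step above: a plain `key=value` line written to `$GITHUB_OUTPUT` cannot hold newlines, so the workflow uses GitHub's delimiter syntax for multiline values. A minimal sketch of that pattern (same file and key as above):

```bash
# Multiline $GITHUB_OUTPUT: the delimiter name (EOF here) is arbitrary,
# but must not occur anywhere inside the value itself.
{
  echo 'text_output<<EOF'
  cat format.log   # the whole multi-line file becomes the output value
  echo 'EOF'
} >> "$GITHUB_OUTPUT"
```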
View File

@@ -1,22 +0,0 @@
name: Docs
on:
push:
paths:
- "docs/**"
- "packages/bun-types/**.d.ts"
- "CONTRIBUTING.md"
branches:
- main
jobs:
deploy:
name: Deploy
runs-on: ubuntu-latest
if: ${{ github.repository_owner == 'oven-sh' }}
steps:
# redeploy Vercel site when a file in `docs` changes
# using VERCEL_DEPLOY_HOOK environment variable
- name: Trigger Webhook
run: |
curl -v ${{ secrets.VERCEL_DEPLOY_HOOK }}

View File

@@ -1,59 +1,46 @@
name: autofix.ci
name: autofix.ci # Must be named this for autofix.ci to work
permissions:
contents: read
on:
workflow_call:
workflow_dispatch:
pull_request:
merge_group:
push:
branches: ["main"]
branches:
- main
env:
BUN_VERSION: "1.2.11"
LLVM_VERSION: "19.1.7"
LLVM_VERSION_MAJOR: "19"
ZIG_VERSION: 0.12.0-dev.1828+225fe6ddb
jobs:
autofix:
name: Format
runs-on: ubuntu-latest
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
format:
name: format
runs-on: ${{ vars.RUNNER_LINUX_X64 || 'ubuntu-latest' }}
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Configure Git
run: |
git config --global core.autocrlf true
git config --global core.ignorecase true
git config --global core.precomposeUnicode true
with:
sparse-checkout: |
.github
src
packages
test
bench
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: ${{ env.BUN_VERSION }}
- name: Setup Dependencies
bun-version: "1.0.21"
- name: Setup Zig
uses: goto-bus-stop/setup-zig@c7b6cdd3adba8f8b96984640ff172c37c93f73ee
with:
version: ${{ env.ZIG_VERSION }}
- name: Install Dependencies
run: |
bun install
- name: Install LLVM
- name: Format
run: |
curl -fsSL https://apt.llvm.org/llvm.sh | sudo bash -s -- ${{ env.LLVM_VERSION_MAJOR }} all
- name: Setup Zig
uses: mlugg/setup-zig@v1
with:
version: 0.14.0
- name: Zig Format
run: |
bun scripts/zig-remove-unreferenced-top-level-decls.ts src/
zig fmt src
bun scripts/sortImports src
zig fmt src
- name: Prettier Format
run: |
bun run prettier
- name: Clang Format
run: |
bun run clang-format
- uses: autofix-ci/action@635ffb0c9798bd160680f18fd73371e355b85f27
bun fmt
bun fmt:zig
- name: Commit # https://autofix.ci/
uses: autofix-ci/action@d3e591514b99d0fca6779455ff8338516663f7cc

View File

@@ -1,41 +0,0 @@
name: Glob Sources
permissions:
contents: write
on:
workflow_call:
workflow_dispatch:
pull_request:
env:
BUN_VERSION: "1.2.11"
jobs:
glob-sources:
name: Glob Sources
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Configure Git
run: |
git config --global core.autocrlf true
git config --global core.ignorecase true
git config --global core.precomposeUnicode true
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: ${{ env.BUN_VERSION }}
- name: Setup Dependencies
run: |
bun install
- name: Glob sources
run: bun scripts/glob-sources.mjs
- name: Commit
uses: stefanzweifel/git-auto-commit-action@v5
with:
commit_message: "`bun scripts/glob-sources.mjs`"

View File

@@ -1,184 +0,0 @@
name: Issue Labeled
env:
BUN_VERSION: 1.1.44
on:
issues:
types: [labeled]
jobs:
# on-bug:
# runs-on: ubuntu-latest
# if: github.event.label.name == 'bug' || github.event.label.name == 'crash'
# permissions:
# issues: write
# steps:
# - name: Checkout
# uses: actions/checkout@v4
# with:
# sparse-checkout: |
# scripts
# .github
# CMakeLists.txt
# - name: Setup Bun
# uses: ./.github/actions/setup-bun
# with:
# bun-version: ${{ env.BUN_VERSION }}
# - name: "categorize bug"
# id: add-labels
# env:
# GITHUB_ISSUE_BODY: ${{ github.event.issue.body }}
# GITHUB_ISSUE_TITLE: ${{ github.event.issue.title }}
# ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
# shell: bash
# run: |
# echo '{"dependencies": { "@anthropic-ai/sdk": "latest" }}' > scripts/package.json && bun install --cwd=./scripts
# LABELS=$(bun scripts/label-issue.ts)
# echo "labels=$LABELS" >> $GITHUB_OUTPUT
# - name: Add labels
# uses: actions-cool/issues-helper@v3
# if: steps.add-labels.outputs.labels != ''
# with:
# actions: "add-labels"
# token: ${{ secrets.GITHUB_TOKEN }}
# issue-number: ${{ github.event.issue.number }}
# labels: ${{ steps.add-labels.outputs.labels }}
on-labeled:
runs-on: ubuntu-latest
if: github.event.label.name == 'crash' || github.event.label.name == 'needs repro'
permissions:
issues: write
steps:
- name: Checkout
uses: actions/checkout@v4
with:
sparse-checkout: |
scripts
.github
CMakeLists.txt
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: ${{ env.BUN_VERSION }}
- name: "add platform and command label"
id: add-labels
if: github.event.label.name == 'crash'
env:
GITHUB_ISSUE_BODY: ${{ github.event.issue.body }}
GITHUB_ISSUE_TITLE: ${{ github.event.issue.title }}
shell: bash
run: |
LABELS=$(bun scripts/read-issue.ts)
bun scripts/is-outdated.ts
if [[ -f "is-outdated.txt" ]]; then
echo "is-outdated=true" >> $GITHUB_OUTPUT
fi
if [[ -f "outdated.txt" ]]; then
echo "outdated=$(cat outdated.txt)" >> $GITHUB_OUTPUT
fi
if [[ -f "is-very-outdated.txt" ]]; then
echo "is-very-outdated=true" >> $GITHUB_OUTPUT
LABELS="$LABELS,old-version"
else
echo "is-very-outdated=false" >> $GITHUB_OUTPUT
fi
echo "latest=$(cat LATEST)" >> $GITHUB_OUTPUT
echo "labels=$LABELS" >> $GITHUB_OUTPUT
rm -rf is-outdated.txt outdated.txt latest.txt is-very-outdated.txt
- name: Generate comment text with Sentry Link
if: github.event.label.name == 'crash'
# ignore if fail
continue-on-error: true
id: generate-comment-text
env:
GITHUB_ISSUE_BODY: ${{ github.event.issue.body }}
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_EVENTS_SECRET }}
shell: bash
run: |
bun scripts/associate-issue-with-sentry.ts
if [[ -f "sentry-link.txt" ]]; then
echo "sentry-link=$(cat sentry-link.txt)" >> $GITHUB_OUTPUT
fi
if [[ -f "sentry-id.txt" ]]; then
echo "sentry-id=$(cat sentry-id.txt)" >> $GITHUB_OUTPUT
fi
- name: Remove old labels
uses: actions-cool/issues-helper@v3
if: github.event.label.name == 'crash' && steps.add-labels.outputs.is-very-outdated == 'false'
with:
actions: "remove-labels"
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.issue.number }}
labels: old-version
- name: Add labels
uses: actions-cool/issues-helper@v3
if: github.event.label.name == 'crash'
with:
actions: "add-labels"
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.issue.number }}
labels: ${{ steps.add-labels.outputs.labels }}
- name: Comment outdated
if: steps.add-labels.outputs.is-outdated == 'true' && github.event.label.name == 'crash' && steps.generate-comment-text.outputs.sentry-link == ''
uses: actions-cool/issues-helper@v3
with:
actions: "create-comment"
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.issue.number }}
body: |
@${{ github.event.issue.user.login }}, the latest version of Bun is v${{ steps.add-labels.outputs.latest }}, but this crash was reported on Bun v${{ steps.add-labels.outputs.outdated }}.
Are you able to reproduce this crash on the latest version of Bun?
```sh
bun upgrade
```
- name: Comment with Sentry Link and outdated version
if: steps.generate-comment-text.outputs.sentry-link != '' && github.event.label.name == 'crash' && steps.add-labels.outputs.is-outdated == 'true'
uses: actions-cool/issues-helper@v3
with:
actions: "create-comment"
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.issue.number }}
body: |
@${{ github.event.issue.user.login }}, thank you for reporting this crash. The latest version of Bun is v${{ steps.add-labels.outputs.latest }}, but this crash was reported on Bun v${{ steps.add-labels.outputs.outdated }}.
Are you able to reproduce this crash on the latest version of Bun?
```sh
bun upgrade
```
For Bun's internal tracking, this issue is [${{ steps.generate-comment-text.outputs.sentry-id }}](${{ steps.generate-comment-text.outputs.sentry-link }}).
<!-- sentry-id: ${{ steps.generate-comment-text.outputs.sentry-id }} -->
<!-- sentry-link: ${{ steps.generate-comment-text.outputs.sentry-link }} -->
- name: Comment with Sentry Link
if: steps.generate-comment-text.outputs.sentry-link != '' && github.event.label.name == 'crash' && steps.add-labels.outputs.is-outdated != 'true'
uses: actions-cool/issues-helper@v3
with:
actions: "create-comment"
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.issue.number }}
body: |
Thank you for reporting this crash.
For Bun's internal tracking, this issue is [${{ steps.generate-comment-text.outputs.sentry-id }}](${{ steps.generate-comment-text.outputs.sentry-link }}).
<!-- sentry-id: ${{ steps.generate-comment-text.outputs.sentry-id }} -->
<!-- sentry-link: ${{ steps.generate-comment-text.outputs.sentry-link }} -->
- name: Comment needs repro
if: github.event.label.name == 'needs repro'
uses: actions-cool/issues-helper@v3
with:
actions: "create-comment"
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.issue.number }}
body: |
Hello @${{ github.event.issue.user.login }}. Please provide a [minimal reproduction](https://stackoverflow.com/help/minimal-reproducible-example) using a GitHub repository, [Replit](https://replit.com/@replit/Bun), [CodeSandbox](https://codesandbox.io/templates/bun), or provide a bulleted list of commands to run that reproduce this issue. Issues marked with `needs repro` will be closed if they have no activity within 3 days.

View File

@@ -1,21 +0,0 @@
name: Lint
on:
pull_request:
workflow_dispatch:
env:
BUN_VERSION: "1.2.10"
jobs:
lint-js:
name: "Lint JavaScript"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: ${{ env.BUN_VERSION }}
- name: Lint
run: bun lint

View File

@@ -1,89 +0,0 @@
name: Comment on updated submodule
on:
pull_request_target:
paths:
- "src/generated_versions_list.zig"
- ".github/workflows/on-submodule-update.yml"
jobs:
comment:
name: Comment
runs-on: ubuntu-latest
if: ${{ github.repository_owner == 'oven-sh' }}
permissions:
contents: read
pull-requests: write
issues: write
steps:
- name: Checkout current
uses: actions/checkout@v4
with:
sparse-checkout: |
src
- name: Hash generated versions list
id: hash
run: |
echo "hash=$(sha256sum src/generated_versions_list.zig | cut -d ' ' -f 1)" >> $GITHUB_OUTPUT
- name: Checkout base
uses: actions/checkout@v4
with:
ref: ${{ github.base_ref }}
sparse-checkout: |
src
- name: Hash base
id: base
run: |
echo "base=$(sha256sum src/generated_versions_list.zig | cut -d ' ' -f 1)" >> $GITHUB_OUTPUT
- name: Compare
id: compare
run: |
if [ "${{ steps.hash.outputs.hash }}" != "${{ steps.base.outputs.base }}" ]; then
echo "changed=true" >> $GITHUB_OUTPUT
else
echo "changed=false" >> $GITHUB_OUTPUT
fi
- name: Find Comment
id: comment
uses: peter-evans/find-comment@v3
with:
issue-number: ${{ github.event.pull_request.number }}
comment-author: github-actions[bot]
body-includes: <!-- generated-comment submodule-updated -->
- name: Write Warning Comment
uses: peter-evans/create-or-update-comment@v4
if: steps.compare.outputs.changed == 'true'
with:
comment-id: ${{ steps.comment.outputs.comment-id }}
issue-number: ${{ github.event.pull_request.number }}
edit-mode: replace
body: |
⚠️ **Warning:** @${{ github.actor }}, this PR has changes to submodule versions.
If this change was intentional, please ignore this message. If not, please undo changes to submodules and rebase your branch.
<!-- generated-comment submodule-updated -->
- name: Add labels
uses: actions-cool/issues-helper@v3
if: steps.compare.outputs.changed == 'true'
with:
actions: "add-labels"
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.pull_request.number }}
labels: "changed-submodules"
- name: Remove labels
uses: actions-cool/issues-helper@v3
if: steps.compare.outputs.changed == 'false'
with:
actions: "remove-labels"
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.pull_request.number }}
labels: "changed-submodules"
- name: Delete outdated comment
uses: actions-cool/issues-helper@v3
if: steps.compare.outputs.changed == 'false' && steps.comment.outputs.comment-id != ''
with:
actions: "delete-comment"
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.pull_request.number }}
comment-id: ${{ steps.comment.outputs.comment-id }}

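The workflow above detects submodule-version drift by hashing `src/generated_versions_list.zig` on both the PR head and the base checkout. A local equivalent, as a sketch (`BASE_REF` is an assumed variable standing in for the PR's base branch):

```bash
# Exit status of `git diff --quiet` tells whether the branch changes the
# generated versions list relative to its base (0 = unchanged, 1 = changed).
if git diff --quiet "origin/${BASE_REF:-main}"...HEAD -- src/generated_versions_list.zig; then
  echo "changed=false"
else
  echo "changed=true"
fi
```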
View File

@@ -1,55 +0,0 @@
name: Packages CI
on:
push:
branches:
- main
paths:
- "packages/**"
- .prettierrc
- .prettierignore
- tsconfig.json
- oxlint.json
- "!**/*.md"
pull_request:
branches:
- main
paths:
- "packages/**"
- .prettierrc
- .prettierignore
- tsconfig.json
- oxlint.json
- "!**/*.md"
env:
BUN_VERSION: "canary"
jobs:
bun-plugin-svelte:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: ${{ env.BUN_VERSION }}
- name: Install dependencies
run: |
bun install
pushd ./packages/bun-plugin-svelte && bun install
- name: Lint
run: |
bunx oxlint@0.15 --format github --deny-warnings
bunx prettier --config ../../.prettierrc --check .
working-directory: ./packages/bun-plugin-svelte
- name: Check types
run: bun check:types
working-directory: ./packages/bun-plugin-svelte
- name: Test
run: bun test
working-directory: ./packages/bun-plugin-svelte

View File

@@ -1,368 +0,0 @@
# TODO: Move this to bash scripts instead of GitHub Actions
# so it can be run from Buildkite, see: .buildkite/scripts/release.sh
name: Release
concurrency: release
env:
BUN_VERSION: ${{ github.event.inputs.tag || github.event.release.tag_name || 'canary' }}
BUN_LATEST: ${{ (github.event.inputs.is-latest || github.event.release.tag_name) && 'true' || 'false' }}
on:
release:
types:
- published
schedule:
- cron: "0 14 * * *" # every day at 6am PST
workflow_dispatch:
inputs:
is-latest:
description: Is this the latest release?
type: boolean
default: false
tag:
type: string
description: What is the release tag? (e.g. "1.0.2", "canary")
required: true
use-docker:
description: Should Docker images be released?
type: boolean
default: false
use-npm:
description: Should npm packages be published?
type: boolean
default: false
use-homebrew:
description: Should binaries be released to Homebrew?
type: boolean
default: false
use-s3:
description: Should binaries be uploaded to S3?
type: boolean
default: false
use-types:
description: Should types be released to npm?
type: boolean
default: false
use-definitelytyped:
description: "Should types be PR'd to DefinitelyTyped?"
type: boolean
default: false
jobs:
sign:
name: Sign Release
runs-on: ubuntu-latest
if: ${{ github.repository_owner == 'oven-sh' }}
permissions:
contents: write
defaults:
run:
working-directory: packages/bun-release
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup GPG
uses: crazy-max/ghaction-import-gpg@v5
with:
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
passphrase: ${{ secrets.GPG_PASSPHRASE }}
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: "1.2.3"
- name: Install Dependencies
run: bun install
- name: Sign Release
run: |
echo "$GPG_PASSPHRASE" | bun upload-assets -- "${{ env.BUN_VERSION }}"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
npm:
name: Release to NPM
runs-on: ubuntu-latest
needs: sign
if: ${{ github.event_name != 'workflow_dispatch' || github.event.inputs.use-npm == 'true' }}
permissions:
contents: read
defaults:
run:
working-directory: packages/bun-release
steps:
- name: Checkout
uses: actions/checkout@v4
with:
# To work around an issue
ref: main
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: "1.2.3"
- name: Install Dependencies
run: bun install
- name: Release
run: bun upload-npm -- "${{ env.BUN_VERSION }}" publish
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
npm-types:
name: Release types to NPM
runs-on: ubuntu-latest
needs: sign
if: ${{ github.event_name != 'workflow_dispatch' || github.event.inputs.use-types == 'true' }}
permissions:
contents: read
defaults:
run:
working-directory: packages/bun-types
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup Node.js
uses: actions/setup-node@v3
with:
node-version: latest
- name: Setup Bun
if: ${{ env.BUN_VERSION != 'canary' }}
uses: ./.github/actions/setup-bun
with:
bun-version: "1.2.3"
- name: Setup Bun
if: ${{ env.BUN_VERSION == 'canary' }}
uses: ./.github/actions/setup-bun
with:
bun-version: "canary" # Must be 'canary' so tag is correct
- name: Install Dependencies
run: bun install
- name: Setup Tag
if: ${{ env.BUN_VERSION == 'canary' }}
run: |
VERSION=$(bun --version)
TAG="${VERSION}-canary.$(date +'%Y%m%dT%H%M%S')"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- name: Build
run: bun run build
env:
BUN_VERSION: ${{ env.TAG || env.BUN_VERSION }}
- name: Release (canary)
if: ${{ env.BUN_VERSION == 'canary' }}
uses: JS-DevTools/npm-publish@v1
with:
package: packages/bun-types/package.json
token: ${{ secrets.NPM_TOKEN }}
tag: canary
- name: Release (latest)
if: ${{ env.BUN_LATEST == 'true' }}
uses: JS-DevTools/npm-publish@v1
with:
package: packages/bun-types/package.json
token: ${{ secrets.NPM_TOKEN }}
definitelytyped:
name: Make PR to DefinitelyTyped to update `bun-types` version
runs-on: ubuntu-latest
needs: npm-types
if: ${{ github.event_name == 'release' || github.event.inputs.use-definitelytyped == 'true' }}
permissions:
contents: read
steps:
- name: Checkout (DefinitelyTyped)
uses: actions/checkout@v4
with:
repository: DefinitelyTyped/DefinitelyTyped
- name: Checkout (bun)
uses: actions/checkout@v4
with:
path: bun
- name: Setup Bun
uses: ./bun/.github/actions/setup-bun
with:
bun-version: "1.2.0"
- id: bun-version
run: echo "BUN_VERSION=${BUN_VERSION#bun-v}" >> "$GITHUB_OUTPUT"
- name: Update bun-types version in package.json
run: |
bun -e '
const file = Bun.file("./types/bun/package.json");
const json = await file.json();
const version = "${{ steps.bun-version.outputs.BUN_VERSION }}";
json.dependencies["bun-types"] = version;
json.version = version.slice(0, version.lastIndexOf(".")) + ".9999";
await file.write(JSON.stringify(json, null, 4) + "\n");
'
- name: Create Pull Request
uses: peter-evans/create-pull-request@v7
if: ${{ env.BUN_LATEST == 'true' && env.BUN_VERSION != 'canary' }}
with:
token: ${{ secrets.ROBOBUN_TOKEN }}
add-paths: ./types/bun/package.json
title: "[bun] update to ${{ steps.bun-version.outputs.BUN_VERSION }}"
commit-message: "[bun] update to ${{ steps.bun-version.outputs.BUN_VERSION }}"
body: |
Update `bun-types` version to ${{ steps.bun-version.outputs.BUN_VERSION }}
https://bun.com/blog/${{ env.BUN_VERSION }}
push-to-fork: oven-sh/DefinitelyTyped
branch: ${{ env.BUN_VERSION }}
docker:
name: Release to Dockerhub
runs-on: ubuntu-latest
needs: sign
if: ${{ github.event_name != 'workflow_dispatch' || github.event.inputs.use-docker == 'true' }}
permissions:
contents: read
strategy:
fail-fast: false
matrix:
include:
- variant: debian
suffix: ""
- variant: debian
suffix: -debian
- variant: slim
suffix: -slim
dir: debian-slim
- variant: alpine
suffix: -alpine
- variant: distroless
suffix: -distroless
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup Docker emulator
uses: docker/setup-qemu-action@v3
- id: buildx
name: Setup Docker buildx
uses: docker/setup-buildx-action@v3
with:
platforms: linux/amd64,linux/arm64
- id: metadata
name: Setup Docker metadata
uses: docker/metadata-action@v5
with:
images: oven/bun
flavor: |
latest=false
tags: |
type=raw,value=latest,enable=${{ env.BUN_LATEST == 'true' && matrix.suffix == '' }}
type=raw,value=${{ matrix.variant }},enable=${{ env.BUN_LATEST == 'true' }}
type=match,pattern=(bun-v)?(canary|\d+.\d+.\d+),group=2,value=${{ env.BUN_VERSION }},suffix=${{ matrix.suffix }}
type=match,pattern=(bun-v)?(canary|\d+.\d+),group=2,value=${{ env.BUN_VERSION }},suffix=${{ matrix.suffix }}
type=match,pattern=(bun-v)?(canary|\d+),group=2,value=${{ env.BUN_VERSION }},suffix=${{ matrix.suffix }}
- name: Login to Docker
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Push to Docker
uses: docker/build-push-action@v6
with:
context: ./dockerhub/${{ matrix.dir || matrix.variant }}
platforms: linux/amd64,linux/arm64
builder: ${{ steps.buildx.outputs.name }}
push: true
tags: ${{ steps.metadata.outputs.tags }}
labels: ${{ steps.metadata.outputs.labels }}
build-args: |
BUN_VERSION=${{ env.BUN_VERSION }}
homebrew:
name: Release to Homebrew
runs-on: ubuntu-latest
needs: sign
permissions:
contents: read
if: ${{ github.event_name == 'release' || github.event.inputs.use-homebrew == 'true' }}
steps:
- name: Checkout
uses: actions/checkout@v4
with:
repository: oven-sh/homebrew-bun
token: ${{ secrets.ROBOBUN_TOKEN }}
- id: gpg
name: Setup GPG
uses: crazy-max/ghaction-import-gpg@v5
with:
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
passphrase: ${{ secrets.GPG_PASSPHRASE }}
- name: Setup Ruby
uses: ruby/setup-ruby@v1
with:
ruby-version: "2.6"
- name: Update Tap
run: ruby scripts/release.rb "${{ env.BUN_VERSION }}"
- name: Commit Tap
uses: stefanzweifel/git-auto-commit-action@v4
with:
commit_options: --gpg-sign=${{ steps.gpg.outputs.keyid }}
commit_message: Release ${{ env.BUN_VERSION }}
commit_user_name: robobun
commit_user_email: robobun@oven.sh
commit_author: robobun <robobun@oven.sh>
s3:
name: Upload to S3
runs-on: ubuntu-latest
needs: sign
if: ${{ github.event_name != 'workflow_dispatch' || github.event.inputs.use-s3 == 'true' }}
permissions:
contents: read
defaults:
run:
working-directory: packages/bun-release
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: "1.2.0"
- name: Install Dependencies
run: bun install
- name: Release
run: bun upload-s3 -- "${{ env.BUN_VERSION }}"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
AWS_ENDPOINT: ${{ secrets.AWS_ENDPOINT }}
AWS_BUCKET: bun
notify-sentry:
name: Notify Sentry
runs-on: ubuntu-latest
needs: s3
steps:
- name: Notify Sentry
uses: getsentry/action-release@v1.7.0
env:
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
with:
ignore_missing: true
ignore_empty: true
version: ${{ env.BUN_VERSION }}
environment: production
bump:
name: "Bump version"
runs-on: ubuntu-latest
if: ${{ github.event_name != 'schedule' }}
permissions:
pull-requests: write
contents: write
steps:
- name: Checkout
uses: actions/checkout@v4
if: ${{ env.BUN_LATEST == 'true' }}
- name: Setup Bun
uses: ./.github/actions/setup-bun
if: ${{ env.BUN_LATEST == 'true' }}
with:
bun-version: "1.2.0"
- name: Bump version
uses: ./.github/actions/bump
if: ${{ env.BUN_LATEST == 'true' }}
with:
version: ${{ env.BUN_VERSION }}
token: ${{ github.token }}

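For readers of the Docker job above: the three `type=match` rules pull the version out of a `bun-v*` tag and emit progressively shorter tags. An illustration, not verified output (assuming `BUN_VERSION=bun-v1.2.3` and the `-alpine` matrix entry):

```bash
# Each rule matches regex group 2 against the value:
#   (bun-v)?(canary|\d+.\d+.\d+)  ->  oven/bun:1.2.3-alpine
#   (bun-v)?(canary|\d+.\d+)      ->  oven/bun:1.2-alpine
#   (bun-v)?(canary|\d+)          ->  oven/bun:1-alpine
# plus oven/bun:alpine (and oven/bun:latest for the "" suffix) when
# BUN_LATEST is 'true', via the type=raw rules.
```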
View File

@@ -1,30 +0,0 @@
name: Close inactive issues
on:
# schedule:
# - cron: "15 * * * *"
workflow_dispatch:
jobs:
close-issues:
runs-on: ubuntu-latest
permissions:
issues: write
pull-requests: write
steps:
- uses: actions/stale@v5
with:
days-before-issue-close: 5
any-of-issue-labels: "needs repro,waiting-for-author"
exempt-issue-labels: "neverstale"
exempt-pr-labels: "neverstale"
remove-stale-when-updated: true
stale-issue-label: "stale"
stale-pr-label: "stale"
stale-issue-message: "This issue is stale and may be closed due to inactivity. If you're still running into this, please leave a comment."
close-issue-message: "This issue was closed because it has been inactive for 5 days since being marked as stale."
days-before-pr-stale: 30
days-before-pr-close: 14
stale-pr-message: "This pull request is stale and may be closed due to inactivity."
close-pr-message: "This pull request has been closed due to inactivity."
repo-token: ${{ github.token }}
operations-per-run: 1000

View File

@@ -1,32 +0,0 @@
name: Test Bump version
on:
workflow_dispatch:
inputs:
version:
type: string
description: What is the release tag? (e.g. "1.0.2", "canary")
required: true
env:
BUN_VERSION: "1.2.0"
jobs:
bump:
name: "Bump version"
runs-on: ubuntu-latest
permissions:
pull-requests: write
contents: write
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: ${{ env.BUN_VERSION }}
- name: Bump version
uses: ./.github/actions/bump
with:
version: ${{ inputs.version }}
token: ${{ github.token }}

View File

@@ -1,19 +0,0 @@
name: Typos
on:
push:
branches:
- main
pull_request:
branches:
- main
jobs:
docs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Spellcheck
uses: crate-ci/typos@v1.29.4
with:
files: docs/**/*

View File

@@ -1,99 +0,0 @@
name: Update c-ares
on:
schedule:
- cron: "0 4 * * 0"
workflow_dispatch:
jobs:
check-update:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- uses: actions/checkout@v4
- name: Check c-ares version
id: check-version
run: |
set -euo pipefail
# Extract the commit hash from the line after COMMIT
CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildCares.cmake)
if [ -z "$CURRENT_VERSION" ]; then
echo "Error: Could not find COMMIT line in BuildCares.cmake"
exit 1
fi
# Validate that it looks like a git hash
if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
echo "Error: Invalid git hash format in BuildCares.cmake"
echo "Found: $CURRENT_VERSION"
echo "Expected: 40 character hexadecimal string"
exit 1
fi
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/c-ares/c-ares/releases/latest)
if [ -z "$LATEST_RELEASE" ]; then
echo "Error: Failed to fetch latest release from GitHub API"
exit 1
fi
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
echo "Error: Could not extract tag name from GitHub API response"
exit 1
fi
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/c-ares/c-ares/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1
fi
LATEST_SHA=$(curl -sL "https://api.github.com/repos/c-ares/c-ares/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
exit 1
fi
if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
echo "Error: Invalid SHA format received from GitHub"
echo "Found: $LATEST_SHA"
echo "Expected: 40 character hexadecimal string"
exit 1
fi
echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT
- name: Update version if needed
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
run: |
set -euo pipefail
# Handle multi-line format where COMMIT and its value are on separate lines
sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildCares.cmake
- name: Create Pull Request
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
uses: peter-evans/create-pull-request@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
add-paths: |
cmake/targets/BuildCares.cmake
commit-message: "deps: update c-ares to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update c-ares to ${{ steps.check-version.outputs.tag }}"
delete-branch: true
branch: deps/update-cares-${{ github.run_number }}
body: |
## What does this PR do?
Updates c-ares to version ${{ steps.check-version.outputs.tag }}
Compare: https://github.com/c-ares/c-ares/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-cares.yml)

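The six near-identical update-* workflows in this diff (c-ares, libarchive, libdeflate, lolhtml, lshpack, zstd) all use the same two-hop tag lookup, which is worth spelling out. A sketch using the c-ares endpoints from above:

```bash
# Why two API calls: an annotated tag ref points at a *tag object*, whose
# .object.sha is the commit the tag was cut from.
TAG_SHA=$(curl -sL "https://api.github.com/repos/c-ares/c-ares/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
COMMIT_SHA=$(curl -sL "https://api.github.com/repos/c-ares/c-ares/git/tags/$TAG_SHA" | jq -r '.object.sha')
# Caveat (assumption): for a lightweight tag, refs/tags already resolves to
# the commit and the second call would 404, so this relies on the upstream
# projects publishing annotated tags.
```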
View File

@@ -1,99 +0,0 @@
name: Update libarchive
on:
schedule:
- cron: "0 3 * * 0"
workflow_dispatch:
jobs:
check-update:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- uses: actions/checkout@v4
- name: Check libarchive version
id: check-version
run: |
set -euo pipefail
# Extract the commit hash from the line after COMMIT
CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildLibArchive.cmake)
if [ -z "$CURRENT_VERSION" ]; then
echo "Error: Could not find COMMIT line in BuildLibArchive.cmake"
exit 1
fi
# Validate that it looks like a git hash
if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
echo "Error: Invalid git hash format in BuildLibArchive.cmake"
echo "Found: $CURRENT_VERSION"
echo "Expected: 40 character hexadecimal string"
exit 1
fi
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/libarchive/libarchive/releases/latest)
if [ -z "$LATEST_RELEASE" ]; then
echo "Error: Failed to fetch latest release from GitHub API"
exit 1
fi
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
echo "Error: Could not extract tag name from GitHub API response"
exit 1
fi
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/libarchive/libarchive/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1
fi
LATEST_SHA=$(curl -sL "https://api.github.com/repos/libarchive/libarchive/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
exit 1
fi
if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
echo "Error: Invalid SHA format received from GitHub"
echo "Found: $LATEST_SHA"
echo "Expected: 40 character hexadecimal string"
exit 1
fi
echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT
- name: Update version if needed
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
run: |
set -euo pipefail
# Handle multi-line format where COMMIT and its value are on separate lines
sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildLibArchive.cmake
- name: Create Pull Request
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
uses: peter-evans/create-pull-request@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
add-paths: |
cmake/targets/BuildLibArchive.cmake
commit-message: "deps: update libarchive to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update libarchive to ${{ steps.check-version.outputs.tag }}"
delete-branch: true
branch: deps/update-libarchive-${{ github.run_number }}
body: |
## What does this PR do?
Updates libarchive to version ${{ steps.check-version.outputs.tag }}
Compare: https://github.com/libarchive/libarchive/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-libarchive.yml)

View File

@@ -1,99 +0,0 @@
name: Update libdeflate
on:
schedule:
- cron: "0 2 * * 0"
workflow_dispatch:
jobs:
check-update:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- uses: actions/checkout@v4
- name: Check libdeflate version
id: check-version
run: |
set -euo pipefail
# Extract the commit hash from the line after COMMIT
CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildLibDeflate.cmake)
if [ -z "$CURRENT_VERSION" ]; then
echo "Error: Could not find COMMIT line in BuildLibDeflate.cmake"
exit 1
fi
# Validate that it looks like a git hash
if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
echo "Error: Invalid git hash format in BuildLibDeflate.cmake"
echo "Found: $CURRENT_VERSION"
echo "Expected: 40 character hexadecimal string"
exit 1
fi
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/ebiggers/libdeflate/releases/latest)
if [ -z "$LATEST_RELEASE" ]; then
echo "Error: Failed to fetch latest release from GitHub API"
exit 1
fi
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
echo "Error: Could not extract tag name from GitHub API response"
exit 1
fi
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/ebiggers/libdeflate/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1
fi
LATEST_SHA=$(curl -sL "https://api.github.com/repos/ebiggers/libdeflate/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
exit 1
fi
if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
echo "Error: Invalid SHA format received from GitHub"
echo "Found: $LATEST_SHA"
echo "Expected: 40 character hexadecimal string"
exit 1
fi
echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT
- name: Update version if needed
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
run: |
set -euo pipefail
# Handle multi-line format where COMMIT and its value are on separate lines
sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildLibDeflate.cmake
- name: Create Pull Request
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
uses: peter-evans/create-pull-request@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
add-paths: |
cmake/targets/BuildLibDeflate.cmake
commit-message: "deps: update libdeflate to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update libdeflate to ${{ steps.check-version.outputs.tag }}"
delete-branch: true
branch: deps/update-libdeflate-${{ github.run_number }}
body: |
## What does this PR do?
Updates libdeflate to version ${{ steps.check-version.outputs.tag }}
Compare: https://github.com/ebiggers/libdeflate/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-libdeflate.yml)

View File

@@ -1,99 +0,0 @@
name: Update lolhtml
on:
schedule:
- cron: "0 1 * * 0"
workflow_dispatch:
jobs:
check-update:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- uses: actions/checkout@v4
- name: Check lolhtml version
id: check-version
run: |
set -euo pipefail
# Extract the commit hash from the line after COMMIT
CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildLolHtml.cmake)
if [ -z "$CURRENT_VERSION" ]; then
echo "Error: Could not find COMMIT line in BuildLolHtml.cmake"
exit 1
fi
# Validate that it looks like a git hash
if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
echo "Error: Invalid git hash format in BuildLolHtml.cmake"
echo "Found: $CURRENT_VERSION"
echo "Expected: 40 character hexadecimal string"
exit 1
fi
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/cloudflare/lol-html/releases/latest)
if [ -z "$LATEST_RELEASE" ]; then
echo "Error: Failed to fetch latest release from GitHub API"
exit 1
fi
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
echo "Error: Could not extract tag name from GitHub API response"
exit 1
fi
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1
fi
LATEST_SHA=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
exit 1
fi
if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
echo "Error: Invalid SHA format received from GitHub"
echo "Found: $LATEST_SHA"
echo "Expected: 40 character hexadecimal string"
exit 1
fi
echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT
- name: Update version if needed
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
run: |
set -euo pipefail
# Handle multi-line format where COMMIT and its value are on separate lines
sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildLolHtml.cmake
- name: Create Pull Request
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
uses: peter-evans/create-pull-request@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
add-paths: |
cmake/targets/BuildLolHtml.cmake
commit-message: "deps: update lolhtml to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update lolhtml to ${{ steps.check-version.outputs.tag }}"
delete-branch: true
branch: deps/update-lolhtml-${{ github.run_number }}
body: |
## What does this PR do?
Updates lolhtml to version ${{ steps.check-version.outputs.tag }}
Compare: https://github.com/cloudflare/lol-html/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-lolhtml.yml)

View File

@@ -1,99 +0,0 @@
name: Update lshpack
on:
schedule:
- cron: "0 5 * * 0"
workflow_dispatch:
jobs:
check-update:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- uses: actions/checkout@v4
- name: Check lshpack version
id: check-version
run: |
set -euo pipefail
# Extract the commit hash from the line after COMMIT
CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildLshpack.cmake)
if [ -z "$CURRENT_VERSION" ]; then
echo "Error: Could not find COMMIT line in BuildLshpack.cmake"
exit 1
fi
# Validate that it looks like a git hash
if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
echo "Error: Invalid git hash format in BuildLshpack.cmake"
echo "Found: $CURRENT_VERSION"
echo "Expected: 40 character hexadecimal string"
exit 1
fi
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/litespeedtech/ls-hpack/releases/latest)
if [ -z "$LATEST_RELEASE" ]; then
echo "Error: Failed to fetch latest release from GitHub API"
exit 1
fi
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
echo "Error: Could not extract tag name from GitHub API response"
exit 1
fi
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1
fi
LATEST_SHA=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
exit 1
fi
if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
echo "Error: Invalid SHA format received from GitHub"
echo "Found: $LATEST_SHA"
echo "Expected: 40 character hexadecimal string"
exit 1
fi
echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT
- name: Update version if needed
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
run: |
set -euo pipefail
# Handle multi-line format where COMMIT and its value are on separate lines
sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildLshpack.cmake
- name: Create Pull Request
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
uses: peter-evans/create-pull-request@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
add-paths: |
cmake/targets/BuildLshpack.cmake
commit-message: "deps: update lshpack to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update lshpack to ${{ steps.check-version.outputs.tag }}"
delete-branch: true
branch: deps/update-lshpack-${{ github.run_number }}
body: |
## What does this PR do?
Updates lshpack to version ${{ steps.check-version.outputs.tag }}
Compare: https://github.com/litespeedtech/ls-hpack/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-lshpack.yml)

View File

@@ -1,82 +0,0 @@
name: Daily Root Certs Update Check
on:
schedule:
- cron: "0 0 * * *" # Runs at 00:00 UTC every day
workflow_dispatch: # Allows manual trigger
jobs:
check-and-update:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: latest
- name: Generate root certs and capture output
id: generate-certs
run: |
cd packages/bun-usockets/
OUTPUT=$(bun generate-root-certs.mjs -v)
echo "cert_output<<EOF" >> $GITHUB_ENV
echo "$OUTPUT" >> $GITHUB_ENV
echo "EOF" >> $GITHUB_ENV
- name: Check for changes and stage files
id: check-changes
run: |
if [[ -n "$(git status --porcelain)" ]]; then
echo "Found changes, staging modified files..."
git config --global user.name "github-actions[bot]"
git config --global user.email "github-actions[bot]@users.noreply.github.com"
# Get list of modified files and add them
git status --porcelain | while read -r status file; do
# Remove leading status and whitespace
file=$(echo "$file" | sed 's/^.* //')
echo "Adding changed file: $file"
git add "$file"
done
echo "changes=true" >> $GITHUB_OUTPUT
# Store the list of changed files
CHANGED_FILES=$(git status --porcelain)
echo "changed_files<<EOF" >> $GITHUB_ENV
echo "$CHANGED_FILES" >> $GITHUB_ENV
echo "EOF" >> $GITHUB_ENV
else
echo "No changes detected"
echo "changes=false" >> $GITHUB_OUTPUT
fi
- name: Create Pull Request
if: steps.check-changes.outputs.changes == 'true'
uses: peter-evans/create-pull-request@v5
with:
token: ${{ secrets.GITHUB_TOKEN }}
commit-message: "update(root_certs): Update root certificates $(date +'%Y-%m-%d')"
title: "update(root_certs) $(date +'%Y-%m-%d')"
body: |
Automated root certificates update
${{ env.cert_output }}
## Changed Files:
```
${{ env.changed_files }}
```
branch: certs/update-root-certs-${{ github.run_number }}
base: main
delete-branch: true
labels: |
  automation
  root-certs

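Two fragile spots in the root-certs workflow above are worth flagging. First, `with:` inputs are plain strings: `${{ }}` expressions are expanded, but shell command substitution is not, so `$(date +'%Y-%m-%d')` in the commit message and title is passed literally. Second, the `git status --porcelain` parsing loop breaks on paths containing spaces and on rename entries. A sketch of both fixes (the step id `check-changes` is the one defined above):

```bash
# Compute the date in a step and reference it from `with:` afterwards:
echo "today=$(date +'%Y-%m-%d')" >> "$GITHUB_OUTPUT"
# ...then: title: "update(root_certs) ${{ steps.check-changes.outputs.today }}"

# Since every change should be staged anyway, skip porcelain parsing entirely:
git add -A
```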
View File

@@ -1,111 +0,0 @@
name: Update SQLite3
on:
schedule:
- cron: "0 6 * * 0" # Run weekly
workflow_dispatch:
jobs:
check-update:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- uses: actions/checkout@v4
- name: Check SQLite version
id: check-version
run: |
set -euo pipefail
# Get current version from the header file using SQLITE_VERSION_NUMBER
CURRENT_VERSION_NUM=$(grep -o '#define SQLITE_VERSION_NUMBER [0-9]\+' src/bun.js/bindings/sqlite/sqlite3_local.h | awk '{print $3}' | tr -d '\n\r')
if [ -z "$CURRENT_VERSION_NUM" ]; then
echo "Error: Could not find SQLITE_VERSION_NUMBER in sqlite3_local.h"
exit 1
fi
# Convert numeric version to semantic version for display
CURRENT_MAJOR=$((CURRENT_VERSION_NUM / 1000000))
CURRENT_MINOR=$((($CURRENT_VERSION_NUM / 1000) % 1000))
CURRENT_PATCH=$((CURRENT_VERSION_NUM % 1000))
CURRENT_VERSION="$CURRENT_MAJOR.$CURRENT_MINOR.$CURRENT_PATCH"
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
echo "current_num=$CURRENT_VERSION_NUM" >> $GITHUB_OUTPUT
# Fetch SQLite download page
DOWNLOAD_PAGE=$(curl -sL https://sqlite.org/download.html)
if [ -z "$DOWNLOAD_PAGE" ]; then
echo "Error: Failed to fetch SQLite download page"
exit 1
fi
# Extract latest version and year from the amalgamation link
LATEST_INFO=$(echo "$DOWNLOAD_PAGE" | grep -o 'sqlite-amalgamation-[0-9]\{7\}.zip' | head -n1)
LATEST_YEAR=$(echo "$DOWNLOAD_PAGE" | grep -o '[0-9]\{4\}/sqlite-amalgamation-[0-9]\{7\}.zip' | head -n1 | cut -d'/' -f1 | tr -d '\n\r')
LATEST_VERSION_NUM=$(echo "$LATEST_INFO" | grep -o '[0-9]\{7\}' | tr -d '\n\r')
if [ -z "$LATEST_VERSION_NUM" ] || [ -z "$LATEST_YEAR" ]; then
echo "Error: Could not extract latest version info"
exit 1
fi
# Convert numeric version to semantic version for display
LATEST_MAJOR=$((10#$LATEST_VERSION_NUM / 1000000))
LATEST_MINOR=$(((10#$LATEST_VERSION_NUM / 10000) % 100))
LATEST_PATCH=$(((10#$LATEST_VERSION_NUM / 100) % 100))
LATEST_VERSION="$LATEST_MAJOR.$LATEST_MINOR.$LATEST_PATCH"
echo "latest=$LATEST_VERSION" >> $GITHUB_OUTPUT
echo "latest_year=$LATEST_YEAR" >> $GITHUB_OUTPUT
echo "latest_num=$LATEST_VERSION_NUM" >> $GITHUB_OUTPUT
# Debug output
echo "Current version: $CURRENT_VERSION ($CURRENT_VERSION_NUM)"
echo "Latest version: $LATEST_VERSION ($LATEST_VERSION_NUM)"
- name: Update SQLite if needed
if: success() && steps.check-version.outputs.current_num < steps.check-version.outputs.latest_num
run: |
set -euo pipefail
TEMP_DIR=$(mktemp -d)
cd $TEMP_DIR
echo "Downloading from: https://sqlite.org/${{ steps.check-version.outputs.latest_year }}/sqlite-amalgamation-${{ steps.check-version.outputs.latest_num }}.zip"
# Download and extract latest version
wget "https://sqlite.org/${{ steps.check-version.outputs.latest_year }}/sqlite-amalgamation-${{ steps.check-version.outputs.latest_num }}.zip"
unzip "sqlite-amalgamation-${{ steps.check-version.outputs.latest_num }}.zip"
cd "sqlite-amalgamation-${{ steps.check-version.outputs.latest_num }}"
# Add header comment and copy files
echo "// clang-format off" > $GITHUB_WORKSPACE/src/bun.js/bindings/sqlite/sqlite3.c
cat sqlite3.c >> $GITHUB_WORKSPACE/src/bun.js/bindings/sqlite/sqlite3.c
echo "// clang-format off" > $GITHUB_WORKSPACE/src/bun.js/bindings/sqlite/sqlite3_local.h
cat sqlite3.h >> $GITHUB_WORKSPACE/src/bun.js/bindings/sqlite/sqlite3_local.h
- name: Create Pull Request
if: success() && steps.check-version.outputs.current_num < steps.check-version.outputs.latest_num
uses: peter-evans/create-pull-request@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
add-paths: |
src/bun.js/bindings/sqlite/sqlite3.c
src/bun.js/bindings/sqlite/sqlite3_local.h
commit-message: "deps: update sqlite to ${{ steps.check-version.outputs.latest }}"
title: "deps: update sqlite to ${{ steps.check-version.outputs.latest }}"
delete-branch: true
branch: deps/update-sqlite-${{ steps.check-version.outputs.latest }}
body: |
## What does this PR do?
Updates SQLite to version ${{ steps.check-version.outputs.latest }}
Compare: https://sqlite.org/src/vdiff?from=${{ steps.check-version.outputs.current }}&to=${{ steps.check-version.outputs.latest }}
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-sqlite3.yml)

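A note on the two 7-digit version encodings this workflow juggles, since they differ (a sketch; encodings as documented by SQLite):

```bash
# header SQLITE_VERSION_NUMBER: M*1000000 + m*1000 + p   -> 3.47.2 = 3047002
# download filename digits:     M, 2-digit minor, 2-digit patch, "00"
#                                                        -> 3.47.2 = 3470200
header=3047002
echo "$((header / 1000000)).$(( (header / 1000) % 1000 )).$((header % 1000))"            # 3.47.2
dl=3470200
echo "$((10#$dl / 1000000)).$(( (10#$dl / 10000) % 100 )).$(( (10#$dl / 100) % 100 ))"   # 3.47.2
# Because the encodings differ, comparing current_num against latest_num
# directly (as the `if:` conditions above do) can misfire; comparing the
# decoded dotted versions would be safer.
```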
View File

@@ -1,99 +0,0 @@
name: Update zstd
on:
schedule:
- cron: "0 1 * * 0"
workflow_dispatch:
jobs:
check-update:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- uses: actions/checkout@v4
- name: Check zstd version
id: check-version
run: |
set -euo pipefail
# Extract the commit hash from the line after COMMIT
CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/CloneZstd.cmake)
if [ -z "$CURRENT_VERSION" ]; then
echo "Error: Could not find COMMIT line in CloneZstd.cmake"
exit 1
fi
# Validate that it looks like a git hash
if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
echo "Error: Invalid git hash format in CloneZstd.cmake"
echo "Found: $CURRENT_VERSION"
echo "Expected: 40 character hexadecimal string"
exit 1
fi
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/facebook/zstd/releases/latest)
if [ -z "$LATEST_RELEASE" ]; then
echo "Error: Failed to fetch latest release from GitHub API"
exit 1
fi
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
echo "Error: Could not extract tag name from GitHub API response"
exit 1
fi
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/facebook/zstd/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1
fi
LATEST_SHA=$(curl -sL "https://api.github.com/repos/facebook/zstd/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
exit 1
fi
if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
echo "Error: Invalid SHA format received from GitHub"
echo "Found: $LATEST_SHA"
echo "Expected: 40 character hexadecimal string"
exit 1
fi
echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT
- name: Update version if needed
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
run: |
set -euo pipefail
# Handle multi-line format where COMMIT and its value are on separate lines
sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/CloneZstd.cmake
- name: Create Pull Request
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
uses: peter-evans/create-pull-request@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
add-paths: |
cmake/targets/CloneZstd.cmake
commit-message: "deps: update zstd to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update zstd to ${{ steps.check-version.outputs.tag }}"
delete-branch: true
branch: deps/update-zstd-${{ github.run_number }}
body: |
## What does this PR do?
Updates zstd to version ${{ steps.check-version.outputs.tag }}
Compare: https://github.com/facebook/zstd/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-zstd.yml)

355
.gitignore vendored
View File

@@ -1,187 +1,168 @@
.DS_Store
.env
.envrc
.eslintcache
.idea
.next
.ninja_deps
.ninja_log
.npm
.npm.gz
.parcel-cache
.swcrc
.trace
.uuid
.vs
.vscode/clang*
.vscode/cpp*
.zig-cache
.bake-debug
*.a
*.bc
*.big
*.blob
*.bun
*.crash
*.database
*.db
*.dmg
*.dSYM
*.generated.ts
*.jsb
*.lib
*.log
*.o
*.out.js
*.out.refresh.js
*.pdb
*.sqlite
*.swp
*.tmp
*.trace
*.wat
*.zip
**/.verdaccio-db.json
**/*.dir
**/*.pdb
**/*.sln*
**/*.vcxproj*
**/package-lock.json
/.cache
/.webkit-cache
/build-*/
/bun-webkit
/kcov-out
/test-report.json
/test-report.md
/test.js
/test.ts
/test.zig
/testdir
build
build.ninja
bun-binary
bun-mimalloc
bun-nomimalloc
bun-singlehtreaded
bun-test-scratch
bun-zigld
cmake_install.cmake
CMakeCache.txt
CMakeFiles
cold-jsc-start
cold-jsc-start.d
compile_commands.json
cover
coverage
coverv
dist
esbuilddir
examples/lotta-modules/bun-nofscache
examples/lotta-modules/bun-old
examples/lotta-modules/bun-yday
failing-tests.txt
github
make-dev-stats.csv
misctools/fetch
misctools/machbench
misctools/sha
myscript.sh
node_modules
node_modules_*
out
out.*
outcss
outdir
outdir/
packages/*/*.wasm
packages/bun-*/*.o
packages/bun-*/bun
packages/bun-*/bun-profile
packages/bun-*/debug-bun
packages/bun-cli/bin/*
packages/bun-cli/postinstall.js
packages/bun-wasm/*.cjs
packages/bun-wasm/*.d.cts
packages/bun-wasm/*.d.mts
packages/bun-wasm/*.d.ts
packages/bun-wasm/*.js
packages/bun-wasm/*.map
packages/bun-wasm/*.mjs
packages/debug-*
parceldist
pnpm-lock.yaml
profile.json
README.md.template
release/
scripts/env.local
sign.*.json
sign.json
src/bake/generated.ts
src/generated_enum_extractor.zig
src/bun.js/bindings-obj
src/bun.js/bindings/GeneratedJS2Native.zig
src/bun.js/bindings/GeneratedBindings.zig
src/bun.js/debug-bindings-obj
src/deps/zig-clap/.gitattributes
src/deps/zig-clap/.github
src/deps/zig-clap/example
src/deps/zig-clap/README.md
src/fallback.version
src/js/out/DebugPath.h
src/js/out/functions*
src/js/out/modules*
src/js/out/tmp
src/node-fallbacks/node_modules
src/node-fallbacks/out/*
src/runtime.version
src/tests.zig
test.txt
test/js/bun/glob/fixtures
test/node.js/upstream
tsconfig.tsbuildinfo
txt.js
x64
yarn.lock
zig-cache
zig-out
test/node.js/upstream
.zig-cache
scripts/env.local
*.generated.ts
src/bake/generated.ts
test/cli/install/registry/packages/publish-pkg-*
test/cli/install/registry/packages/@secret/publish-pkg-8
test/js/third_party/prisma/prisma/sqlite/dev.db-journal
tmp
codegen-for-zig-team.tar.gz
# Dependencies
/vendor
# Dependencies (before CMake)
# These can be removed in the far future
/src/bun.js/WebKit
/src/deps/boringssl
/src/deps/brotli
/src/deps/c*ares
/src/deps/libarchive
/src/deps/libdeflate
/src/deps/libuv
/src/deps/lol*html
/src/deps/ls*hpack
/src/deps/mimalloc
/src/deps/picohttpparser
/src/deps/tinycc
/src/deps/WebKit
/src/deps/zig
/src/deps/zlib
/src/deps/zstd
# Generated files
.buildkite/ci.yml
*.sock
scratch*.{js,ts,tsx,cjs,mjs}
*.bun-build
.DS_Store
zig-cache
packages/*/*.wasm
*.o
*.a
profile.json
.env
node_modules
.envrc
.swcrc
yarn.lock
dist
*.tmp
*.log
*.out.js
*.out.refresh.js
**/package-lock.json
build
*.wat
zig-out
pnpm-lock.yaml
README.md.template
src/deps/zig-clap/example
src/deps/zig-clap/README.md
src/deps/zig-clap/.github
src/deps/zig-clap/.gitattributes
out
outdir
.trace
cover
coverage
coverv
*.trace
github
out.*
out
.parcel-cache
esbuilddir
*.bun
parceldist
esbuilddir
outdir/
outcss
.next
txt.js
.idea
.vscode/cpp*
.vscode/clang*
node_modules_*
*.jsb
*.zip
bun-zigld
bun-singlehtreaded
bun-nomimalloc
bun-mimalloc
examples/lotta-modules/bun-yday
examples/lotta-modules/bun-old
examples/lotta-modules/bun-nofscache
src/node-fallbacks/out/*
src/node-fallbacks/node_modules
sign.json
release/
*.dmg
sign.*.json
packages/debug-*
packages/bun-cli/postinstall.js
packages/bun-*/bun
packages/bun-*/bun-profile
packages/bun-*/debug-bun
packages/bun-*/*.o
packages/bun-cli/postinstall.js
packages/bun-cli/bin/*
bun-test-scratch
misctools/fetch
src/deps/libiconv
src/deps/openssl
src/tests.zig
*.blob
src/deps/s2n-tls
.npm
.npm.gz
bun-binary
src/deps/PLCrashReporter/
*.dSYM
*.crash
misctools/sha
packages/bun-wasm/*.mjs
packages/bun-wasm/*.cjs
packages/bun-wasm/*.map
packages/bun-wasm/*.js
packages/bun-wasm/*.d.ts
packages/bun-wasm/*.d.cts
packages/bun-wasm/*.d.mts
*.bc
src/fallback.version
src/runtime.version
*.sqlite
*.database
*.db
misctools/machbench
*.big
.eslintcache
/bun-webkit
src/deps/c-ares/build
src/bun.js/bindings-obj
src/bun.js/debug-bindings-obj
failing-tests.txt
test.txt
myscript.sh
cold-jsc-start
cold-jsc-start.d
/testdir
/test.ts
/test.js
src/js/out/modules*
src/js/out/functions*
src/js/out/tmp
src/js/out/DebugPath.h
make-dev-stats.csv
.uuid
tsconfig.tsbuildinfo
test/js/bun/glob/fixtures
*.lib
*.pdb
CMakeFiles
build.ninja
.ninja_deps
.ninja_log
CMakeCache.txt
cmake_install.cmake
compile_commands.json
*.lib
x64
**/*.vcxproj*
**/*.sln*
**/*.dir
**/*.pdb
/.webkit-cache
/.cache
/src/deps/libuv
/build-*/
.vs
**/.verdaccio-db.json
/test-report.md
/test-report.json

85
.gitmodules vendored Normal file
View File

@@ -0,0 +1,85 @@
[submodule "src/javascript/jsc/WebKit"]
path = src/bun.js/WebKit
url = https://github.com/oven-sh/WebKit.git
ignore = dirty
depth = 1
update = none
shallow = true
fetchRecurseSubmodules = false
[submodule "src/deps/picohttpparser"]
path = src/deps/picohttpparser
url = https://github.com/h2o/picohttpparser.git
ignore = dirty
depth = 1
shallow = true
fetchRecurseSubmodules = false
[submodule "src/deps/mimalloc"]
path = src/deps/mimalloc
url = https://github.com/Jarred-Sumner/mimalloc.git
ignore = dirty
depth = 1
shallow = true
fetchRecurseSubmodules = false
[submodule "src/deps/zlib"]
path = src/deps/zlib
url = https://github.com/cloudflare/zlib.git
ignore = dirty
depth = 1
shallow = true
fetchRecurseSubmodules = false
[submodule "src/deps/libarchive"]
path = src/deps/libarchive
url = https://github.com/libarchive/libarchive.git
ignore = dirty
depth = 1
shallow = true
fetchRecurseSubmodules = false
[submodule "src/deps/boringssl"]
path = src/deps/boringssl
url = https://github.com/oven-sh/boringssl.git
ignore = dirty
depth = 1
shallow = true
fetchRecurseSubmodules = false
[submodule "src/deps/lol-html"]
path = src/deps/lol-html
url = https://github.com/cloudflare/lol-html
ignore = dirty
depth = 1
shallow = true
fetchRecurseSubmodules = false
[submodule "src/deps/tinycc"]
path = src/deps/tinycc
url = https://github.com/Jarred-Sumner/tinycc.git
ignore = dirty
depth = 1
shallow = true
fetchRecurseSubmodules = false
[submodule "src/deps/c-ares"]
path = src/deps/c-ares
url = https://github.com/c-ares/c-ares.git
ignore = dirty
depth = 1
shallow = true
fetchRecurseSubmodules = false
[submodule "src/deps/zstd"]
path = src/deps/zstd
url = https://github.com/facebook/zstd.git
ignore = dirty
depth = 1
shallow = true
fetchRecurseSubmodules = false
[submodule "src/deps/base64"]
path = src/deps/base64
url = https://github.com/aklomp/base64.git
ignore = dirty
depth = 1
shallow = true
fetchRecurseSubmodules = false
[submodule "src/deps/ls-hpack"]
path = src/deps/ls-hpack
url = https://github.com/litespeedtech/ls-hpack.git
ignore = dirty
depth = 1
shallow = true
fetchRecurseSubmodules = false

.lldbinit

@@ -1 +0,0 @@
command source -C -s true -e true misctools/lldb/init.lldb

.mailmap

@@ -1,2 +0,0 @@
# To learn more about git's mailmap: https://ntietz.com/blog/git-mailmap-for-name-changes
chloe caruso <git@paperclover.net> <me@paperdave.net>

.prettierignore

@@ -1,10 +1,5 @@
src/bun.js/WebKit
vendor
src/deps
test/snapshots
test/js/deno
test/node.js
src/react-refresh.js
*.min.js
test/snippets
test/js/node/test
bun.lock

.prettierrc

@@ -5,15 +5,6 @@
"useTabs": false,
"quoteProps": "preserve",
"overrides": [
{
"files": [".vscode/*.json"],
"options": {
"parser": "jsonc",
"quoteProps": "preserve",
"singleQuote": false,
"trailingComma": "all"
}
},
{
"files": ["*.md"],
"options": {

View File

@@ -1,2 +0,0 @@
[type.md]
extend-ignore-words-re = ["^ba"]

.vscode/c_cpp_properties.json

@@ -3,21 +3,21 @@
{
"name": "Debug",
"forcedInclude": ["${workspaceFolder}/src/bun.js/bindings/root.h"],
"compileCommands": "${workspaceFolder}/build/debug/compile_commands.json",
"compileCommands": "${workspaceFolder}/build/compile_commands.json",
"includePath": [
"${workspaceFolder}/build/bun-webkit/include",
"${workspaceFolder}/build/debug/codegen",
"${workspaceFolder}/build/codegen",
"${workspaceFolder}/src/bun.js/bindings/",
"${workspaceFolder}/src/bun.js/bindings/webcore/",
"${workspaceFolder}/src/bun.js/bindings/sqlite/",
"${workspaceFolder}/src/bun.js/bindings/webcrypto/",
"${workspaceFolder}/src/bun.js/modules/",
"${workspaceFolder}/src/js/builtins/",
"${workspaceFolder}/vendor/boringssl/include/",
"${workspaceFolder}/vendor",
"${workspaceFolder}/src/deps/boringssl/include/",
"${workspaceFolder}/src/deps",
"${workspaceFolder}/src/napi/*",
"${workspaceFolder}/packages/bun-usockets/src",
"${workspaceFolder}/packages/",
"${workspaceFolder}/packages/"
],
"browse": {
"path": [
@@ -26,14 +26,14 @@
"${workspaceFolder}/src/napi/*",
"${workspaceFolder}/src/js/builtins/*",
"${workspaceFolder}/src/bun.js/modules/*",
"${workspaceFolder}/vendor/*",
"${workspaceFolder}/vendor/boringssl/include/*",
"${workspaceFolder}/src/deps/*",
"${workspaceFolder}/src/deps/boringssl/include/*",
"${workspaceFolder}/packages/bun-usockets/*",
"${workspaceFolder}/packages/bun-uws/*",
"${workspaceFolder}/src/napi/*",
"${workspaceFolder}/src/napi/*"
],
"limitSymbolsToIncludedHeaders": true,
"databaseFilename": ".vscode/cppdb",
"databaseFilename": ".vscode/cppdb"
},
"defines": [
"STATICALLY_LINKED_WITH_JavaScriptCore=1",
@@ -44,23 +44,22 @@
"BUILDING_JSCONLY__",
"USE_FOUNDATION=1",
"ASSERT_ENABLED=1",
"DU_DISABLE_RENAMING=1",
"DU_DISABLE_RENAMING=1"
],
"macFrameworkPath": [],
"compilerPath": "${workspaceFolder}/.vscode/clang++",
"cStandard": "c17",
"cppStandard": "c++20",
"cppStandard": "c++20"
},
{
"name": "BunWithJSCDebug",
"forcedInclude": ["${workspaceFolder}/src/bun.js/bindings/root.h"],
"includePath": [
"${workspaceFolder}/build/debug/codegen",
"${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/",
"${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/ICU/Headers/",
"${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/JavaScriptCore/PrivateHeaders/",
"${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/WTF/Headers",
"${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/bmalloc/Headers/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/ICU/Headers/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/JavaScriptCore/PrivateHeaders/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/WTF/Headers",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/bmalloc/Headers/",
"${workspaceFolder}/src/bun.js/bindings/",
"${workspaceFolder}/src/bun.js/bindings/webcore/",
"${workspaceFolder}/src/bun.js/bindings/sqlite/",
@@ -68,19 +67,19 @@
"${workspaceFolder}/src/bun.js/modules/",
"${workspaceFolder}/src/js/builtins/",
"${workspaceFolder}/src/js/out",
"${workspaceFolder}/vendor/boringssl/include/",
"${workspaceFolder}/vendor",
"${workspaceFolder}/src/deps/boringssl/include/",
"${workspaceFolder}/src/deps",
"${workspaceFolder}/src/napi/*",
"${workspaceFolder}/packages/bun-usockets/src",
"${workspaceFolder}/packages/",
"${workspaceFolder}/packages/"
],
"browse": {
"path": [
"${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/",
"${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/ICU/Headers/",
"${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/JavaScriptCore/PrivateHeaders/**",
"${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/WTF/Headers/**",
"${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/bmalloc/Headers/**",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/ICU/Headers/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/JavaScriptCore/PrivateHeaders/**",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/WTF/Headers/**",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/bmalloc/Headers/**",
"${workspaceFolder}/src/bun.js/bindings/*",
"${workspaceFolder}/src/bun.js/bindings/*",
"${workspaceFolder}/src/napi/*",
@@ -90,14 +89,14 @@
"${workspaceFolder}/src/js/builtins/*",
"${workspaceFolder}/src/js/out/*",
"${workspaceFolder}/src/bun.js/modules/*",
"${workspaceFolder}/vendor",
"${workspaceFolder}/vendor/boringssl/include/",
"${workspaceFolder}/src/deps",
"${workspaceFolder}/src/deps/boringssl/include/",
"${workspaceFolder}/packages/bun-usockets/",
"${workspaceFolder}/packages/bun-uws/",
"${workspaceFolder}/src/napi",
"${workspaceFolder}/src/napi"
],
"limitSymbolsToIncludedHeaders": true,
"databaseFilename": ".vscode/cppdb_debug",
"databaseFilename": ".vscode/cppdb_debug"
},
"defines": [
"STATICALLY_LINKED_WITH_JavaScriptCore=1",
@@ -108,13 +107,13 @@
"BUILDING_JSCONLY__",
"USE_FOUNDATION=1",
"ASSERT_ENABLED=1",
"DU_DISABLE_RENAMING=1",
"DU_DISABLE_RENAMING=1"
],
"macFrameworkPath": [],
"compilerPath": "${workspaceFolder}/.vscode/clang++",
"cStandard": "c17",
"cppStandard": "c++20",
},
"cppStandard": "c++20"
}
],
"version": 4,
"version": 4
}

.vscode/extensions.json

@@ -28,6 +28,6 @@
"tamasfe.even-better-toml",
// Other
"bierner.comment-tagged-templates",
],
"bierner.comment-tagged-templates"
]
}

.vscode/launch.json
File diff suppressed because it is too large.

.vscode/settings.json

@@ -12,11 +12,8 @@
"search.exclude": {
"node_modules": true,
".git": true,
"vendor/*/**": true,
"test/node.js/upstream": true,
// This will fill up your whole search history.
"test/js/node/test/fixtures": true,
"test/js/node/test/common": true,
"src/bun.js/WebKit": true,
"src/deps/*/**": true
},
"search.followSymlinks": false,
"search.useIgnoreFiles": true,
@@ -28,66 +25,49 @@
// Zig
"zig.initialSetupDone": true,
"zig.buildOnSave": false,
"zig.buildOption": "build",
"zig.zls.zigLibPath": "${workspaceFolder}/vendor/zig/lib",
"zig.buildArgs": ["-Dgenerated-code=./build/debug/codegen", "--watch", "-fincremental"],
"zig.zls.buildOnSaveStep": "check",
// "zig.zls.enableBuildOnSave": true,
// "zig.buildOnSave": true,
"zig.buildFilePath": "${workspaceFolder}/build.zig",
"zig.path": "${workspaceFolder}/vendor/zig/zig.exe",
"zig.zls.path": "${workspaceFolder}/vendor/zig/zls.exe",
"zig.path": "${workspaceFolder}/.cache/zig/zig.exe",
"zig.formattingProvider": "zls",
"zig.zls.enableInlayHints": false,
"[zig]": {
"editor.tabSize": 4,
"editor.useTabStops": false,
"editor.defaultFormatter": "ziglang.vscode-zig",
"editor.codeActionsOnSave": {
"source.organizeImports": "never",
},
"editor.defaultFormatter": "ziglang.vscode-zig"
},
// lldb
"lldb.launch.initCommands": ["command source ${workspaceFolder}/.lldbinit"],
"lldb.verboseLogging": false,
// C++
"lldb.verboseLogging": false,
"cmake.configureOnOpen": false,
"C_Cpp.errorSquiggles": "enabled",
"C_Cpp.errorSquiggles": "enabled",
"[cpp]": {
"editor.tabSize": 4,
"editor.defaultFormatter": "xaver.clang-format",
"editor.defaultFormatter": "xaver.clang-format"
},
"[c]": {
"editor.tabSize": 4,
"editor.defaultFormatter": "xaver.clang-format",
"editor.defaultFormatter": "xaver.clang-format"
},
"[h]": {
"editor.tabSize": 4,
"editor.defaultFormatter": "xaver.clang-format",
"editor.defaultFormatter": "xaver.clang-format"
},
"clangd.arguments": ["--header-insertion=never"],
// JavaScript
"prettier.enable": true,
"prettier.configPath": ".prettierrc",
"eslint.workingDirectories": ["${workspaceFolder}/packages/bun-types"],
"[javascript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
},
"[javascriptreact]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"prettier.prettierPath": "./node_modules/prettier",
// TypeScript
"typescript.tsdk": "node_modules/typescript/lib",
"typescript.tsdk": "${workspaceFolder}/node_modules/typescript/lib",
"[typescript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[typescriptreact]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
// JSON
@@ -97,7 +77,7 @@
"[jsonc]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
},
// Markdown
"[markdown]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
@@ -108,8 +88,8 @@
"editor.quickSuggestions": {
"comments": "off",
"strings": "off",
"other": "off",
},
"other": "off"
}
},
// TOML
@@ -122,11 +102,6 @@
"editor.defaultFormatter": "esbenp.prettier-vscode",
},
// Docker
"[dockerfile]": {
"editor.formatOnSave": false,
},
// Files
"files.exclude": {
"**/.git": true,
@@ -138,16 +113,23 @@
"**/*.xcworkspacedata": true,
"**/*.xcscheme": true,
"**/*.xcodeproj": true,
"src/bun.js/WebKit": true,
"src/deps/libarchive": true,
"src/deps/mimalloc": true,
"src/deps/s2n-tls": true,
"src/deps/boringssl": true,
"src/deps/openssl": true,
"src/deps/uws": true,
"src/deps/zlib": true,
"src/deps/lol-html": true,
"src/deps/c-ares": true,
"src/deps/tinycc": true,
"src/deps/zstd": true,
"**/*.i": true,
"packages/bun-uws/fuzzing/seed-corpus": true
},
"files.associations": {
"*.css": "tailwindcss",
"*.idl": "cpp",
"*.mdc": "markdown",
"array": "cpp",
"ios": "cpp",
"oxlint.json": "jsonc",
"bun.lock": "jsonc",
"*.idl": "cpp"
},
"C_Cpp.files.exclude": {
"**/.vscode": true,
@@ -165,8 +147,6 @@
"WebKit/WebCore": true,
"WebKit/WebDriver": true,
"WebKit/WebKitBuild": true,
"WebKit/WebInspectorUI": true,
"WebKit/WebInspectorUI": true
},
"git.detectSubmodules": false,
"bun.test.customScript": "bun-debug test"
}

.vscode/tasks.json

@@ -2,58 +2,51 @@
"version": "2.0.0",
"tasks": [
{
"label": "Build Bun",
"type": "shell",
"command": "bun run build",
"group": {
"kind": "build",
"isDefault": true,
"type": "process",
"label": "Install Dependencies",
"command": "scripts/all-dependencies.sh",
"windows": {
"command": "scripts/all-dependencies.ps1"
},
"problemMatcher": [
{
"owner": "zig",
"fileLocation": ["relative", "${workspaceFolder}"],
"pattern": [
{
"regexp": "^(.+?):(\\d+):(\\d+): (error|warning|note): (.+)$",
"file": 1,
"line": 2,
"column": 3,
"severity": 4,
"message": 5,
},
{
"regexp": "^\\s+(.+)$",
"message": 1,
"loop": true,
},
],
},
{
"owner": "clang",
"fileLocation": ["relative", "${workspaceFolder}"],
"pattern": [
{
"regexp": "^([^:]+):(\\d+):(\\d+):\\s+(warning|error|note|remark):\\s+(.*)$",
"file": 1,
"line": 2,
"column": 3,
"severity": 4,
"message": 5,
},
{
"regexp": "^\\s*(.*)$",
"message": 1,
"loop": true,
},
],
},
],
"presentation": {
"reveal": "always",
"panel": "shared",
"clear": true,
"icon": {
"id": "arrow-down"
},
"options": {
"cwd": "${workspaceFolder}"
},
},
],
{
"type": "process",
"label": "Setup Environment",
"dependsOn": ["Install Dependencies"],
"command": "scripts/setup.sh",
"windows": {
"command": "scripts/setup.ps1"
},
"icon": {
"id": "check"
},
"options": {
"cwd": "${workspaceFolder}"
},
},
{
"type": "process",
"label": "Build Bun",
"dependsOn": ["Setup Environment"],
"command": "bun",
"args": ["run", "build"],
"icon": {
"id": "gear"
},
"options": {
"cwd": "${workspaceFolder}"
},
"isBuildCommand": true,
"runOptions": {
"instanceLimit": 1,
"reevaluateOnRerun": true,
},
},
]
}


@@ -1 +0,0 @@
CLAUDE.md

CLAUDE.md

@@ -1,245 +0,0 @@
This is the Bun repository - an all-in-one JavaScript runtime & toolkit designed for speed, with a bundler, test runner, and Node.js-compatible package manager. It's written primarily in Zig with C++ for JavaScriptCore integration, powered by WebKit's JavaScriptCore engine.
## Building and Running Bun
### Build Commands
- **Build debug version**: `bun bd` or `bun run build:debug`
- Creates a debug build at `./build/debug/bun-debug`
- Compilation takes ~2.5 minutes
- **Run tests with your debug build**: `bun bd test <test-file>`
- **CRITICAL**: Never use `bun test` directly - it won't include your changes
- **Run any command with debug build**: `bun bd <command>`
### Other Build Variants
- `bun run build:release` - Release build
Address sanitizer is enabled by default in debug builds of Bun.
## Testing
### Running Tests
- **Single test file**: `bun bd test test/js/bun/http/serve.test.ts`
- **Fuzzy match test file**: `bun bd test http/serve.test.ts`
- **With filter**: `bun bd test test/js/bun/http/serve.test.ts -t "should handle"`
### Test Organization
- `test/js/bun/` - Bun-specific API tests (http, crypto, ffi, shell, etc.)
- `test/js/node/` - Node.js compatibility tests
- `test/js/web/` - Web API tests (fetch, WebSocket, streams, etc.)
- `test/cli/` - CLI command tests (install, run, test, etc.)
- `test/regression/issue/` - Regression tests (create one per bug fix)
- `test/bundler/` - Bundler and transpiler tests
- `test/integration/` - End-to-end integration tests
- `test/napi/` - N-API compatibility tests
- `test/v8/` - V8 C++ API compatibility tests
### Writing Tests
Tests use Bun's Jest-compatible test runner with proper test fixtures:
```typescript
import { test, expect } from "bun:test";
import { bunEnv, bunExe, tempDirWithFiles } from "harness";
test("my feature", async () => {
// Create temp directory with test files
const dir = tempDirWithFiles("test-prefix", {
"index.js": `console.log("hello");`,
});
// Spawn Bun process
await using proc = Bun.spawn({
cmd: [bunExe(), "index.js"],
env: bunEnv,
cwd: dir,
});
const [stdout, stderr, exitCode] = await Promise.all([
new Response(proc.stdout).text(),
new Response(proc.stderr).text(),
proc.exited,
]);
expect(exitCode).toBe(0);
expect(stdout).toBe("hello\n");
});
```
## Code Architecture
### Language Structure
- **Zig code** (`src/*.zig`): Core runtime, JavaScript bindings, package manager
- **C++ code** (`src/bun.js/bindings/*.cpp`): JavaScriptCore bindings, Web APIs
- **TypeScript** (`src/js/`): Built-in JavaScript modules with special syntax (see JavaScript Modules section)
- **Generated code**: Many files are auto-generated from `.classes.ts` and other sources
### Core Source Organization
#### Runtime Core (`src/`)
- `bun.zig` - Main entry point
- `cli.zig` - CLI command orchestration
- `js_parser.zig`, `js_lexer.zig`, `js_printer.zig` - JavaScript parsing/printing
- `transpiler.zig` - Wrapper around js_parser with sourcemap support
- `resolver/` - Module resolution system
- `allocators/` - Custom memory allocators for performance
#### JavaScript Runtime (`src/bun.js/`)
- `bindings/` - C++ JavaScriptCore bindings
- Generated classes from `.classes.ts` files
- Manual bindings for complex APIs
- `api/` - Bun-specific APIs
- `server.zig` - HTTP server implementation
- `FFI.zig` - Foreign Function Interface
- `crypto.zig` - Cryptographic operations
- `glob.zig` - File pattern matching
- `node/` - Node.js compatibility layer
- Module implementations (fs, path, crypto, etc.)
- Process and Buffer APIs
- `webcore/` - Web API implementations
- `fetch.zig` - Fetch API
- `streams.zig` - Web Streams
- `Blob.zig`, `Response.zig`, `Request.zig`
- `event_loop/` - Event loop and task management
#### Build Tools & Package Manager
- `src/bundler/` - JavaScript bundler
- Advanced tree-shaking
- CSS processing
- HTML handling
- `src/install/` - Package manager
- `lockfile/` - Lockfile handling
- `npm.zig` - npm registry client
- `lifecycle_script_runner.zig` - Package scripts
#### Other Key Components
- `src/shell/` - Cross-platform shell implementation
- `src/css/` - CSS parser and processor
- `src/http/` - HTTP client implementation
- `websocket_client/` - WebSocket client (including deflate support)
- `src/sql/` - SQL database integrations
- `src/bake/` - Server-side rendering framework
### JavaScript Class Implementation (C++)
When implementing JavaScript classes in C++:
1. Create three classes if there's a public constructor:
- `class Foo : public JSC::JSDestructibleObject` (if has C++ fields)
- `class FooPrototype : public JSC::JSNonFinalObject`
- `class FooConstructor : public JSC::InternalFunction`
2. Define properties using HashTableValue arrays
3. Add iso subspaces for classes with C++ fields
4. Cache structures in ZigGlobalObject
## Development Workflow
### Code Formatting
- `bun run prettier` - Format JS/TS files
- `bun run zig-format` - Format Zig files
- `bun run clang-format` - Format C++ files
### Watching for Changes
- `bun run watch` - Incremental Zig compilation with error checking
- `bun run watch-windows` - Windows-specific watch mode
### Code Generation
Code generation happens automatically as part of the build process. The main scripts are:
- `src/codegen/generate-classes.ts` - Generates Zig & C++ bindings from `*.classes.ts` files
- `src/codegen/generate-jssink.ts` - Generates stream-related classes
- `src/codegen/bundle-modules.ts` - Bundles built-in modules like `node:fs`
- `src/codegen/bundle-functions.ts` - Bundles global functions like `ReadableStream`
In development, bundled modules can be reloaded without rebuilding Zig by running `bun run build`.
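To make the `.classes.ts` flow concrete, here is a rough sketch of such a definition. The import path and field names (`construct`, `finalize`, `klass`, `proto`, `fn`, `getter`, `cache`) are assumptions for illustration; `src/codegen/generate-classes.ts` is the authoritative reference for the schema:
```ts
// Illustrative sketch only: the import path and field names are
// assumptions; check src/codegen/generate-classes.ts for the real schema.
import { define } from "../codegen/class-definitions";

export default [
  define({
    name: "Example", // class name exposed to JavaScript
    construct: true, // generate a public constructor
    finalize: true, // generate a finalizer for the native object
    klass: {}, // static properties
    proto: {
      // instance members: a method and a cached getter
      run: { fn: "run", length: 1 },
      name: { getter: "getName", cache: true },
    },
  }),
];
```
Running the build then regenerates the matching Zig and C++ binding glue from this definition.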
## JavaScript Modules (`src/js/`)
Built-in JavaScript modules use special syntax and are organized as:
- `node/` - Node.js compatibility modules (`node:fs`, `node:path`, etc.)
- `bun/` - Bun-specific modules (`bun:ffi`, `bun:sqlite`, etc.)
- `thirdparty/` - NPM modules we replace (like `ws`)
- `internal/` - Internal modules not exposed to users
- `builtins/` - Core JavaScript builtins (streams, console, etc.)
### Special Syntax in Built-in Modules
1. **`$` prefix** - Access to private properties and JSC intrinsics:
```js
const arr = $Array.from(...); // Private global
map.$set(...); // Private method
const arr2 = $newArrayWithSize(5); // JSC intrinsic
```
2. **`require()`** - Must use string literals, resolved at compile time:
```js
const fs = require("fs"); // Directly loads by numeric ID
```
3. **Debug helpers**:
- `$debug()` - Like console.log but stripped in release builds
- `$assert()` - Assertions stripped in release builds
- `if($debug) {}` - Check if debug env var is set
4. **Platform detection**: `process.platform` and `process.arch` are inlined and dead-code eliminated (see the sketch after this list)
5. **Export syntax**: Use `export default` which gets converted to a return statement:
```js
export default {
readFile,
writeFile,
};
```
Note: These are NOT ES modules. The preprocessor converts `$` to `@` (JSC's actual syntax) and handles the special functions.
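As an illustrative example of point 4 above, a branch like the following in a built-in module collapses at bundle time, so only the arm matching the compile target ships:
```ts
// Hypothetical snippet: the non-matching branch is stripped because
// process.platform is inlined during module bundling.
const devNull = process.platform === "win32" ? "\\\\.\\NUL" : "/dev/null";
```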
## CI
Bun uses BuildKite for CI. To get the status of a PR, you can use the following command:
```bash
bun ci
```
## Important Development Notes
1. **Never use `bun test` or `bun <file>` directly** - always use `bun bd test` or `bun bd <command>`. `bun bd` compiles & runs the debug build.
2. **Use `await using`** for proper resource cleanup with Bun APIs (Bun.spawn, Bun.serve, Bun.connect, etc.); a sketch follows this list
3. **Follow existing code style** - check neighboring files for patterns
4. **Create regression tests** in `test/regression/issue/` when fixing bugs
5. **Use absolute paths** - Always use absolute paths in file operations
6. **Avoid shell commands** - Don't use `find` or `grep` in tests; use Bun's Glob and built-in tools
7. **Memory management** - In Zig code, be careful with allocators and use defer for cleanup
8. **Cross-platform** - Test on macOS, Linux, and Windows when making platform-specific changes
9. **Debug builds** - Use `BUN_DEBUG_QUIET_LOGS=1` to disable debug logging, or `BUN_DEBUG_<scope>=1` to enable specific scopes
10. **Transpiled source** - Find transpiled files in `/tmp/bun-debug-src/` for debugging
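To make note 2 above concrete, here is a minimal sketch (assuming a test context) of `await using` with `Bun.serve()`; the server's async disposer runs when the scope exits, even if an assertion throws:
```ts
import { test, expect } from "bun:test";

test("server cleans up automatically", async () => {
  // The async disposer stops the server at scope exit.
  await using server = Bun.serve({
    port: 0, // let the OS pick a free port
    fetch() {
      return new Response("ok");
    },
  });

  const res = await fetch(`http://localhost:${server.port}/`);
  expect(await res.text()).toBe("ok");
});
```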
## Key APIs and Features
### Bun-Specific APIs
- **Bun.serve()** - High-performance HTTP server
- **Bun.spawn()** - Process spawning with better performance than Node.js
- **Bun.file()** - Fast file I/O operations
- **Bun.write()** - Unified API for writing to files, stdout, etc.
- **Bun.$ (Shell)** - Cross-platform shell scripting
- **Bun.SQLite** - Native SQLite integration
- **Bun.FFI** - Call native libraries from JavaScript
- **Bun.Glob** - Fast file pattern matching
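A minimal sketch of how a few of these compose (illustrative, not exhaustive):
```ts
import { $ } from "bun";

// Bun.serve() + Bun.file(): serve a file; its contents stream with the Response.
const server = Bun.serve({
  port: 0,
  fetch() {
    return new Response(Bun.file("package.json"));
  },
});
console.log(`listening on http://localhost:${server.port}`);

// Bun.$: cross-platform shell scripting via tagged template literals.
const { stdout } = await $`echo hello`.quiet();
console.log(stdout.toString()); // "hello\n"

server.stop();
```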

File diff suppressed because it is too large.

Some files were not shown because too many files have changed in this diff.