Compare commits

...

8 Commits

Author SHA1 Message Date
Claude Bot
9b735b6e72 fix(ci): rename BUILD_TYPE to BUILD_DIR for clarity
The variable is only used as a directory name, not passed to CMake.
Renaming to BUILD_DIR makes its purpose clear and avoids confusion
with CMake's CMAKE_BUILD_TYPE.
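
As a sketch of the distinction (variable names as used by scripts/bake-dependencies.sh in this PR):

  BUILD_DIR="${BUILD_DIR:-release}"                             # only a directory name -> build/release
  cmake -S . -B "build/$BUILD_DIR" -DCMAKE_BUILD_TYPE=Release   # the build type is a separate flag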

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-17 09:39:23 +00:00
Claude Bot
e90c2bd319 fix(ci): improve shell script error handling
- Use printf with "$*" instead of "$@" in error() to avoid SC2145
- Replace post-command $? checks with direct conditionals for cmake
  invocations to be more reliable with set -e
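
A minimal sketch of both fixes (illustrative snippets, not the exact script):

  # SC2145: "$@" inside a single string mixes words; "$*" joins all args into one message
  error() {
    printf 'error: %s\n' "$*" >&2
    exit 1
  }
  # Under `set -e`, a failing cmake exits the script before a later `$?` check can run;
  # a direct conditional observes the failure reliably:
  if ! cmake -S . -B build; then
    error "CMake configure failed"
  fi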

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-17 09:31:58 +00:00
Claude Bot
0e184519bd fix(ci): address review comments for dependency baking
- Dockerfile: Use centralized bake-dependencies.sh script instead of
  duplicating cmake/clone logic inline
- bake-dependencies.sh: Fix CMAKE_ARGS quoting by using direct cmake
  invocation with properly quoted arguments
- bake-dependencies.sh: Replace || true with proper error handling that
  logs context and exits non-zero on failure

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-17 09:27:40 +00:00
Claude Bot
f49229f2ef fix(ci): ensure no gap during AMI replacement
When an AMI name collision occurs during image creation, use a safe
replacement strategy:
1. Create temp image with unique name
2. Wait for temp image to be available
3. Copy temp image to final name
4. Wait for final image to be available
5. Only then deregister old and temp images

This ensures CI jobs always have a working AMI available during
the daily image rebuild: the old image is kept until the new
one is fully ready.
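
At the AWS CLI level, the sequence is roughly (image IDs and names illustrative):

  aws ec2 create-image --name "linux-x64-debian-12-v27-temp-1737105264" ...  # 1. temp image
  aws ec2 wait image-available --image-ids ami-temp                          # 2.
  aws ec2 copy-image --source-image-id ami-temp --source-region us-east-1 \
    --name "linux-x64-debian-12-v27"                                         # 3. copy to final name
  aws ec2 wait image-available --image-ids ami-final                         # 4.
  aws ec2 deregister-image --image-id ami-old                                # 5. only now drop the old image...
  aws ec2 deregister-image --image-id ami-temp                               #    ...and the temp image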

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-17 09:14:24 +00:00
Claude Bot
f605257804 feat(ci): add daily workflow to rebuild CI images
Adds a GitHub Actions workflow that:
- Runs daily at 4 AM UTC
- Rebuilds all CI images with fresh dependencies (Zig, WebKit, etc.)
- Can be triggered manually for specific platforms
- Overwrites existing versioned images (e.g., v27) with updated dependencies

This ensures CI images always have recent dependencies baked in,
reducing build startup time without requiring version bumps.

Platforms rebuilt:
- linux-x64-debian-12
- linux-x64-amazonlinux-2023
- linux-x64-alpine-3.22
- linux-aarch64-debian-12
- linux-aarch64-amazonlinux-2023
- linux-aarch64-alpine-3.22
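
A manual run limited to a subset would look something like this (assuming the workflow file name added in this PR):

  gh workflow run ci-images.yml -f platforms="linux-x64-debian-12,linux-aarch64-alpine-3.22"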

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-17 08:47:22 +00:00
Claude Bot
64fb607ba9 fix(ci): keep cmake/ninja files for proper caching
Don't remove CMakeCache.txt, build.ninja, .ninja_log etc. after baking.
These files are needed by ninja to know what's already been built,
preventing re-downloads of Zig, BoringSSL, and other dependencies.

Verified: After baking and simulating CI checkout (git clean -fdx -e build/ -e vendor/),
running `bun run build:release` proceeds directly to building without downloading any
dependencies.
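
That verification amounts to (paths per the bake script; a sketch only):

  cd /var/lib/buildkite-agent/build
  git clean -fdx -e 'build/' -e 'vendor/'   # what the CI checkout hook runs
  bun install                               # node_modules is removed before each build
  bun run build:release                     # should go straight to compiling, no downloads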

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-17 08:42:30 +00:00
Claude Bot
ab19da8156 fix(ci): correct clone target names and add source list generation
- Fix git clean to use -e 'vendor/' instead of -e 'vendor/zig/'
- Fix clone target names: lolhtml, cares (not lol-html, c-ares)
- Add all clone targets including hdrhistogram, picohttpparser
- Generate cmake source lists before configure (they are gitignored)
- Clean up build artifacts more thoroughly
- Improve dependency listing in bake script output

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-17 08:36:45 +00:00
Claude Bot
78ca50e356 feat(ci): bake build dependencies into CI images
This change pre-downloads all build dependencies (Zig, WebKit, BoringSSL,
etc.) into the CI build images, significantly reducing build startup time
by avoiding repeated downloads.

Changes:
- Add scripts/bake-dependencies.sh to clone Bun and run cmake configure
  to download all dependencies during image creation
- Update bootstrap.sh to call bake-dependencies.sh when in CI mode
- Add buildkite checkout hook to use git fetch instead of full clone
  when the repository already exists (preserves build/ and vendor/zig/)
- Add buildkite pre-command hook to remove node_modules before each
  build, ensuring clean dependency installation
- Update Dockerfile with the same hooks and pre-baked dependencies

During CI builds:
1. git fetch origin <commit> (instead of full clone)
2. git checkout <commit>
3. rm -rf node_modules && bun install
4. Build uses pre-downloaded dependencies from build/ and vendor/

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-17 08:28:30 +00:00
5 changed files with 445 additions and 4 deletions

Dockerfile

@@ -136,7 +136,60 @@ set -efu
export BUILDKITE_BUILD_CHECKOUT_PATH=/var/lib/buildkite-agent/build
EOF
RUN chmod 744 /var/lib/buildkite-agent/hooks/environment
# Checkout hook - uses git fetch instead of clone when repo exists
RUN cat <<'EOF' > /var/lib/buildkite-agent/hooks/checkout
#!/bin/sh
set -efu
CHECKOUT_PATH="${BUILDKITE_BUILD_CHECKOUT_PATH}"
COMMIT="${BUILDKITE_COMMIT}"
REPO="${BUILDKITE_REPO}"
echo "--- :git: Checkout"
if [ -d "${CHECKOUT_PATH}/.git" ]; then
echo "Repository exists, using git fetch..."
cd "${CHECKOUT_PATH}"
git fetch --depth=1 origin "${COMMIT}" || git fetch origin "${COMMIT}"
git reset --hard
git clean -fdx -e 'build/' -e 'vendor/'
git checkout -f "${COMMIT}"
else
echo "Repository does not exist, cloning..."
git clone --depth=1 "${REPO}" "${CHECKOUT_PATH}"
cd "${CHECKOUT_PATH}"
git fetch --depth=1 origin "${COMMIT}" || git fetch origin "${COMMIT}"
git checkout -f "${COMMIT}"
fi
echo "Checked out ${COMMIT}"
EOF
# Pre-command hook - removes node_modules before each build
RUN cat <<'EOF' > /var/lib/buildkite-agent/hooks/pre-command
#!/bin/sh
set -eu
CHECKOUT_PATH="${BUILDKITE_BUILD_CHECKOUT_PATH}"
if [ -d "${CHECKOUT_PATH}/node_modules" ]; then
echo "Removing existing node_modules..."
rm -rf "${CHECKOUT_PATH}/node_modules"
fi
EOF
RUN chmod 755 /var/lib/buildkite-agent/hooks/environment \
&& chmod 755 /var/lib/buildkite-agent/hooks/checkout \
&& chmod 755 /var/lib/buildkite-agent/hooks/pre-command
# Pre-download the bake-dependencies script and run it
# The script handles: cloning repo, installing deps, running cmake, downloading build deps
# This avoids downloading dependencies during each CI build
RUN curl -fsSL https://raw.githubusercontent.com/oven-sh/bun/main/scripts/bake-dependencies.sh -o /tmp/bake-dependencies.sh && \
BUN_REPO_PATH=/var/lib/buildkite-agent/build sh /tmp/bake-dependencies.sh && \
rm /tmp/bake-dependencies.sh
WORKDIR /var/lib/buildkite-agent/build
COPY ../*/agent.mjs /var/bun/scripts/

.github/workflows/ci-images.yml (new file)

@@ -0,0 +1,106 @@
name: Rebuild CI Images

on:
  schedule:
    # Run daily at 4 AM UTC
    - cron: "0 4 * * *"
  workflow_dispatch:
    inputs:
      platforms:
        description: 'Platforms to rebuild (comma-separated, or "all")'
        required: false
        default: "all"
        type: string

jobs:
  # Determine which platforms to build
  matrix:
    runs-on: ubuntu-latest
    outputs:
      platforms: ${{ steps.set-matrix.outputs.platforms }}
    steps:
      - uses: actions/checkout@v4
      - id: set-matrix
        run: |
          if [ "${{ github.event.inputs.platforms }}" = "all" ] || [ -z "${{ github.event.inputs.platforms }}" ]; then
            # All platforms used in CI
            echo 'platforms=["linux-x64-debian-12","linux-x64-amazonlinux-2023","linux-x64-alpine-3.22","linux-aarch64-debian-12","linux-aarch64-amazonlinux-2023","linux-aarch64-alpine-3.22"]' >> $GITHUB_OUTPUT
          else
            # Convert comma-separated to JSON array
            platforms=$(echo '${{ github.event.inputs.platforms }}' | jq -R -s -c 'split(",") | map(gsub("^\\s+|\\s+$";""))')
            echo "platforms=$platforms" >> $GITHUB_OUTPUT
          fi

  rebuild:
    needs: matrix
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        platform: ${{ fromJson(needs.matrix.outputs.platforms) }}
    permissions:
      id-token: write
      contents: read
    steps:
      - uses: actions/checkout@v4
      - uses: oven-sh/setup-bun@v2
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          role-to-assume: ${{ secrets.AWS_ROLE_ARN }}
          aws-region: us-east-1
      - name: Parse platform
        id: parse
        run: |
          platform="${{ matrix.platform }}"
          # Parse platform strings like "linux-x64-debian-12" or "linux-aarch64-alpine-3.22"
          os=$(echo "$platform" | cut -d'-' -f1)
          arch=$(echo "$platform" | cut -d'-' -f2)
          distro=$(echo "$platform" | cut -d'-' -f3)
          release=$(echo "$platform" | cut -d'-' -f4)
          echo "os=$os" >> $GITHUB_OUTPUT
          echo "arch=$arch" >> $GITHUB_OUTPUT
          echo "distro=$distro" >> $GITHUB_OUTPUT
          echo "release=$release" >> $GITHUB_OUTPUT
      - name: Get bootstrap version
        id: version
        run: |
          version=$(grep -E '^# Version: [0-9]+' scripts/bootstrap.sh | grep -oE '[0-9]+')
          echo "version=v${version}" >> $GITHUB_OUTPUT
          echo "Bootstrap version: v${version}"
      - name: Rebuild and publish image
        env:
          PLATFORM: ${{ matrix.platform }}
          OS: ${{ steps.parse.outputs.os }}
          ARCH: ${{ steps.parse.outputs.arch }}
          DISTRO: ${{ steps.parse.outputs.distro }}
          RELEASE: ${{ steps.parse.outputs.release }}
          VERSION: ${{ steps.version.outputs.version }}
        run: |
          echo "Rebuilding CI image for $PLATFORM (version $VERSION)"
          # Build and publish the image
          # This will create/overwrite the AMI with a name like "linux-x64-debian-12-v27"
          bun run scripts/machine.mjs \
            --os "$OS" \
            --arch "$ARCH" \
            --distro "$DISTRO" \
            --release "$RELEASE" \
            --ci \
            publish-image
      - name: Summary
        run: |
          echo "## Rebuilt CI Image" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "- **Platform:** ${{ matrix.platform }}" >> $GITHUB_STEP_SUMMARY
          echo "- **Version:** ${{ steps.version.outputs.version }}" >> $GITHUB_STEP_SUMMARY
          echo "- **Date:** $(date -u +%Y-%m-%d)" >> $GITHUB_STEP_SUMMARY

scripts/bake-dependencies.sh (new executable file)

@@ -0,0 +1,186 @@
#!/bin/sh
# Version: 1
#
# This script pre-downloads all build dependencies into the image.
# It should be run during AMI baking to avoid downloading dependencies
# during each CI build.
#
# Dependencies downloaded:
# - Zig compiler
# - WebKit (JavaScriptCore)
# - BoringSSL
# - And other cmake-managed dependencies
set -eu
print() {
  echo "$@"
}
error() {
  printf 'error: %s\n' "$*" >&2
  exit 1
}
# Detect architecture
detect_arch() {
  arch="$(uname -m)"
  case "$arch" in
  x86_64 | x64 | amd64)
    echo "x64"
    ;;
  aarch64 | arm64)
    echo "aarch64"
    ;;
  *)
    error "Unsupported architecture: $arch"
    ;;
  esac
}
# Detect ABI (glibc vs musl)
detect_abi() {
  if [ -f "/etc/alpine-release" ]; then
    echo "musl"
  else
    ldd_output="$(ldd --version 2>&1 || true)"
    case "$ldd_output" in
    *musl*)
      echo "musl"
      ;;
    *)
      echo ""
      ;;
    esac
  fi
}
ARCH="$(detect_arch)"
ABI="$(detect_abi)"
# Default paths - these should match what the buildkite agent uses
BUN_REPO_PATH="${BUN_REPO_PATH:-/var/lib/buildkite-agent/build}"
BUILD_DIR="${BUILD_DIR:-release}"
print "=== Bun Dependency Baking Script ==="
print "Architecture: $ARCH"
print "ABI: ${ABI:-glibc}"
print "Repository path: $BUN_REPO_PATH"
print "Build directory: $BUILD_DIR"
# Clone the Bun repository if it doesn't exist
if [ ! -d "$BUN_REPO_PATH/.git" ]; then
print "Cloning Bun repository..."
git clone --depth=1 https://github.com/oven-sh/bun.git "$BUN_REPO_PATH"
else
print "Bun repository already exists, updating..."
cd "$BUN_REPO_PATH"
git fetch --depth=1 origin main
git checkout FETCH_HEAD
fi
cd "$BUN_REPO_PATH"
# Install npm dependencies (will be cleaned later, but needed for codegen)
print "Installing npm dependencies..."
bun install --frozen-lockfile
# Generate cmake source lists (these are gitignored but required for cmake configure)
print "Generating cmake source lists..."
bun run scripts/glob-sources.mjs
# Set up build directory
BUILD_PATH="$BUN_REPO_PATH/build/$BUILD_DIR"
CACHE_PATH="$BUILD_PATH/cache"
mkdir -p "$BUILD_PATH" "$CACHE_PATH"
print "Build path: $BUILD_PATH"
print "Cache path: $CACHE_PATH"
# Run cmake configure to download all dependencies
# This will download: Zig, WebKit, BoringSSL, and all other dependencies
print "Running CMake configure to download dependencies..."
# Run cmake configure - this downloads WebKit
if [ -n "$ABI" ]; then
if ! cmake -S "$BUN_REPO_PATH" -B "$BUILD_PATH" -G Ninja -DCMAKE_BUILD_TYPE=Release -DCI=ON -DABI="$ABI"; then
error "CMake configure failed for release build"
fi
else
if ! cmake -S "$BUN_REPO_PATH" -B "$BUILD_PATH" -G Ninja -DCMAKE_BUILD_TYPE=Release -DCI=ON; then
error "CMake configure failed for release build"
fi
fi
# Run cmake build for clone targets only - this downloads Zig, BoringSSL, etc.
# These are build targets that download dependencies
print "Downloading build dependencies (Zig, BoringSSL, etc.)..."
CLONE_TARGETS="clone-zig clone-boringssl clone-mimalloc clone-zstd clone-lolhtml clone-cares clone-libdeflate clone-libarchive clone-tinycc clone-zlib clone-lshpack clone-brotli clone-highway clone-hdrhistogram clone-picohttpparser"
if ! cmake --build "$BUILD_PATH" --target $CLONE_TARGETS; then
  error "Failed to download build dependencies (clone targets) for release build in $BUILD_PATH"
fi
# Also download debug WebKit variant for debug builds
print "Downloading debug WebKit variant..."
if [ -n "$ABI" ]; then
if ! cmake -S "$BUN_REPO_PATH" -B "$BUN_REPO_PATH/build/debug" -G Ninja -DCMAKE_BUILD_TYPE=Debug -DCI=ON -DABI="$ABI"; then
error "CMake configure failed for debug build"
fi
else
if ! cmake -S "$BUN_REPO_PATH" -B "$BUN_REPO_PATH/build/debug" -G Ninja -DCMAKE_BUILD_TYPE=Debug -DCI=ON; then
error "CMake configure failed for debug build"
fi
fi
# Keep cmake/ninja files so subsequent builds don't re-download dependencies
# The ninja build system tracks what's been built - removing these files causes re-downloads
print "Keeping build system files for caching..."
# Remove node_modules - will be reinstalled during actual builds
print "Removing node_modules..."
rm -rf "$BUN_REPO_PATH/node_modules"
# List what was downloaded
print ""
print "=== Downloaded Dependencies ==="
VENDOR_PATH="$BUN_REPO_PATH/vendor"
if [ -d "$VENDOR_PATH/zig" ]; then
print "✓ Zig compiler: $(du -sh "$VENDOR_PATH/zig" 2>/dev/null | cut -f1)"
fi
if [ -d "$CACHE_PATH" ]; then
for webkit_dir in "$CACHE_PATH"/webkit-*; do
if [ -d "$webkit_dir" ]; then
print "✓ WebKit: $(du -sh "$webkit_dir" 2>/dev/null | cut -f1)"
break
fi
done
fi
if [ -d "$VENDOR_PATH/boringssl" ]; then
print "✓ BoringSSL: $(du -sh "$VENDOR_PATH/boringssl" 2>/dev/null | cut -f1)"
fi
if [ -d "$VENDOR_PATH/mimalloc" ]; then
print "✓ mimalloc: $(du -sh "$VENDOR_PATH/mimalloc" 2>/dev/null | cut -f1)"
fi
if [ -d "$VENDOR_PATH/zstd" ]; then
print "✓ zstd: $(du -sh "$VENDOR_PATH/zstd" 2>/dev/null | cut -f1)"
fi
if [ -d "$VENDOR_PATH/lolhtml" ]; then
print "✓ lol-html: $(du -sh "$VENDOR_PATH/lolhtml" 2>/dev/null | cut -f1)"
fi
if [ -d "$VENDOR_PATH/cares" ]; then
print "✓ c-ares: $(du -sh "$VENDOR_PATH/cares" 2>/dev/null | cut -f1)"
fi
# Calculate total size
TOTAL_SIZE="$(du -sh "$BUN_REPO_PATH" 2>/dev/null | cut -f1 || echo 'unknown')"
print ""
print "Total repository size: $TOTAL_SIZE"
print ""
print "=== Dependency baking complete ==="
print "The following will happen during CI builds:"
print " 1. git fetch origin <commit> (instead of full clone)"
print " 2. git checkout <commit>"
print " 3. rm -rf node_modules && bun install"
print " 4. Build will use pre-downloaded dependencies"

scripts/bootstrap.sh

@@ -1,5 +1,5 @@
#!/bin/sh
# Version: 26
# Version: 27
# A script that installs the dependencies needed to build and test Bun.
# This should work on macOS and Linux with a POSIX shell.
@@ -1455,13 +1455,69 @@ create_buildkite_user() {
# reasons.
local hook_dir=${home}/hooks
mkdir -p -m 755 "${hook_dir}"
# Environment hook - sets checkout path
cat << EOF > "${hook_dir}/environment"
#!/bin/sh
set -efu
export BUILDKITE_BUILD_CHECKOUT_PATH=${home}/build
EOF
# Checkout hook - uses git fetch instead of clone when repo exists
# This works with the pre-baked repository from bake-dependencies.sh
cat << 'CHECKOUT_EOF' > "${hook_dir}/checkout"
#!/bin/sh
set -efu
CHECKOUT_PATH="${BUILDKITE_BUILD_CHECKOUT_PATH}"
COMMIT="${BUILDKITE_COMMIT}"
REPO="${BUILDKITE_REPO}"
echo "--- :git: Checkout"
if [ -d "${CHECKOUT_PATH}/.git" ]; then
echo "Repository exists, using git fetch..."
cd "${CHECKOUT_PATH}"
# Fetch the specific commit
git fetch --depth=1 origin "${COMMIT}" || git fetch origin "${COMMIT}"
# Clean up any local changes
git reset --hard
git clean -fdx -e 'build/' -e 'vendor/'
# Checkout the commit
git checkout -f "${COMMIT}"
else
echo "Repository does not exist, cloning..."
git clone --depth=1 "${REPO}" "${CHECKOUT_PATH}"
cd "${CHECKOUT_PATH}"
git fetch --depth=1 origin "${COMMIT}" || git fetch origin "${COMMIT}"
git checkout -f "${COMMIT}"
fi
echo "Checked out ${COMMIT}"
CHECKOUT_EOF
# Pre-command hook - removes node_modules before bun install
cat << 'PRECOMMAND_EOF' > "${hook_dir}/pre-command"
#!/bin/sh
set -eu
CHECKOUT_PATH="${BUILDKITE_BUILD_CHECKOUT_PATH}"
# Remove node_modules before each build to ensure clean state
# Dependencies are re-installed via bun install
if [ -d "${CHECKOUT_PATH}/node_modules" ]; then
echo "Removing existing node_modules..."
rm -rf "${CHECKOUT_PATH}/node_modules"
fi
PRECOMMAND_EOF
execute_sudo chmod +x "${hook_dir}/environment"
execute_sudo chmod +x "${hook_dir}/checkout"
execute_sudo chmod +x "${hook_dir}/pre-command"
execute_sudo chown -R "$user:$group" "$hook_dir"
set +ef -"$opts"
@@ -1669,6 +1725,24 @@ ensure_no_tmpfs() {
execute_sudo systemctl mask tmp.mount
}
bake_bun_dependencies() {
  if ! [ "$ci" = "1" ]; then
    return
  fi
  print "Baking Bun dependencies into image..."
  # The bake script will clone Bun and download all build dependencies
  # This includes: Zig, WebKit, BoringSSL, mimalloc, zstd, etc.
  bake_script="$(dirname "$0")/bake-dependencies.sh"
  if [ -f "$bake_script" ]; then
    # Run as buildkite-agent user so files have correct ownership
    execute_as_user "BUN_REPO_PATH=/var/lib/buildkite-agent/build sh $bake_script"
  else
    print "Warning: bake-dependencies.sh not found, skipping dependency baking"
  fi
}
main() {
  check_features "$@"
  check_operating_system
@@ -1685,6 +1759,7 @@ main() {
if [ "${BUN_NO_CORE_DUMP:-0}" != "1" ]; then
configure_core_dumps
fi
bake_bun_dependencies
clean_system
ensure_no_tmpfs
}

scripts/machine.mjs

@@ -224,6 +224,7 @@ const aws = {
*/
async createImage(options) {
  const flags = aws.getFlags(options);
  const finalName = options?.name;
  /** @type {string | undefined} */
  let existingImageId;
@@ -246,9 +247,29 @@ const aws = {
    return ImageId;
  }
  // Name collision: create with temp name, wait for availability, copy with final name,
  // then deregister old and temp images. This ensures no gap where the image is unavailable.
  const tempName = `${finalName}-temp-${Date.now()}`;
  const tempFlags = aws.getFlags({ ...options, name: tempName });
  const { ImageId: tempImageId } = await aws.spawn($`ec2 create-image ${tempFlags}`);
  // Wait for temp image to be available
  await aws.waitImage("image-available", tempImageId);
  // Copy temp image to final name
  const region = process.env.AWS_REGION || process.env.AWS_DEFAULT_REGION || "us-east-1";
  const { ImageId: finalImageId } = await aws.spawn(
    $`ec2 copy-image --source-image-id ${tempImageId} --source-region ${region} --name ${finalName}`,
  );
  // Wait for final image to be available
  await aws.waitImage("image-available", finalImageId);
  // Now safe to deregister old and temp images
  await aws.spawn($`ec2 deregister-image --image-id ${existingImageId}`);
- const { ImageId } = await aws.spawn($`ec2 create-image ${flags}`);
- return ImageId;
  await aws.spawn($`ec2 deregister-image --image-id ${tempImageId}`);
  return finalImageId;
},
/**