mirror of https://github.com/oven-sh/bun
synced 2026-02-03 07:28:53 +00:00

Compare commits: dylan/pyth ... jarred/edg (1 commit)

| Author | SHA1 | Date |
|---|---|---|
| | 8e90b04d9f | |

19 .aikido
@@ -1,19 +0,0 @@
exclude:
  paths:
    - test
    - scripts
    - bench
    - packages/bun-lambda
    - packages/bun-release
    - packages/bun-wasm
    - packages/bun-vscode
    - packages/bun-plugin-yaml
    - packages/bun-plugin-svelte
    - packages/bun-native-plugin-rs
    - packages/bun-native-bundler-plugin-api
    - packages/bun-inspector-protocol
    - packages/bun-inspector-frontend
    - packages/bun-error
    - packages/bun-debug-adapter-protocol
    - packages/bun-build-mdx-rs
    - packages/@types/bun
@@ -1,177 +0,0 @@
ARG LLVM_VERSION="19"
ARG REPORTED_LLVM_VERSION="19.1.7"
ARG OLD_BUN_VERSION="1.1.38"
ARG BUILDKITE_AGENT_TAGS="queue=linux,os=linux,arch=${TARGETARCH}"

FROM --platform=$BUILDPLATFORM ubuntu:20.04 as base-arm64
FROM --platform=$BUILDPLATFORM ubuntu:20.04 as base-amd64
FROM base-$TARGETARCH as base

ARG LLVM_VERSION
ARG OLD_BUN_VERSION
ARG TARGETARCH
ARG REPORTED_LLVM_VERSION

ENV DEBIAN_FRONTEND=noninteractive \
    CI=true \
    DOCKER=true

RUN echo "Acquire::Queue-Mode \"host\";" > /etc/apt/apt.conf.d/99-apt-queue-mode.conf \
    && echo "Acquire::Timeout \"120\";" >> /etc/apt/apt.conf.d/99-apt-timeout.conf \
    && echo "Acquire::Retries \"3\";" >> /etc/apt/apt.conf.d/99-apt-retries.conf \
    && echo "APT::Install-Recommends \"false\";" >> /etc/apt/apt.conf.d/99-apt-install-recommends.conf \
    && echo "APT::Install-Suggests \"false\";" >> /etc/apt/apt.conf.d/99-apt-install-suggests.conf

RUN apt-get update && apt-get install -y --no-install-recommends \
    wget curl git python3 python3-pip ninja-build \
    software-properties-common apt-transport-https \
    ca-certificates gnupg lsb-release unzip \
    libxml2-dev ruby ruby-dev bison gawk perl make golang ccache qemu-user-static \
    && add-apt-repository ppa:ubuntu-toolchain-r/test \
    && apt-get update \
    && apt-get install -y gcc-13 g++-13 libgcc-13-dev libstdc++-13-dev \
    libasan6 libubsan1 libatomic1 libtsan0 liblsan0 \
    libgfortran5 libc6-dev \
    && wget https://apt.llvm.org/llvm.sh \
    && chmod +x llvm.sh \
    && ./llvm.sh ${LLVM_VERSION} all \
    && rm llvm.sh \
    && rm -rf /var/lib/apt/lists/*

RUN --mount=type=tmpfs,target=/tmp \
    cmake_version="3.30.5" && \
    if [ "$TARGETARCH" = "arm64" ]; then \
    cmake_url="https://github.com/Kitware/CMake/releases/download/v${cmake_version}/cmake-${cmake_version}-linux-aarch64.sh"; \
    else \
    cmake_url="https://github.com/Kitware/CMake/releases/download/v${cmake_version}/cmake-${cmake_version}-linux-x86_64.sh"; \
    fi && \
    wget -O /tmp/cmake.sh "$cmake_url" && \
    sh /tmp/cmake.sh --skip-license --prefix=/usr

RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-13 130 \
    --slave /usr/bin/g++ g++ /usr/bin/g++-13 \
    --slave /usr/bin/gcc-ar gcc-ar /usr/bin/gcc-ar-13 \
    --slave /usr/bin/gcc-nm gcc-nm /usr/bin/gcc-nm-13 \
    --slave /usr/bin/gcc-ranlib gcc-ranlib /usr/bin/gcc-ranlib-13

RUN echo "ARCH_PATH=$([ "$TARGETARCH" = "arm64" ] && echo "aarch64-linux-gnu" || echo "x86_64-linux-gnu")" >> /etc/environment \
    && echo "BUN_ARCH=$([ "$TARGETARCH" = "arm64" ] && echo "aarch64" || echo "x64")" >> /etc/environment

ENV LD_LIBRARY_PATH=/usr/lib/gcc/${ARCH_PATH}/13:/usr/lib/${ARCH_PATH} \
    LIBRARY_PATH=/usr/lib/gcc/${ARCH_PATH}/13:/usr/lib/${ARCH_PATH} \
    CPLUS_INCLUDE_PATH=/usr/include/c++/13:/usr/include/${ARCH_PATH}/c++/13 \
    C_INCLUDE_PATH=/usr/lib/gcc/${ARCH_PATH}/13/include

RUN if [ "$TARGETARCH" = "arm64" ]; then \
    export ARCH_PATH="aarch64-linux-gnu"; \
    else \
    export ARCH_PATH="x86_64-linux-gnu"; \
    fi \
    && mkdir -p /usr/lib/gcc/${ARCH_PATH}/13 \
    && ln -sf /usr/lib/${ARCH_PATH}/libstdc++.so.6 /usr/lib/gcc/${ARCH_PATH}/13/ \
    && echo "/usr/lib/gcc/${ARCH_PATH}/13" > /etc/ld.so.conf.d/gcc-13.conf \
    && echo "/usr/lib/${ARCH_PATH}" >> /etc/ld.so.conf.d/gcc-13.conf \
    && ldconfig

RUN for f in /usr/lib/llvm-${LLVM_VERSION}/bin/*; do ln -sf "$f" /usr/bin; done \
    && ln -sf /usr/bin/clang-${LLVM_VERSION} /usr/bin/clang \
    && ln -sf /usr/bin/clang++-${LLVM_VERSION} /usr/bin/clang++ \
    && ln -sf /usr/bin/lld-${LLVM_VERSION} /usr/bin/lld \
    && ln -sf /usr/bin/lldb-${LLVM_VERSION} /usr/bin/lldb \
    && ln -sf /usr/bin/clangd-${LLVM_VERSION} /usr/bin/clangd \
    && ln -sf /usr/bin/llvm-ar-${LLVM_VERSION} /usr/bin/llvm-ar \
    && ln -sf /usr/bin/ld.lld /usr/bin/ld \
    && ln -sf /usr/bin/clang /usr/bin/cc \
    && ln -sf /usr/bin/clang++ /usr/bin/c++

ENV CC="clang" \
    CXX="clang++" \
    AR="llvm-ar-${LLVM_VERSION}" \
    RANLIB="llvm-ranlib-${LLVM_VERSION}" \
    LD="lld-${LLVM_VERSION}"

RUN --mount=type=tmpfs,target=/tmp \
    bash -c '\
    set -euxo pipefail && \
    source /etc/environment && \
    echo "Downloading bun-v${OLD_BUN_VERSION}/bun-linux-$BUN_ARCH.zip from https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/bun-v${OLD_BUN_VERSION}/bun-linux-$BUN_ARCH.zip" && \
    curl -fsSL https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/bun-v${OLD_BUN_VERSION}/bun-linux-$BUN_ARCH.zip -o /tmp/bun.zip && \
    unzip /tmp/bun.zip -d /tmp/bun && \
    mv /tmp/bun/*/bun /usr/bin/bun && \
    chmod +x /usr/bin/bun'

ENV LLVM_VERSION=${REPORTED_LLVM_VERSION}

WORKDIR /workspace

FROM --platform=$BUILDPLATFORM base as buildkite

ARG BUILDKITE_AGENT_TAGS

# Install Rust nightly
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y \
    && export PATH=$HOME/.cargo/bin:$PATH \
    && rustup install nightly \
    && rustup default nightly

RUN ARCH=$(if [ "$TARGETARCH" = "arm64" ]; then echo "arm64"; else echo "amd64"; fi) && \
    echo "Downloading buildkite" && \
    curl -fsSL "https://github.com/buildkite/agent/releases/download/v3.87.0/buildkite-agent-linux-${ARCH}-3.87.0.tar.gz" -o /tmp/buildkite-agent.tar.gz && \
    mkdir -p /tmp/buildkite-agent && \
    tar -xzf /tmp/buildkite-agent.tar.gz -C /tmp/buildkite-agent && \
    mv /tmp/buildkite-agent/buildkite-agent /usr/bin/buildkite-agent

RUN mkdir -p /var/cache/buildkite-agent /var/log/buildkite-agent /var/run/buildkite-agent /etc/buildkite-agent /var/lib/buildkite-agent/cache/bun

# The following is necessary to configure buildkite to use a stable
# checkout directory for ccache to be effective.
RUN mkdir -p -m 755 /var/lib/buildkite-agent/hooks && \
    cat <<'EOF' > /var/lib/buildkite-agent/hooks/environment
#!/bin/sh
set -efu

export BUILDKITE_BUILD_CHECKOUT_PATH=/var/lib/buildkite-agent/build
EOF

RUN chmod 744 /var/lib/buildkite-agent/hooks/environment

COPY ../*/agent.mjs /var/bun/scripts/

ENV BUN_INSTALL_CACHE=/var/lib/buildkite-agent/cache/bun
ENV BUILDKITE_AGENT_TAGS=${BUILDKITE_AGENT_TAGS}

WORKDIR /var/bun/scripts

ENV PATH=/root/.cargo/bin:$PATH

CMD ["bun", "/var/bun/scripts/agent.mjs", "start"]

FROM --platform=$BUILDPLATFORM base as bun-build-linux-local

ARG LLVM_VERSION
WORKDIR /workspace/bun

COPY . /workspace/bun

# Install Rust nightly
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y \
    && export PATH=$HOME/.cargo/bin:$PATH \
    && rustup install nightly \
    && rustup default nightly

ENV PATH=/root/.cargo/bin:$PATH

ENV LLVM_VERSION=${REPORTED_LLVM_VERSION}

RUN --mount=type=tmpfs,target=/workspace/bun/build \
    ls -la \
    && bun run build:release \
    && mkdir -p /target \
    && cp -r /workspace/bun/build/release/bun /target/bun
@@ -1,122 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail

# Check if running as root
if [ "$EUID" -ne 0 ]; then
  echo "error: must run as root"
  exit 1
fi

# Check OS compatibility
if ! command -v dnf &> /dev/null; then
  echo "error: this script requires dnf (RHEL/Fedora/CentOS)"
  exit 1
fi

# Ensure /tmp/agent.mjs, /tmp/Dockerfile are present
if [ ! -f /tmp/agent.mjs ] || [ ! -f /tmp/Dockerfile ]; then
  # Print each missing file
  if [ ! -f /tmp/agent.mjs ]; then
    echo "error: /tmp/agent.mjs is missing"
  fi
  if [ ! -f /tmp/Dockerfile ]; then
    echo "error: /tmp/Dockerfile is missing"
  fi
  exit 1
fi

# Install Docker
dnf update -y
dnf install -y docker

systemctl enable docker
systemctl start docker || {
  echo "error: failed to start Docker"
  exit 1
}

# Create builder
docker buildx create --name builder --driver docker-container --bootstrap --use || {
  echo "error: failed to create Docker buildx builder"
  exit 1
}

# Set up Docker to start on boot
cat << 'EOF' > /etc/systemd/system/buildkite-agent.service
[Unit]
Description=Buildkite Docker Container
After=docker.service network-online.target
Requires=docker.service network-online.target

[Service]
TimeoutStartSec=0
Restart=always
RestartSec=5
ExecStartPre=-/usr/bin/docker stop buildkite
ExecStartPre=-/usr/bin/docker rm buildkite
ExecStart=/usr/bin/docker run \
  --name buildkite \
  --restart=unless-stopped \
  --network host \
  -v /var/run/docker.sock:/var/run/docker.sock \
  -v /tmp:/tmp \
  buildkite:latest

[Install]
WantedBy=multi-user.target
EOF

echo "Building Buildkite image"

# Clean up any previous build artifacts
rm -rf /tmp/fakebun
mkdir -p /tmp/fakebun/scripts /tmp/fakebun/.buildkite

# Copy required files
cp /tmp/agent.mjs /tmp/fakebun/scripts/ || {
  echo "error: failed to copy agent.mjs"
  exit 1
}
cp /tmp/Dockerfile /tmp/fakebun/.buildkite/Dockerfile || {
  echo "error: failed to copy Dockerfile"
  exit 1
}

cd /tmp/fakebun || {
  echo "error: failed to change directory"
  exit 1
}

# Build the Buildkite image
docker buildx build \
  --platform $(uname -m | sed 's/aarch64/linux\/arm64/;s/x86_64/linux\/amd64/') \
  --tag buildkite:latest \
  --target buildkite \
  -f .buildkite/Dockerfile \
  --load \
  . || {
  echo "error: Docker build failed"
  exit 1
}

# Create container to ensure image is cached in AMI
docker container create \
  --name buildkite \
  --restart=unless-stopped \
  buildkite:latest || {
  echo "error: failed to create buildkite container"
  exit 1
}

# Reload systemd to pick up new service
systemctl daemon-reload

# Enable the service, but don't start it yet
systemctl enable buildkite-agent || {
  echo "error: failed to enable buildkite-agent service"
  exit 1
}

echo "Bootstrap complete"
echo "To start the Buildkite agent, run: "
echo "  systemctl start buildkite-agent"
@@ -1,16 +0,0 @@
# Uploads the latest CI workflow to Buildkite.
# https://buildkite.com/docs/pipelines/defining-steps
#
# Changes to this file must be manually edited here:
# https://buildkite.com/bun/bun/settings/steps
steps:
  - if: "build.pull_request.repository.fork"
    block: ":eyes:"
    prompt: "Did you review the PR?"
    blocked_state: "running"

  - label: ":pipeline:"
    agents:
      queue: "build-darwin"
    command:
      - "node .buildkite/ci.mjs"
1366 .buildkite/ci.mjs
File diff suppressed because it is too large
@@ -1,470 +0,0 @@
# Windows Code Signing Script for Bun
# Uses DigiCert KeyLocker for Authenticode signing
# Native PowerShell implementation - no path translation issues

param(
    [Parameter(Mandatory=$true)]
    [string]$BunProfileExe,

    [Parameter(Mandatory=$true)]
    [string]$BunExe
)

$ErrorActionPreference = "Stop"
$ProgressPreference = "SilentlyContinue"

# Logging functions
function Log-Info {
    param([string]$Message)
    Write-Host "[INFO] $Message" -ForegroundColor Cyan
}

function Log-Success {
    param([string]$Message)
    Write-Host "[SUCCESS] $Message" -ForegroundColor Green
}

function Log-Error {
    param([string]$Message)
    Write-Host "[ERROR] $Message" -ForegroundColor Red
}

function Log-Debug {
    param([string]$Message)
    if ($env:DEBUG -eq "true" -or $env:DEBUG -eq "1") {
        Write-Host "[DEBUG] $Message" -ForegroundColor Gray
    }
}

# Detect system architecture
$script:IsARM64 = [System.Runtime.InteropServices.RuntimeInformation]::OSArchitecture -eq [System.Runtime.InteropServices.Architecture]::Arm64
$script:VsArch = if ($script:IsARM64) { "arm64" } else { "amd64" }

# Load Visual Studio environment if not already loaded
function Ensure-VSEnvironment {
    if ($null -eq $env:VSINSTALLDIR) {
        Log-Info "Loading Visual Studio environment for $script:VsArch..."

        $vswhere = "C:\Program Files (x86)\Microsoft Visual Studio\Installer\vswhere.exe"
        if (!(Test-Path $vswhere)) {
            throw "Command not found: vswhere (did you install Visual Studio?)"
        }

        $vsDir = & $vswhere -prerelease -latest -property installationPath
        if ($null -eq $vsDir) {
            $vsDir = Get-ChildItem -Path "C:\Program Files\Microsoft Visual Studio\2022" -Directory -ErrorAction SilentlyContinue
            if ($null -eq $vsDir) {
                throw "Visual Studio directory not found."
            }
            $vsDir = $vsDir.FullName
        }

        Push-Location $vsDir
        try {
            $vsShell = Join-Path -Path $vsDir -ChildPath "Common7\Tools\Launch-VsDevShell.ps1"
            . $vsShell -Arch $script:VsArch -HostArch $script:VsArch
        } finally {
            Pop-Location
        }

        Log-Success "Visual Studio environment loaded"
    }

    if ($env:VSCMD_ARG_TGT_ARCH -eq "x86") {
        throw "Visual Studio environment is targeting 32 bit x86, but only 64-bit architectures (x64/arm64) are supported."
    }
}

# Check for required environment variables
function Check-Environment {
    Log-Info "Checking environment variables..."

    $required = @{
        "SM_API_KEY" = $env:SM_API_KEY
        "SM_CLIENT_CERT_PASSWORD" = $env:SM_CLIENT_CERT_PASSWORD
        "SM_KEYPAIR_ALIAS" = $env:SM_KEYPAIR_ALIAS
        "SM_HOST" = $env:SM_HOST
        "SM_CLIENT_CERT_FILE" = $env:SM_CLIENT_CERT_FILE
    }

    $missing = @()
    foreach ($key in $required.Keys) {
        if ([string]::IsNullOrEmpty($required[$key])) {
            $missing += $key
        } else {
            Log-Debug "$key is set (length: $($required[$key].Length))"
        }
    }

    if ($missing.Count -gt 0) {
        throw "Missing required environment variables: $($missing -join ', ')"
    }

    Log-Success "All required environment variables are present"
}

# Setup certificate file
function Setup-Certificate {
    Log-Info "Setting up certificate..."

    # Always try to decode as base64 first
    # If it fails, then treat as file path
    try {
        Log-Info "Attempting to decode certificate as base64..."
        Log-Debug "Input string length: $($env:SM_CLIENT_CERT_FILE.Length) characters"

        $tempCertPath = Join-Path $env:TEMP "digicert_cert_$(Get-Random).p12"

        # Try to decode as base64
        $certBytes = [System.Convert]::FromBase64String($env:SM_CLIENT_CERT_FILE)
        [System.IO.File]::WriteAllBytes($tempCertPath, $certBytes)

        # Validate the decoded certificate size
        $fileSize = (Get-Item $tempCertPath).Length
        if ($fileSize -lt 100) {
            throw "Decoded certificate too small: $fileSize bytes (expected >100 bytes)"
        }

        # Update environment to point to file
        $env:SM_CLIENT_CERT_FILE = $tempCertPath

        Log-Success "Certificate decoded and written to: $tempCertPath"
        Log-Debug "Decoded certificate file size: $fileSize bytes"

        # Register cleanup
        $global:TEMP_CERT_PATH = $tempCertPath

    } catch {
        # If base64 decode fails, check if it's a file path
        Log-Info "Base64 decode failed, checking if it's a file path..."
        Log-Debug "Decode error: $_"

        if (Test-Path $env:SM_CLIENT_CERT_FILE) {
            $fileSize = (Get-Item $env:SM_CLIENT_CERT_FILE).Length

            # Validate file size
            if ($fileSize -lt 100) {
                throw "Certificate file too small: $fileSize bytes at $env:SM_CLIENT_CERT_FILE (possibly corrupted)"
            }

            Log-Info "Using certificate file: $env:SM_CLIENT_CERT_FILE"
            Log-Debug "Certificate file size: $fileSize bytes"
        } else {
            throw "SM_CLIENT_CERT_FILE is neither valid base64 nor an existing file: $env:SM_CLIENT_CERT_FILE"
        }
    }
}

# Install DigiCert KeyLocker tools
function Install-KeyLocker {
    Log-Info "Setting up DigiCert KeyLocker tools..."

    # Define our controlled installation directory
    $installDir = "C:\BuildTools\DigiCert"
    $smctlPath = Join-Path $installDir "smctl.exe"

    # Check if already installed in our controlled location
    if (Test-Path $smctlPath) {
        Log-Success "KeyLocker tools already installed at: $smctlPath"

        # Add to PATH if not already there
        if ($env:PATH -notlike "*$installDir*") {
            $env:PATH = "$installDir;$env:PATH"
            Log-Info "Added to PATH: $installDir"
        }

        return $smctlPath
    }

    Log-Info "Installing KeyLocker tools to: $installDir"

    # Create the installation directory if it doesn't exist
    if (!(Test-Path $installDir)) {
        Log-Info "Creating installation directory: $installDir"
        try {
            New-Item -ItemType Directory -Path $installDir -Force | Out-Null
            Log-Success "Created directory: $installDir"
        } catch {
            throw "Failed to create directory $installDir : $_"
        }
    }

    # Download MSI installer
    # Note: KeyLocker tools currently only available for x64, but works on ARM64 via emulation
    $msiArch = "x64"
    $msiUrl = "https://bun-ci-assets.bun.sh/Keylockertools-windows-${msiArch}.msi"
    $msiPath = Join-Path $env:TEMP "Keylockertools-windows-${msiArch}.msi"

    Log-Info "Downloading MSI from: $msiUrl"
    Log-Info "Downloading to: $msiPath"

    try {
        # Remove existing MSI if present
        if (Test-Path $msiPath) {
            Remove-Item $msiPath -Force
            Log-Debug "Removed existing MSI file"
        }

        # Download with progress tracking
        $webClient = New-Object System.Net.WebClient
        $webClient.DownloadFile($msiUrl, $msiPath)

        if (!(Test-Path $msiPath)) {
            throw "MSI download failed - file not found"
        }

        $fileSize = (Get-Item $msiPath).Length
        Log-Success "MSI downloaded successfully (size: $fileSize bytes)"

    } catch {
        throw "Failed to download MSI: $_"
    }

    # Install MSI
    Log-Info "Installing MSI..."
    Log-Debug "MSI path: $msiPath"
    Log-Debug "File exists: $(Test-Path $msiPath)"
    Log-Debug "File size: $((Get-Item $msiPath).Length) bytes"

    # Check if running as administrator
    $isAdmin = ([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole]::Administrator)
    Log-Info "Running as administrator: $isAdmin"

    # Install MSI silently to our controlled directory
    $arguments = @(
        "/i", "`"$msiPath`"",
        "/quiet",
        "/norestart",
        "TARGETDIR=`"$installDir`"",
        "INSTALLDIR=`"$installDir`"",
        "ACCEPT_EULA=1",
        "ADDLOCAL=ALL"
    )

    Log-Debug "Running: msiexec.exe $($arguments -join ' ')"
    Log-Info "Installing to: $installDir"

    $process = Start-Process -FilePath "msiexec.exe" -ArgumentList $arguments -Wait -PassThru -NoNewWindow

    if ($process.ExitCode -ne 0) {
        Log-Error "MSI installation failed with exit code: $($process.ExitCode)"

        # Try to get error details from event log
        try {
            $events = Get-WinEvent -LogName "Application" -MaxEvents 10 |
                Where-Object { $_.ProviderName -eq "MsiInstaller" -and $_.TimeCreated -gt (Get-Date).AddMinutes(-1) }

            foreach ($event in $events) {
                Log-Debug "MSI Event: $($event.Message)"
            }
        } catch {
            Log-Debug "Could not retrieve MSI installation events"
        }

        throw "MSI installation failed with exit code: $($process.ExitCode)"
    }

    Log-Success "MSI installation completed"

    # Wait for installation to complete
    Start-Sleep -Seconds 2

    # Verify smctl.exe exists in our controlled location
    if (Test-Path $smctlPath) {
        Log-Success "KeyLocker tools installed successfully at: $smctlPath"

        # Add to PATH
        $env:PATH = "$installDir;$env:PATH"
        Log-Info "Added to PATH: $installDir"

        return $smctlPath
    }

    # If not in our expected location, check if it installed somewhere in the directory
    $found = Get-ChildItem -Path $installDir -Filter "smctl.exe" -Recurse -ErrorAction SilentlyContinue |
        Select-Object -First 1

    if ($found) {
        Log-Success "Found smctl.exe at: $($found.FullName)"
        $smctlDir = $found.DirectoryName
        $env:PATH = "$smctlDir;$env:PATH"
        return $found.FullName
    }

    throw "KeyLocker tools installation succeeded but smctl.exe not found in $installDir"
}

# Configure KeyLocker
function Configure-KeyLocker {
    param([string]$SmctlPath)

    Log-Info "Configuring KeyLocker..."

    # Verify smctl is accessible
    try {
        $version = & $SmctlPath --version 2>&1
        Log-Debug "smctl version: $version"
    } catch {
        throw "Failed to run smctl: $_"
    }

    # Configure KeyLocker credentials and environment
    Log-Info "Configuring KeyLocker credentials..."

    try {
        # Save credentials (API key and password)
        Log-Info "Saving credentials to OS store..."
        $saveOutput = & $SmctlPath credentials save $env:SM_API_KEY $env:SM_CLIENT_CERT_PASSWORD 2>&1 | Out-String
        Log-Debug "Credentials save output: $saveOutput"

        if ($saveOutput -like "*Credentials saved*") {
            Log-Success "Credentials saved successfully"
        }

        # Set environment variables for smctl
        Log-Info "Setting KeyLocker environment variables..."
        $env:SM_HOST = $env:SM_HOST # Already set, but ensure it's available
        $env:SM_API_KEY = $env:SM_API_KEY # Already set
        $env:SM_CLIENT_CERT_FILE = $env:SM_CLIENT_CERT_FILE # Path to decoded cert file
        Log-Debug "SM_HOST: $env:SM_HOST"
        Log-Debug "SM_CLIENT_CERT_FILE: $env:SM_CLIENT_CERT_FILE"

        # Run health check
        Log-Info "Running KeyLocker health check..."
        $healthOutput = & $SmctlPath healthcheck 2>&1 | Out-String
        Log-Debug "Health check output: $healthOutput"

        if ($healthOutput -like "*Healthy*" -or $healthOutput -like "*SUCCESS*" -or $LASTEXITCODE -eq 0) {
            Log-Success "KeyLocker health check passed"
        } else {
            Log-Error "Health check failed: $healthOutput"
            # Don't throw here, sometimes healthcheck is flaky but signing still works
        }

        # Sync certificates to Windows certificate store
        Log-Info "Syncing certificates to Windows store..."
        $syncOutput = & $SmctlPath windows certsync 2>&1 | Out-String
        Log-Debug "Certificate sync output: $syncOutput"

        if ($syncOutput -like "*success*" -or $syncOutput -like "*synced*" -or $LASTEXITCODE -eq 0) {
            Log-Success "Certificates synced to Windows store"
        } else {
            Log-Info "Certificate sync output: $syncOutput"
        }

    } catch {
        throw "Failed to configure KeyLocker: $_"
    }
}

# Sign an executable
function Sign-Executable {
    param(
        [string]$ExePath,
        [string]$SmctlPath
    )

    if (!(Test-Path $ExePath)) {
        throw "Executable not found: $ExePath"
    }

    $fileName = Split-Path $ExePath -Leaf
    Log-Info "Signing $fileName..."
    Log-Debug "Full path: $ExePath"
    Log-Debug "File size: $((Get-Item $ExePath).Length) bytes"

    # Check if already signed
    $existingSig = Get-AuthenticodeSignature $ExePath
    if ($existingSig.Status -eq "Valid") {
        Log-Info "$fileName is already signed by: $($existingSig.SignerCertificate.Subject)"
        Log-Info "Skipping re-signing"
        return
    }

    # Sign the executable using smctl
    try {
        # smctl sign command with keypair-alias
        $signArgs = @(
            "sign",
            "--keypair-alias", $env:SM_KEYPAIR_ALIAS,
            "--input", $ExePath,
            "--verbose"
        )

        Log-Debug "Running: $SmctlPath $($signArgs -join ' ')"

        $signOutput = & $SmctlPath $signArgs 2>&1 | Out-String

        if ($LASTEXITCODE -ne 0) {
            Log-Error "Signing output: $signOutput"
            throw "Signing failed with exit code: $LASTEXITCODE"
        }

        Log-Debug "Signing output: $signOutput"
        Log-Success "Signing command completed"

    } catch {
        throw "Failed to sign $fileName : $_"
    }

    # Verify signature
    $newSig = Get-AuthenticodeSignature $ExePath

    if ($newSig.Status -eq "Valid") {
        Log-Success "$fileName signed successfully"
        Log-Info "Signed by: $($newSig.SignerCertificate.Subject)"
        Log-Info "Thumbprint: $($newSig.SignerCertificate.Thumbprint)"
        Log-Info "Valid from: $($newSig.SignerCertificate.NotBefore) to $($newSig.SignerCertificate.NotAfter)"
    } else {
        throw "$fileName signature verification failed: $($newSig.Status) - $($newSig.StatusMessage)"
    }
}

# Cleanup function
function Cleanup {
    if ($global:TEMP_CERT_PATH -and (Test-Path $global:TEMP_CERT_PATH)) {
        try {
            Remove-Item $global:TEMP_CERT_PATH -Force
            Log-Info "Cleaned up temporary certificate"
        } catch {
            Log-Error "Failed to cleanup temporary certificate: $_"
        }
    }
}

# Main execution
try {
    Write-Host "========================================" -ForegroundColor Cyan
    Write-Host " Windows Code Signing for Bun" -ForegroundColor Cyan
    Write-Host "========================================" -ForegroundColor Cyan

    # Ensure we're in a VS environment
    Ensure-VSEnvironment

    # Check environment variables
    Check-Environment

    # Setup certificate
    Setup-Certificate

    # Install and configure KeyLocker
    $smctlPath = Install-KeyLocker
    Configure-KeyLocker -SmctlPath $smctlPath

    # Sign both executables
    Sign-Executable -ExePath $BunProfileExe -SmctlPath $smctlPath
    Sign-Executable -ExePath $BunExe -SmctlPath $smctlPath

    Write-Host "========================================" -ForegroundColor Green
    Write-Host " Code signing completed successfully!" -ForegroundColor Green
    Write-Host "========================================" -ForegroundColor Green

    exit 0

} catch {
    Log-Error "Code signing failed: $_"
    exit 1

} finally {
    Cleanup
}
@@ -1,7 +0,0 @@
import { getCommit, getSecret } from "../../scripts/utils.mjs";

console.log("Submitting...");
const response = await fetch(getSecret("BENCHMARK_URL") + "?tag=_&commit=" + getCommit() + "&artifact_url=_", {
  method: "POST",
});
console.log("Got status " + response.status);
@@ -1,257 +0,0 @@
#!/bin/bash

set -eo pipefail

function assert_main() {
  if [ -z "$BUILDKITE_REPO" ]; then
    echo "error: Cannot find repository for this build"
    exit 1
  fi
  if [ -z "$BUILDKITE_COMMIT" ]; then
    echo "error: Cannot find commit for this build"
    exit 1
  fi
  if [ -n "$BUILDKITE_PULL_REQUEST_REPO" ] && [ "$BUILDKITE_REPO" != "$BUILDKITE_PULL_REQUEST_REPO" ]; then
    echo "error: Cannot upload release from a fork"
    exit 1
  fi
  if [ "$BUILDKITE_PULL_REQUEST" != "false" ]; then
    echo "error: Cannot upload release from a pull request"
    exit 1
  fi
  if [ "$BUILDKITE_BRANCH" != "main" ]; then
    echo "error: Cannot upload release from a branch other than main"
    exit 1
  fi
}

function assert_buildkite_agent() {
  if ! command -v "buildkite-agent" &> /dev/null; then
    echo "error: Cannot find buildkite-agent, please install it:"
    echo "https://buildkite.com/docs/agent/v3/install"
    exit 1
  fi
}

function assert_github() {
  assert_command "gh" "gh" "https://github.com/cli/cli#installation"
  assert_buildkite_secret "GITHUB_TOKEN"
  # gh expects the token in $GH_TOKEN
  export GH_TOKEN="$GITHUB_TOKEN"
}

function assert_aws() {
  assert_command "aws" "awscli" "https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html"
  for secret in "AWS_ACCESS_KEY_ID" "AWS_SECRET_ACCESS_KEY" "AWS_ENDPOINT"; do
    assert_buildkite_secret "$secret"
  done
  assert_buildkite_secret "AWS_BUCKET" --skip-redaction
}

function assert_sentry() {
  assert_command "sentry-cli" "getsentry/tools/sentry-cli" "https://docs.sentry.io/cli/installation/"
  for secret in "SENTRY_AUTH_TOKEN" "SENTRY_ORG" "SENTRY_PROJECT"; do
    assert_buildkite_secret "$secret"
  done
}

function run_command() {
  set -x
  "$@"
  { set +x; } 2>/dev/null
}

function assert_command() {
  local command="$1"
  local package="$2"
  local help_url="$3"
  if ! command -v "$command" &> /dev/null; then
    echo "warning: $command is not installed, installing..."
    if command -v brew &> /dev/null; then
      HOMEBREW_NO_AUTO_UPDATE=1 run_command brew install "$package"
    else
      echo "error: Cannot install $command, please install it"
      if [ -n "$help_url" ]; then
        echo ""
        echo "hint: See $help_url for help"
      fi
      exit 1
    fi
  fi
}

function assert_buildkite_secret() {
  local key="$1"
  local value=$(buildkite-agent secret get "$key" ${@:2})
  if [ -z "$value" ]; then
    echo "error: Cannot find $key secret"
    echo ""
    echo "hint: Create a secret named $key with a value:"
    echo "https://buildkite.com/docs/pipelines/buildkite-secrets"
    exit 1
  fi
  export "$key"="$value"
}

function release_tag() {
  local version="$1"
  if [ "$version" == "canary" ]; then
    echo "canary"
  else
    echo "bun-v$version"
  fi
}

function create_sentry_release() {
  local version="$1"
  local release="$version"
  if [ "$version" == "canary" ]; then
    release="$BUILDKITE_COMMIT-canary"
  fi
  run_command sentry-cli releases new "$release" --finalize
  run_command sentry-cli releases set-commits "$release" --auto --ignore-missing
  if [ "$version" == "canary" ]; then
    run_command sentry-cli deploys new --env="canary" --release="$release"
  fi
}

function download_buildkite_artifact() {
  local name="$1"
  local dir="$2"
  if [ -z "$dir" ]; then
    dir="."
  fi
  run_command buildkite-agent artifact download "$name" "$dir"
  if [ ! -f "$dir/$name" ]; then
    echo "error: Cannot find Buildkite artifact: $name"
    exit 1
  fi
}

function upload_github_asset() {
  local version="$1"
  local tag="$(release_tag "$version")"
  local file="$2"
  run_command gh release upload "$tag" "$file" --clobber --repo "$BUILDKITE_REPO"

  # Sometimes the upload fails, maybe this is a race condition in the gh CLI?
  while [ "$(gh release view "$tag" --repo "$BUILDKITE_REPO" | grep -c "$file")" -eq 0 ]; do
    echo "warn: Uploading $file to $tag failed, retrying..."
    sleep "$((RANDOM % 5 + 1))"
    run_command gh release upload "$tag" "$file" --clobber --repo "$BUILDKITE_REPO"
  done
}

function update_github_release() {
  local version="$1"
  local tag="$(release_tag "$version")"
  if [ "$tag" == "canary" ]; then
    sleep 5 # There is possibly a race condition where this overwrites artifacts?
    run_command gh release edit "$tag" --repo "$BUILDKITE_REPO" \
      --notes "This release of Bun corresponds to the commit: $BUILDKITE_COMMIT"
  fi
}

function upload_s3_file() {
  local folder="$1"
  local file="$2"
  run_command aws --endpoint-url="$AWS_ENDPOINT" s3 cp "$file" "s3://$AWS_BUCKET/$folder/$file"
}

function send_discord_announcement() {
  local value=$(buildkite-agent secret get "BUN_ANNOUNCE_CANARY_WEBHOOK_URL")
  if [ -z "$value" ]; then
    echo "warn: BUN_ANNOUNCE_CANARY_WEBHOOK_URL not set, skipping Discord announcement"
    return
  fi

  local version="$1"
  local commit="$BUILDKITE_COMMIT"
  local short_sha="${commit:0:7}"
  local commit_url="https://github.com/oven-sh/bun/commit/$commit"

  if [ "$version" == "canary" ]; then
    local json_payload=$(cat <<EOF
{
  "embeds": [{
    "title": "New Bun Canary now available",
    "description": "A new canary build of Bun has been automatically uploaded ([${short_sha}](${commit_url})). To upgrade, run:\n\n\`\`\`shell\nbun upgrade --canary\n\`\`\`\nCommit: \`${commit}\`",
    "color": 16023551,
    "timestamp": "$(date -u +%Y-%m-%dT%H:%M:%SZ)"
  }]
}
EOF
)

    curl -H "Content-Type: application/json" \
      -d "$json_payload" \
      -sf \
      "$value" >/dev/null
  fi
}

function create_release() {
  assert_main
  assert_buildkite_agent
  assert_github
  assert_aws
  assert_sentry

  local tag="$1" # 'canary' or 'x.y.z'
  local artifacts=(
    bun-darwin-aarch64.zip
    bun-darwin-aarch64-profile.zip
    bun-darwin-x64.zip
    bun-darwin-x64-profile.zip
    bun-linux-aarch64.zip
    bun-linux-aarch64-profile.zip
    bun-linux-x64.zip
    bun-linux-x64-profile.zip
    bun-linux-x64-baseline.zip
    bun-linux-x64-baseline-profile.zip
    bun-linux-aarch64-musl.zip
    bun-linux-aarch64-musl-profile.zip
    bun-linux-x64-musl.zip
    bun-linux-x64-musl-profile.zip
    bun-linux-x64-musl-baseline.zip
    bun-linux-x64-musl-baseline-profile.zip
    bun-windows-x64.zip
    bun-windows-x64-profile.zip
    bun-windows-x64-baseline.zip
    bun-windows-x64-baseline-profile.zip
    # TODO: Enable when Windows ARM64 CI runners are ready
    # bun-windows-aarch64.zip
    # bun-windows-aarch64-profile.zip
  )

  function upload_artifact() {
    local artifact="$1"
    download_buildkite_artifact "$artifact"
    if [ "$tag" == "canary" ]; then
      upload_s3_file "releases/$BUILDKITE_COMMIT-canary" "$artifact" &
    else
      upload_s3_file "releases/$BUILDKITE_COMMIT" "$artifact" &
    fi
    upload_s3_file "releases/$tag" "$artifact" &
    upload_github_asset "$tag" "$artifact" &
    wait
  }

  for artifact in "${artifacts[@]}"; do
    upload_artifact "$artifact"
  done

  update_github_release "$tag"
  create_sentry_release "$tag"
  send_discord_announcement "$tag"
}

function assert_canary() {
  if [ -z "$CANARY" ] || [ "$CANARY" == "0" ]; then
    echo "warn: Skipping release because this is not a canary build"
    exit 0
  fi
}

assert_canary
create_release "canary"
@@ -1,9 +0,0 @@
WarningsAsErrors: "*"
FormatStyle: webkit
Checks: >
  -*,
  clang-analyzer-*,
  -clang-analyzer-optin.core.EnumCastOutOfRange
  -clang-analyzer-webkit.UncountedLambdaCapturesChecker
  -clang-analyzer-optin.core.EnumCastOutOfRange
  -clang-analyzer-webkit.RefCntblBaseVirtualDtor
8 .clangd
@@ -1,8 +0,0 @@
Index:
  Background: Skip # Disable slow background indexing of these files.

CompileFlags:
  CompilationDatabase: build/debug

Diagnostics:
  UnusedIncludes: None
@@ -1,43 +0,0 @@
---
allowed-tools: Bash(gh issue view:*), Bash(gh search:*), Bash(gh issue list:*), Bash(gh api:*), Bash(gh issue comment:*)
description: Find duplicate GitHub issues
---

# Issue deduplication command

Find up to 3 likely duplicate issues for a given GitHub issue.

To do this, follow these steps precisely:

1. Use an agent to check if the GitHub issue (a) is closed, (b) does not need to be deduped (eg. because it is broad product feedback without a specific solution, or positive feedback), or (c) already has a duplicate detection comment (check for the exact HTML marker `<!-- dedupe-bot:marker -->` in the issue comments - ignore other bot comments). If so, do not proceed.
2. Use an agent to view the GitHub issue, and ask the agent to return a summary of the issue
3. Then, launch 5 parallel agents to search GitHub for duplicates of this issue, using diverse keywords and search approaches, using the summary from Step 2. **IMPORTANT**: Always scope searches with `repo:owner/repo` to constrain results to the current repository only.
4. Next, feed the results from Steps 2 and 3 into another agent, so that it can filter out false positives that are likely not actually duplicates of the original issue. If there are no duplicates remaining, do not proceed.
5. Finally, comment back on the issue with a list of up to three duplicate issues (or zero, if there are no likely duplicates)

Notes (be sure to tell this to your agents, too):

- Use `gh` to interact with GitHub, rather than web fetch
- Do not use other tools beyond `gh` (eg. don't use other MCP servers, file edit, etc.)
- Make a todo list first
- Always scope searches with `repo:owner/repo` to prevent cross-repo false positives
- For your comment, follow this format precisely (assuming for this example that you found 3 suspected duplicates):

---

Found 3 possible duplicate issues:

1. <link to issue>
2. <link to issue>
3. <link to issue>

This issue will be automatically closed as a duplicate in 3 days.

- If your issue is a duplicate, please close it and 👍 the existing issue instead
- To prevent auto-closure, add a comment or 👎 this comment

🤖 Generated with [Claude Code](https://claude.ai/code)

<!-- dedupe-bot:marker -->

---
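
The repo-scoping note above matters in practice. As a hypothetical illustration (the query keywords are invented; only the repository name comes from this mirror), one of the search agents might run something like:

```bash
# Hypothetical repo-scoped duplicate search; replace the keywords with terms from the issue summary.
gh search issues "segfault during bun install repo:oven-sh/bun" --limit 10 --json number,title,url
```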
@@ -1,92 +0,0 @@
# Upgrading Bun's Self-Reported Node.js Version

This guide explains how to upgrade the Node.js version that Bun reports for compatibility with Node.js packages and native addons.

## Overview

Bun reports a Node.js version for compatibility with the Node.js ecosystem. This affects:
- `process.version` output
- Node-API (N-API) compatibility
- Native addon ABI compatibility
- V8 API compatibility for addons using V8 directly

## Files That Always Need Updates

### 1. Bootstrap Scripts

- `scripts/bootstrap.sh` - Update `NODEJS_VERSION=`
- `scripts/bootstrap.ps1` - Update `$NODEJS_VERSION =`

### 2. CMake Configuration

- `cmake/Options.cmake`
  - `NODEJS_VERSION` - The Node.js version string (e.g., "24.3.0")
  - `NODEJS_ABI_VERSION` - The ABI version number (find using command below)

### 3. Version Strings

- `src/bun.js/bindings/BunProcess.cpp`
  - Update `Bun__versions_node` with the Node.js version
  - Update `Bun__versions_v8` with the V8 version (find using command below)

### 4. N-API Version

- `src/napi/js_native_api.h`
  - Update `NAPI_VERSION` define (check Node.js release notes)
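
Before editing, it can help to confirm where each of these constants lives. A quick, hypothetical grep helper (the file paths and symbol names are taken from the list above; nothing else is assumed):

```bash
# Hypothetical helper: locate each version constant the sections above say to bump.
grep -n "NODEJS_VERSION" scripts/bootstrap.sh scripts/bootstrap.ps1 cmake/Options.cmake
grep -n "NODEJS_ABI_VERSION" cmake/Options.cmake
grep -nE "Bun__versions_(node|v8)" src/bun.js/bindings/BunProcess.cpp
grep -n "NAPI_VERSION" src/napi/js_native_api.h
```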

## Files That May Need Updates

Only check these if the build fails or tests crash after updating version numbers:
- V8 compatibility files in `src/bun.js/bindings/v8/` (if V8 API changed)
- Test files (if Node.js requires newer C++ standard)

## Quick Commands to Find Version Info

```bash
# Get latest Node.js version info
curl -s https://nodejs.org/dist/index.json | jq '.[0]'

# Get V8 version for a specific Node.js version (replace v24.3.0)
curl -s https://nodejs.org/dist/v24.3.0/node-v24.3.0-headers.tar.gz | tar -xzO node-v24.3.0/include/node/node_version.h | grep V8_VERSION

# Get ABI version for a specific Node.js version
curl -s https://nodejs.org/dist/v24.3.0/node-v24.3.0-headers.tar.gz | tar -xzO node-v24.3.0/include/node/node_version.h | grep NODE_MODULE_VERSION

# Or use the ABI registry
curl -s https://raw.githubusercontent.com/nodejs/node/main/doc/abi_version_registry.json | jq '.NODE_MODULE_VERSION."<version>"'
```

## Update Process

1. **Gather version info** using the commands above
2. **Update the required files** listed in the sections above
3. **Build and test**:

   ```bash
   bun bd
   bun bd -e "console.log(process.version)"
   bun bd -e "console.log(process.versions.v8)"
   bun bd test test/v8/v8.test.ts
   bun bd test test/napi/napi.test.ts
   ```

4. **Check for V8 API changes** only if build fails or tests crash:
   - Compare v8-function-callback.h between versions
   - Check v8-internal.h for Isolate size changes
   - Look for new required APIs in build errors

## If Build Fails or Tests Crash

The V8 API rarely has breaking changes between minor Node.js versions. If you encounter issues:

1. Check build errors for missing symbols or type mismatches
2. Compare V8 headers between old and new Node.js versions
3. Most issues can be resolved by implementing missing functions or adjusting structures

## Testing Checklist

- [ ] `process.version` returns correct version
- [ ] `process.versions.v8` returns correct V8 version
- [ ] `process.config.variables.node_module_version` returns correct ABI
- [ ] V8 tests pass
- [ ] N-API tests pass
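
For the first three checklist items, a quick spot-check can reuse the `bun bd -e` pattern from the build-and-test step above (an informal one-liner, not part of the official process):

```bash
# Hypothetical spot-check: print the three reported values in one go.
bun bd -e "console.log(process.version, process.versions.v8, process.config.variables.node_module_version)"
```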

## Notes

- Most upgrades only require updating version numbers
- Major V8 version changes (rare) may require API updates
- The V8 shim implements only APIs used by common native addons
@@ -1,26 +0,0 @@
Upgrade Bun's WebKit fork to the latest upstream version of WebKit.

To do that (a condensed shell sketch follows this section):

- cd vendor/WebKit
- git fetch upstream
- git merge upstream main
- Fix the merge conflicts
- bun build.ts debug
- While it compiles, in another task review the JSC commits between the last version of WebKit and the new version. Write up a summary of the WebKit changes in a file called "webkit-changes.md"
- bun run build:local (build a build of Bun with the new WebKit, make sure it compiles)
- After making sure it compiles, run some code to make sure things work. Something like ./build/debug-local/bun-debug --print '42' should be all you need
- cd vendor/WebKit
- git commit -am "Upgrade Webkit to the latest version"
- git push
- get the commit SHA in the vendor/WebKit directory of your new commit
- cd ../../ (back to bun)
- Update WEBKIT_VERSION in cmake/tools/SetupWebKit.cmake to the commit SHA of your new commit
- git checkout -b bun/webkit-upgrade-<commit-sha>
- commit + push (without adding the webkit-changes.md file)
- create a PR titled "Upgrade Webkit to the <commit-sha>", and paste your webkit-changes.md into the PR description
- delete the webkit-changes.md file

Things to check for a successful upgrade:
- Did JSType in vendor/WebKit/Source/JavaScriptCore have any recent changes? Do the enum values align with what's present in src/bun.js/bindings/JSType.zig?
- Were there any changes to the WebCore code generator? If there are C++ compilation errors, check for differences in the generated code in vendor/WebKit/Source/WebCore/bindings/scripts/test/JS/
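
Below is a minimal, hypothetical shell condensation of the steps above. It assumes the fork's upstream remote is already configured and that its default branch is reachable as `upstream/main`; conflict resolution, the webkit-changes.md write-up, and opening the PR remain manual steps.

```bash
# Condensed sketch of the upgrade flow described above (not a drop-in script).
cd vendor/WebKit
git fetch upstream
git merge upstream/main                      # resolve any merge conflicts by hand
bun build.ts debug                           # compile WebKit while reviewing JSC changes separately

cd ../..
bun run build:local                          # build Bun against the new WebKit
./build/debug-local/bun-debug --print '42'   # quick smoke test

cd vendor/WebKit
git commit -am "Upgrade Webkit to the latest version"
git push
WEBKIT_SHA=$(git rev-parse HEAD)

cd ../..
# Update WEBKIT_VERSION in cmake/tools/SetupWebKit.cmake to ${WEBKIT_SHA} by hand,
# then commit, push, and open the PR titled "Upgrade Webkit to the <commit-sha>".
git checkout -b "bun/webkit-upgrade-${WEBKIT_SHA}"
```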
@@ -1,88 +0,0 @@
#!/usr/bin/env bun
import { extname } from "path";
import { spawnSync } from "child_process";

const input = await Bun.stdin.json();

const toolName = input.tool_name;
const toolInput = input.tool_input || {};
const filePath = toolInput.file_path;

// Only process Write, Edit, and MultiEdit tools
if (!["Write", "Edit", "MultiEdit"].includes(toolName)) {
  process.exit(0);
}

// Only format files that actually have a path
if (!filePath) {
  process.exit(0);
}

const ext = extname(filePath);

function formatZigFile() {
  try {
    // Format the Zig file
    const result = spawnSync("vendor/zig/zig.exe", ["fmt", filePath], {
      cwd: process.env.CLAUDE_PROJECT_DIR || process.cwd(),
      encoding: "utf-8",
    });

    if (result.error) {
      console.error(`Failed to format ${filePath}: ${result.error.message}`);
      process.exit(0);
    }

    if (result.status !== 0) {
      console.error(`zig fmt failed for ${filePath}:`);
      if (result.stderr) {
        console.error(result.stderr);
      }
      process.exit(0);
    }
  } catch (error) {}
}

function formatTypeScriptFile() {
  try {
    // Format the TypeScript file
    const result = spawnSync(
      "./node_modules/.bin/prettier",
      ["--plugin=prettier-plugin-organize-imports", "--config", ".prettierrc", "--write", filePath],
      {
        cwd: process.env.CLAUDE_PROJECT_DIR || process.cwd(),
        encoding: "utf-8",
      },
    );
  } catch (error) {}
}

if (ext === ".zig") {
  formatZigFile();
} else if (
  [
    ".cjs",
    ".css",
    ".html",
    ".js",
    ".json",
    ".jsonc",
    ".jsx",
    ".less",
    ".mjs",
    ".pcss",
    ".postcss",
    ".sass",
    ".scss",
    ".styl",
    ".stylus",
    ".toml",
    ".ts",
    ".tsx",
    ".yaml",
  ].includes(ext)
) {
  formatTypeScriptFile();
}

process.exit(0);
@@ -1,207 +0,0 @@
#!/usr/bin/env bun
import { basename, extname } from "path";

const input = await Bun.stdin.json();

const toolName = input.tool_name;
const toolInput = input.tool_input || {};
const command = toolInput.command || "";
const timeout = toolInput.timeout;
const cwd = input.cwd || "";

// Get environment variables from the hook context
// Note: We check process.env directly as env vars are inherited
let useSystemBun = process.env.USE_SYSTEM_BUN;

if (toolName !== "Bash" || !command) {
  process.exit(0);
}

function denyWithReason(reason) {
  const output = {
    hookSpecificOutput: {
      hookEventName: "PreToolUse",
      permissionDecision: "deny",
      permissionDecisionReason: reason,
    },
  };
  console.log(JSON.stringify(output));
  process.exit(0);
}

// Parse the command to extract argv0 and positional args
let tokens;
try {
  // Simple shell parsing - split on spaces but respect quotes (both single and double)
  tokens = command.match(/(?:[^\s"']+|"[^"]*"|'[^']*')+/g)?.map(t => t.replace(/^['"]|['"]$/g, "")) || [];
} catch {
  process.exit(0);
}

if (tokens.length === 0) {
  process.exit(0);
}

// Strip inline environment variable assignments (e.g., FOO=1 bun test)
const inlineEnv = new Map();
let commandStart = 0;
while (
  commandStart < tokens.length &&
  /^[A-Za-z_][A-Za-z0-9_]*=/.test(tokens[commandStart]) &&
  !tokens[commandStart].includes("/")
) {
  const [name, value = ""] = tokens[commandStart].split("=", 2);
  inlineEnv.set(name, value);
  commandStart++;
}
if (commandStart >= tokens.length) {
  process.exit(0);
}
tokens = tokens.slice(commandStart);
useSystemBun = inlineEnv.get("USE_SYSTEM_BUN") ?? useSystemBun;

// Get the executable name (argv0)
const argv0 = basename(tokens[0], extname(tokens[0]));

// Check if it's zig or zig.exe
if (argv0 === "zig") {
  // Filter out flags (starting with -) to get positional arguments
  const positionalArgs = tokens.slice(1).filter(arg => !arg.startsWith("-"));

  // Check if the positional args contain "build" followed by "obj"
  if (positionalArgs.length >= 2 && positionalArgs[0] === "build" && positionalArgs[1] === "obj") {
    denyWithReason("error: Use `bun bd` to build Bun and wait patiently");
  }
}

// Check if argv0 is timeout and the command is "bun bd"
if (argv0 === "timeout") {
  // Find the actual command after timeout and its arguments
  const timeoutArgEndIndex = tokens.slice(1).findIndex(t => !t.startsWith("-") && !/^\d/.test(t));
  if (timeoutArgEndIndex === -1) {
    process.exit(0);
  }

  const actualCommandIndex = timeoutArgEndIndex + 1;
  if (actualCommandIndex >= tokens.length) {
    process.exit(0);
  }

  const actualCommand = basename(tokens[actualCommandIndex]);
  const restArgs = tokens.slice(actualCommandIndex + 1);

  // Check if it's "bun bd" or "bun-debug bd" without other positional args
  if (actualCommand === "bun" || actualCommand.includes("bun-debug")) {
    // Claude is a sneaky fucker
    let positionalArgs = restArgs.filter(arg => !arg.startsWith("-"));
    const redirectStderrToStdoutIndex = positionalArgs.findIndex(arg => arg === "2>&1");
    if (redirectStderrToStdoutIndex !== -1) {
      positionalArgs.splice(redirectStderrToStdoutIndex, 1);
    }
    const redirectStdoutToStderrIndex = positionalArgs.findIndex(arg => arg === "1>&2");
    if (redirectStdoutToStderrIndex !== -1) {
      positionalArgs.splice(redirectStdoutToStderrIndex, 1);
    }

    const redirectToFileIndex = positionalArgs.findIndex(arg => arg === ">");
    if (redirectToFileIndex !== -1) {
      positionalArgs.splice(redirectToFileIndex, 2);
    }

    const redirectToFileAppendIndex = positionalArgs.findIndex(arg => arg === ">>");
    if (redirectToFileAppendIndex !== -1) {
      positionalArgs.splice(redirectToFileAppendIndex, 2);
    }

    const redirectToFileInlineIndex = positionalArgs.findIndex(arg => arg.startsWith(">"));
    if (redirectToFileInlineIndex !== -1) {
      positionalArgs.splice(redirectToFileInlineIndex, 1);
    }

    const pipeIndex = positionalArgs.findIndex(arg => arg === "|");
    if (pipeIndex !== -1) {
      positionalArgs = positionalArgs.slice(0, pipeIndex);
    }

    positionalArgs = positionalArgs.map(arg => arg.trim()).filter(Boolean);

    if (positionalArgs.length === 1 && positionalArgs[0] === "bd") {
      denyWithReason("error: Run `bun bd` without a timeout");
    }
  }
}

// Check if command is "bun .* test" or "bun-debug test" with -u/--update-snapshots AND -t/--test-name-pattern
if (argv0 === "bun" || argv0.includes("bun-debug")) {
  const allArgs = tokens.slice(1);

  // Check if "test" is in positional args or "bd" followed by "test"
  const positionalArgs = allArgs.filter(arg => !arg.startsWith("-"));
  const hasTest = positionalArgs.includes("test") || (positionalArgs[0] === "bd" && positionalArgs[1] === "test");

  if (hasTest) {
    const hasUpdateSnapshots = allArgs.some(arg => arg === "-u" || arg === "--update-snapshots");
    const hasTestNamePattern = allArgs.some(arg => arg === "-t" || arg === "--test-name-pattern");

    if (hasUpdateSnapshots && hasTestNamePattern) {
      denyWithReason("error: Cannot use -u/--update-snapshots with -t/--test-name-pattern");
    }
  }
}

// Check if timeout option is set for "bun bd" command
if (timeout !== undefined && (argv0 === "bun" || argv0.includes("bun-debug"))) {
  const positionalArgs = tokens.slice(1).filter(arg => !arg.startsWith("-"));
  if (positionalArgs.length === 1 && positionalArgs[0] === "bd") {
    denyWithReason("error: Run `bun bd` without a timeout");
  }
}

// Check if running "bun test <file>" without USE_SYSTEM_BUN=1
if ((argv0 === "bun" || argv0.includes("bun-debug")) && useSystemBun !== "1") {
  const allArgs = tokens.slice(1);
  const positionalArgs = allArgs.filter(arg => !arg.startsWith("-"));

  // Check if it's "test" (not "bd test")
  if (positionalArgs.length >= 1 && positionalArgs[0] === "test" && positionalArgs[0] !== "bd") {
    denyWithReason(
      "error: In development, use `bun bd test <file>` to test your changes. If you meant to use a release version, set USE_SYSTEM_BUN=1",
    );
  }
}

// Check if running "bun bd test" from bun repo root or test folder without a file path
if (argv0 === "bun" || argv0.includes("bun-debug")) {
  const allArgs = tokens.slice(1);
  const positionalArgs = allArgs.filter(arg => !arg.startsWith("-"));

  // Check if it's "bd test"
  if (positionalArgs.length >= 2 && positionalArgs[0] === "bd" && positionalArgs[1] === "test") {
    // Check if cwd is the bun repo root or test folder
    const isBunRepoRoot = cwd === "/workspace/bun" || cwd.endsWith("/bun");
    const isTestFolder = cwd.endsWith("/bun/test");

    if (isBunRepoRoot || isTestFolder) {
      // Check if there's a file path argument (looks like a path: contains / or has test extension)
      const hasFilePath = positionalArgs
        .slice(2)
        .some(
          arg =>
            arg.includes("/") ||
            arg.endsWith(".test.ts") ||
            arg.endsWith(".test.js") ||
            arg.endsWith(".test.tsx") ||
            arg.endsWith(".test.jsx"),
        );

      if (!hasFilePath) {
        denyWithReason(
          "error: `bun bd test` from repo root or test folder will run all tests. Use `bun bd test <path>` with a specific test file.",
        );
      }
    }
  }
}

// Allow the command to proceed
process.exit(0);
@@ -1,26 +0,0 @@
{
  "hooks": {
    "PreToolUse": [
      {
        "matcher": "Bash",
        "hooks": [
          {
            "type": "command",
            "command": "\"$CLAUDE_PROJECT_DIR\"/.claude/hooks/pre-bash-zig-build.js"
          }
        ]
      }
    ],
    "PostToolUse": [
      {
        "matcher": "Write|Edit|MultiEdit",
        "hooks": [
          {
            "type": "command",
            "command": "\"$CLAUDE_PROJECT_DIR\"/.claude/hooks/post-edit-zig-format.js"
          }
        ]
      }
    ]
  }
}
@@ -1,184 +0,0 @@
---
name: implementing-jsc-classes-cpp
description: Implements JavaScript classes in C++ using JavaScriptCore. Use when creating new JS classes with C++ bindings, prototypes, or constructors.
---

# Implementing JavaScript Classes in C++

## Class Structure

For publicly accessible Constructor and Prototype, create 3 classes:

1. **`class Foo : public JSC::DestructibleObject`** - if C++ fields exist; otherwise use `JSC::constructEmptyObject` with `putDirectOffset`
2. **`class FooPrototype : public JSC::JSNonFinalObject`**
3. **`class FooConstructor : public JSC::InternalFunction`**

No public constructor? Only Prototype and class needed.
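
The sections below show the Prototype and Constructor classes but not the object class itself. As a rough sketch only (the `JSC::JSDestructibleObject` base, the `create()` signature, and the `m_impl` field are assumptions, not Bun's exact convention), the object class could look like:

```cpp
class JSFoo final : public JSC::JSDestructibleObject {
public:
    using Base = JSC::JSDestructibleObject;
    static constexpr unsigned StructureFlags = Base::StructureFlags;

    // globalObject is unnamed/unused here; the parameter is kept only so the
    // call shape matches the Bun__Foo__toJS example later in this document.
    static JSFoo* create(JSC::VM& vm, JSC::Structure* structure, JSC::JSGlobalObject*, Foo* impl) {
        JSFoo* object = new (NotNull, JSC::allocateCell<JSFoo>(vm)) JSFoo(vm, structure, impl);
        object->finishCreation(vm);
        return object;
    }

    DECLARE_INFO;

    static JSC::Structure* createStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::JSValue prototype) {
        return JSC::Structure::create(vm, globalObject, prototype, JSC::TypeInfo(JSC::ObjectType, StructureFlags), info());
    }

    // Classes with C++ fields also need the subspaceFor() shown in "Iso Subspaces" below.

private:
    JSFoo(JSC::VM& vm, JSC::Structure* structure, Foo* impl) : Base(vm, structure), m_impl(impl) {}
    void finishCreation(JSC::VM& vm) { Base::finishCreation(vm); }

    Foo* m_impl; // native state owned by this wrapper (assumed field)
};
```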

## Iso Subspaces

Classes with C++ fields need subspaces in:

- `src/bun.js/bindings/webcore/DOMClientIsoSubspaces.h`
- `src/bun.js/bindings/webcore/DOMIsoSubspaces.h`
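
Each of those headers typically declares a per-class slot whose name matches the lambdas in the next block; a sketch (not the exact file contents):

```cpp
// DOMClientIsoSubspaces.h
std::unique_ptr<JSC::GCClient::IsoSubspace> m_clientSubspaceForMyClassT;

// DOMIsoSubspaces.h
std::unique_ptr<JSC::IsoSubspace> m_subspaceForMyClassT;
```

The class then wires `subspaceFor` to those slots: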

```cpp
template<typename MyClassT, JSC::SubspaceAccess mode>
static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm) {
    if constexpr (mode == JSC::SubspaceAccess::Concurrently)
        return nullptr;
    return WebCore::subspaceForImpl<MyClassT, WebCore::UseCustomHeapCellType::No>(
        vm,
        [](auto& spaces) { return spaces.m_clientSubspaceForMyClassT.get(); },
        [](auto& spaces, auto&& space) { spaces.m_clientSubspaceForMyClassT = std::forward<decltype(space)>(space); },
        [](auto& spaces) { return spaces.m_subspaceForMyClassT.get(); },
        [](auto& spaces, auto&& space) { spaces.m_subspaceForMyClassT = std::forward<decltype(space)>(space); });
}
```

## Property Definitions

```cpp
static JSC_DECLARE_HOST_FUNCTION(jsFooProtoFuncMethod);
static JSC_DECLARE_CUSTOM_GETTER(jsFooGetter_property);

static const HashTableValue JSFooPrototypeTableValues[] = {
    { "property"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsFooGetter_property, 0 } },
    { "method"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsFooProtoFuncMethod, 1 } },
};
```

## Prototype Class

```cpp
class JSFooPrototype final : public JSC::JSNonFinalObject {
public:
    using Base = JSC::JSNonFinalObject;
    static constexpr unsigned StructureFlags = Base::StructureFlags;

    static JSFooPrototype* create(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::Structure* structure) {
        JSFooPrototype* prototype = new (NotNull, allocateCell<JSFooPrototype>(vm)) JSFooPrototype(vm, structure);
        prototype->finishCreation(vm);
        return prototype;
    }

    template<typename, JSC::SubspaceAccess>
    static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm) { return &vm.plainObjectSpace(); }

    DECLARE_INFO;

    static JSC::Structure* createStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::JSValue prototype) {
        auto* structure = JSC::Structure::create(vm, globalObject, prototype, JSC::TypeInfo(JSC::ObjectType, StructureFlags), info());
        structure->setMayBePrototype(true);
        return structure;
    }

private:
    JSFooPrototype(JSC::VM& vm, JSC::Structure* structure) : Base(vm, structure) {}
    void finishCreation(JSC::VM& vm);
};

void JSFooPrototype::finishCreation(VM& vm) {
    Base::finishCreation(vm);
    reifyStaticProperties(vm, JSFoo::info(), JSFooPrototypeTableValues, *this);
    JSC_TO_STRING_TAG_WITHOUT_TRANSITION();
}
```

## Getter/Setter/Function Definitions

```cpp
// Getter
JSC_DEFINE_CUSTOM_GETTER(jsFooGetter_property, (JSGlobalObject* globalObject, EncodedJSValue thisValue, PropertyName)) {
    VM& vm = globalObject->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    JSFoo* thisObject = jsDynamicCast<JSFoo*>(JSValue::decode(thisValue));
    if (UNLIKELY(!thisObject)) {
        Bun::throwThisTypeError(*globalObject, scope, "JSFoo"_s, "property"_s);
        return {};
    }
    return JSValue::encode(jsBoolean(thisObject->value()));
}

// Function
JSC_DEFINE_HOST_FUNCTION(jsFooProtoFuncMethod, (JSGlobalObject* globalObject, CallFrame* callFrame)) {
    VM& vm = globalObject->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    auto* thisObject = jsDynamicCast<JSFoo*>(callFrame->thisValue());
    if (UNLIKELY(!thisObject)) {
        Bun::throwThisTypeError(*globalObject, scope, "Foo"_s, "method"_s);
        return {};
    }
    return JSValue::encode(thisObject->doSomething(vm, globalObject));
}
```

## Constructor Class

```cpp
class JSFooConstructor final : public JSC::InternalFunction {
public:
    using Base = JSC::InternalFunction;
    static constexpr unsigned StructureFlags = Base::StructureFlags;

    static JSFooConstructor* create(JSC::VM& vm, JSC::Structure* structure, JSC::JSObject* prototype) {
        JSFooConstructor* constructor = new (NotNull, JSC::allocateCell<JSFooConstructor>(vm)) JSFooConstructor(vm, structure);
        constructor->finishCreation(vm, prototype);
        return constructor;
    }

    DECLARE_INFO;

    template<typename CellType, JSC::SubspaceAccess>
    static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm) { return &vm.internalFunctionSpace(); }

    static JSC::Structure* createStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::JSValue prototype) {
        return JSC::Structure::create(vm, globalObject, prototype, JSC::TypeInfo(JSC::InternalFunctionType, StructureFlags), info());
    }

private:
    JSFooConstructor(JSC::VM& vm, JSC::Structure* structure) : Base(vm, structure, callFoo, constructFoo) {}

    void finishCreation(JSC::VM& vm, JSC::JSObject* prototype) {
        Base::finishCreation(vm, 0, "Foo"_s);
        putDirectWithoutTransition(vm, vm.propertyNames->prototype, prototype, JSC::PropertyAttribute::DontEnum | JSC::PropertyAttribute::DontDelete | JSC::PropertyAttribute::ReadOnly);
    }
};
```

## Structure Caching

Add to `ZigGlobalObject.h`:

```cpp
JSC::LazyClassStructure m_JSFooClassStructure;
```

Initialize in `ZigGlobalObject.cpp`:

```cpp
m_JSFooClassStructure.initLater([](LazyClassStructure::Initializer& init) {
    Bun::initJSFooClassStructure(init);
});
```
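
`initJSFooClassStructure` is not defined above; a plausible sketch, assuming JSC's standard `LazyClassStructure::Initializer` members (`vm`, `global`, `setPrototype`, `setStructure`, `setConstructor`), would be:

```cpp
void initJSFooClassStructure(JSC::LazyClassStructure::Initializer& init) {
    // Build prototype, object structure, and constructor, then hand them to the lazy cache.
    auto* prototype = JSFooPrototype::create(
        init.vm, init.global, JSFooPrototype::createStructure(init.vm, init.global, init.global->objectPrototype()));
    auto* structure = JSFoo::createStructure(init.vm, init.global, prototype);
    auto* constructor = JSFooConstructor::create(
        init.vm, JSFooConstructor::createStructure(init.vm, init.global, init.global->functionPrototype()), prototype);
    init.setPrototype(prototype);
    init.setStructure(structure);
    init.setConstructor(constructor);
}
```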

Visit in `visitChildrenImpl`:

```cpp
m_JSFooClassStructure.visit(visitor);
```

## Expose to Zig

```cpp
extern "C" JSC::EncodedJSValue Bun__JSFooConstructor(Zig::GlobalObject* globalObject) {
    return JSValue::encode(globalObject->m_JSFooClassStructure.constructor(globalObject));
}

extern "C" EncodedJSValue Bun__Foo__toJS(Zig::GlobalObject* globalObject, Foo* foo) {
    auto* structure = globalObject->m_JSFooClassStructure.get(globalObject);
    return JSValue::encode(JSFoo::create(globalObject->vm(), structure, globalObject, WTFMove(foo)));
}
```

Include `#include "root.h"` at the top of C++ files.
@@ -1,206 +0,0 @@

---
name: implementing-jsc-classes-zig
description: Creates JavaScript classes using Bun's Zig bindings generator (.classes.ts). Use when implementing new JS APIs in Zig with JSC integration.
---

# Bun's JavaScriptCore Class Bindings Generator

Bridge JavaScript and Zig through `.classes.ts` definitions and Zig implementations.

## Architecture

1. **Zig Implementation** (.zig files)
2. **JavaScript Interface Definition** (.classes.ts files)
3. **Generated Code** (C++/Zig files connecting them)

## Class Definition (.classes.ts)

```typescript
define({
  name: "TextDecoder",
  constructor: true,
  JSType: "object",
  finalize: true,
  proto: {
    decode: { args: 1 },
    encoding: { getter: true, cache: true },
    fatal: { getter: true },
  },
});
```

Options:

- `name`: Class name
- `constructor`: Has public constructor
- `JSType`: "object", "function", etc.
- `finalize`: Needs cleanup
- `proto`: Properties/methods
- `cache`: Cache property values via WriteBarrier

## Zig Implementation

```zig
pub const TextDecoder = struct {
    pub const js = JSC.Codegen.JSTextDecoder;
    pub const toJS = js.toJS;
    pub const fromJS = js.fromJS;
    pub const fromJSDirect = js.fromJSDirect;

    encoding: []const u8,
    fatal: bool,

    pub fn constructor(
        globalObject: *JSGlobalObject,
        callFrame: *JSC.CallFrame,
    ) bun.JSError!*TextDecoder {
        return bun.new(TextDecoder, .{ .encoding = "utf-8", .fatal = false });
    }

    pub fn decode(
        this: *TextDecoder,
        globalObject: *JSGlobalObject,
        callFrame: *JSC.CallFrame,
    ) bun.JSError!JSC.JSValue {
        const args = callFrame.arguments();
        if (args.len < 1 or args.ptr[0].isUndefinedOrNull()) {
            return globalObject.throw("Input cannot be null", .{});
        }
        return JSC.JSValue.jsString(globalObject, "result");
    }

    pub fn getEncoding(this: *TextDecoder, globalObject: *JSGlobalObject) JSC.JSValue {
        return JSC.JSValue.createStringFromUTF8(globalObject, this.encoding);
    }

    fn deinit(this: *TextDecoder) void {
        // Release resources
    }

    pub fn finalize(this: *TextDecoder) void {
        this.deinit();
        bun.destroy(this);
    }
};
```

**Key patterns:**

- Use `bun.JSError!JSValue` return type for error handling
- Use `globalObject` not `ctx`
- `deinit()` for cleanup, `finalize()` called by GC
- Update `src/bun.js/bindings/generated_classes_list.zig`

## CallFrame Access

```zig
const args = callFrame.arguments();
const first_arg = args.ptr[0]; // Access as slice
const argCount = args.len;
const thisValue = callFrame.thisValue();
```

## Property Caching

For `cache: true` properties, generated accessors:

```zig
// Get cached value
pub fn encodingGetCached(thisValue: JSC.JSValue) ?JSC.JSValue {
    const result = TextDecoderPrototype__encodingGetCachedValue(thisValue);
    if (result == .zero) return null;
    return result;
}

// Set cached value
pub fn encodingSetCached(thisValue: JSC.JSValue, globalObject: *JSC.JSGlobalObject, value: JSC.JSValue) void {
    TextDecoderPrototype__encodingSetCachedValue(thisValue, globalObject, value);
}
```

## Error Handling

```zig
pub fn method(this: *MyClass, globalObject: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSC.JSValue {
    const args = callFrame.arguments();
    if (args.len < 1) {
        return globalObject.throw("Missing required argument", .{});
    }
    return JSC.JSValue.jsString(globalObject, "Success!");
}
```

## Memory Management

```zig
pub fn deinit(this: *TextDecoder) void {
    this._encoding.deref();
    if (this.buffer) |buffer| {
        bun.default_allocator.free(buffer);
    }
}

pub fn finalize(this: *TextDecoder) void {
    JSC.markBinding(@src());
    this.deinit();
    bun.default_allocator.destroy(this);
}
```

## Creating a New Binding

1. Define interface in `.classes.ts`:

```typescript
define({
  name: "MyClass",
  constructor: true,
  finalize: true,
  proto: {
    myMethod: { args: 1 },
    myProperty: { getter: true, cache: true },
  },
});
```

2. Implement in `.zig`:

```zig
pub const MyClass = struct {
    pub const js = JSC.Codegen.JSMyClass;
    pub const toJS = js.toJS;
    pub const fromJS = js.fromJS;

    value: []const u8,

    pub const new = bun.TrivialNew(@This());

    pub fn constructor(globalObject: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!*MyClass {
        return MyClass.new(.{ .value = "" });
    }

    pub fn myMethod(this: *MyClass, globalObject: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSC.JSValue {
        return JSC.JSValue.jsUndefined();
    }

    pub fn getMyProperty(this: *MyClass, globalObject: *JSGlobalObject) JSC.JSValue {
        return JSC.JSValue.jsString(globalObject, this.value);
    }

    pub fn deinit(this: *MyClass) void {}

    pub fn finalize(this: *MyClass) void {
        this.deinit();
        bun.destroy(this);
    }
};
```

3. Add to `src/bun.js/bindings/generated_classes_list.zig`

## Generated Components

- **C++ Classes**: `JSMyClass`, `JSMyClassPrototype`, `JSMyClassConstructor`
- **Method Bindings**: `MyClassPrototype__myMethodCallback`
- **Property Accessors**: `MyClassPrototype__myPropertyGetterWrap`
- **Zig Bindings**: External function declarations, cached value accessors
@@ -1,222 +0,0 @@

---
name: writing-bundler-tests
description: Guides writing bundler tests using itBundled/expectBundled in test/bundler/. Use when creating or modifying bundler, transpiler, or code transformation tests.
---

# Writing Bundler Tests

Bundler tests use `itBundled()` from `test/bundler/expectBundled.ts` to test Bun's bundler.

## Basic Usage

```typescript
import { describe } from "bun:test";
import { itBundled, dedent } from "./expectBundled";

describe("bundler", () => {
  itBundled("category/TestName", {
    files: {
      "index.js": `console.log("hello");`,
    },
    run: {
      stdout: "hello",
    },
  });
});
```

Test ID format: `category/TestName` (e.g., `banner/CommentBanner`, `minify/Empty`)

## File Setup

```typescript
{
  files: {
    "index.js": `console.log("test");`,
    "lib.ts": `export const foo = 123;`,
    "nested/file.js": `export default {};`,
  },
  entryPoints: ["index.js"], // defaults to first file
  runtimeFiles: { // written AFTER bundling
    "extra.js": `console.log("added later");`,
  },
}
```

## Bundler Options

```typescript
{
  outfile: "/out.js",
  outdir: "/out",
  format: "esm" | "cjs" | "iife",
  target: "bun" | "browser" | "node",

  // Minification
  minifyWhitespace: true,
  minifyIdentifiers: true,
  minifySyntax: true,

  // Code manipulation
  banner: "// copyright",
  footer: "// end",
  define: { "PROD": "true" },
  external: ["lodash"],

  // Advanced
  sourceMap: "inline" | "external",
  splitting: true,
  treeShaking: true,
  drop: ["console"],
}
```

## Runtime Verification

```typescript
{
  run: {
    stdout: "expected output", // exact match
    stdout: /regex/, // pattern match
    partialStdout: "contains this", // substring
    stderr: "error output",
    exitCode: 1,
    env: { NODE_ENV: "production" },
    runtime: "bun" | "node",

    // Runtime errors
    error: "ReferenceError: x is not defined",
  },
}
```

## Bundle Errors/Warnings

```typescript
{
  bundleErrors: {
    "/file.js": ["error message 1", "error message 2"],
  },
  bundleWarnings: {
    "/file.js": ["warning message"],
  },
}
```

## Dead Code Elimination (DCE)

Add markers in source code:

```javascript
// KEEP - this should survive
const used = 1;

// REMOVE - this should be eliminated
const unused = 2;
```

```typescript
{
  dce: true,
  dceKeepMarkerCount: 5, // expected KEEP markers
}
```

## Capture Pattern

Verify exact transpilation with `capture()`:

```typescript
itBundled("string/Folding", {
  files: {
    "index.ts": `capture(\`\${1 + 1}\`);`,
  },
  capture: ['"2"'], // expected captured value
  minifySyntax: true,
});
```

## Post-Bundle Assertions

```typescript
{
  onAfterBundle(api) {
    api.expectFile("out.js").toContain("console.log");
    api.assertFileExists("out.js");

    const content = api.readFile("out.js");
    expect(content).toMatchSnapshot();

    const values = api.captureFile("out.js");
    expect(values).toEqual(["2"]);
  },
}
```

## Common Patterns

**Simple output verification:**

```typescript
itBundled("banner/Comment", {
  banner: "// copyright",
  files: { "a.js": `console.log("Hello")` },
  onAfterBundle(api) {
    api.expectFile("out.js").toContain("// copyright");
  },
});
```

**Multi-file CJS/ESM interop:**

```typescript
itBundled("cjs/ImportSyntax", {
  files: {
    "entry.js": `import lib from './lib.cjs'; console.log(lib);`,
    "lib.cjs": `exports.foo = 'bar';`,
  },
  run: { stdout: '{"foo":"bar"}' },
});
```

**Error handling:**

```typescript
itBundled("edgecase/InvalidLoader", {
  files: { "index.js": `...` },
  bundleErrors: {
    "index.js": ["Unsupported loader type"],
  },
});
```

## Test Organization

```text
test/bundler/
├── bundler_banner.test.ts
├── bundler_string.test.ts
├── bundler_minify.test.ts
├── bundler_cjs.test.ts
├── bundler_edgecase.test.ts
├── bundler_splitting.test.ts
├── css/
├── transpiler/
└── expectBundled.ts
```

## Running Tests

```bash
bun bd test test/bundler/bundler_banner.test.ts
BUN_BUNDLER_TEST_FILTER="banner/Comment" bun bd test bundler_banner.test.ts
BUN_BUNDLER_TEST_DEBUG=1 bun bd test bundler_minify.test.ts
```

## Key Points

- Use `dedent` for readable multi-line code
- File paths are resolved inside the test's temporary root (e.g., `/index.js`)
- Use `capture()` to verify exact transpilation results
- Use `.toMatchSnapshot()` for complex outputs
- Pass an array to `run` for multiple test scenarios (see the sketch below)
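
For example, running the same bundle under two scenarios might look like the following; the exact option shape is inferred from the bullet above, so treat it as a sketch:

```typescript
itBundled("run/TwoScenarios", {
  files: {
    "index.js": `console.log(process.env.GREETING ?? "hello");`,
  },
  // Hypothetical: each array entry is a separate execution of the bundle output.
  run: [
    { stdout: "hello" },
    { env: { GREETING: "hi" }, stdout: "hi" },
  ],
});
```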
@@ -1,94 +0,0 @@

---
name: writing-dev-server-tests
description: Guides writing HMR/Dev Server tests in test/bake/. Use when creating or modifying dev server, hot reloading, or bundling tests.
---

# Writing HMR/Dev Server Tests

Dev server tests validate hot-reloading robustness and reliability.

## File Structure

- `test/bake/bake-harness.ts` - shared utilities: `devTest`, `prodTest`, `devAndProductionTest`, `Dev` class, `Client` class
- `test/bake/client-fixture.mjs` - subprocess for `Client` (page loading, IPC queries)
- `test/bake/dev/*.test.ts` - dev server and hot reload tests
- `test/bake/dev-and-prod.ts` - tests that run in both dev and production modes

## Test Categories

- `bundle.test.ts` - DevServer-specific bundling bugs
- `css.test.ts` - CSS bundling issues
- `plugins.test.ts` - development mode plugins
- `ecosystem.test.ts` - library compatibility (prefer concrete bugs over full package tests)
- `esm.test.ts` - ESM features in development
- `html.test.ts` - HTML file handling
- `react-spa.test.ts` - React, react-refresh transform, server components
- `sourcemap.test.ts` - source map correctness

## devTest Basics

```ts
import { devTest, emptyHtmlFile } from "../bake-harness";

devTest("html file is watched", {
  files: {
    "index.html": emptyHtmlFile({
      scripts: ["/script.ts"],
      body: "<h1>Hello</h1>",
    }),
    "script.ts": `console.log("hello");`,
  },
  async test(dev) {
    await dev.fetch("/").expect.toInclude("<h1>Hello</h1>");
    await dev.patch("index.html", { find: "Hello", replace: "World" });
    await dev.fetch("/").expect.toInclude("<h1>World</h1>");

    await using c = await dev.client("/");
    await c.expectMessage("hello");

    await c.expectReload(async () => {
      await dev.patch("index.html", { find: "World", replace: "Bar" });
    });
    await c.expectMessage("hello");
  },
});
```

## Key APIs

- **`files`**: Initial filesystem state
- **`dev.fetch()`**: HTTP requests
- **`dev.client()`**: Opens browser instance
- **`dev.write/patch/delete`**: Filesystem mutations (wait for hot-reload automatically)
- **`c.expectMessage()`**: Assert console.log output
- **`c.expectReload()`**: Wrap code that causes hard reload

**Important**: Use `dev.write/patch/delete` instead of `node:fs` - they wait for hot-reload.
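
For instance (a sketch; the `node:fs` variant is shown only as the anti-pattern):

```ts
// Inside a devTest body: waits for the dev server to pick up the change
// before the next assertion runs.
await dev.write("util.ts", "export const x = 1;");

// Anti-pattern: writing the file via node:fs mutates it on disk but does not
// wait for the hot-reload, so later assertions can race the rebuild.
```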

## Testing Errors

```ts
devTest("import then create", {
  files: {
    "index.html": `<!DOCTYPE html><html><head></head><body><script type="module" src="/script.ts"></script></body></html>`,
    "script.ts": `import data from "./data"; console.log(data);`,
  },
  async test(dev) {
    const c = await dev.client("/", {
      errors: ['script.ts:1:18: error: Could not resolve: "./data"'],
    });
    await c.expectReload(async () => {
      await dev.write("data.ts", "export default 'data';");
    });
    await c.expectMessage("data");
  },
});
```

Specify expected errors with the `errors` option:

```ts
await dev.delete("other.ts", {
  errors: ['index.ts:1:16: error: Could not resolve: "./other"'],
});
```
@@ -1,268 +0,0 @@

---
name: zig-system-calls
description: Guides using bun.sys for system calls and file I/O in Zig. Use when implementing file operations instead of std.fs or std.posix.
---

# System Calls & File I/O in Zig

Use `bun.sys` instead of `std.fs` or `std.posix` for cross-platform syscalls with proper error handling.

## bun.sys.File (Preferred)

For most file operations, use the `bun.sys.File` wrapper:

```zig
const File = bun.sys.File;

const file = switch (File.open(path, bun.O.RDWR, 0o644)) {
    .result => |f| f,
    .err => |err| return .{ .err = err },
};
defer file.close();

// Read/write
_ = try file.read(buffer).unwrap();
_ = try file.writeAll(data).unwrap();

// Get file info
const stat = try file.stat().unwrap();
const size = try file.getEndPos().unwrap();

// std.io compatible
const reader = file.reader();
const writer = file.writer();
```

### Complete Example

```zig
const File = bun.sys.File;

pub fn writeFile(path: [:0]const u8, data: []const u8) File.WriteError!void {
    const file = switch (File.open(path, bun.O.WRONLY | bun.O.CREAT | bun.O.TRUNC, 0o664)) {
        .result => |f| f,
        .err => |err| return err.toError(),
    };
    defer file.close();

    _ = switch (file.writeAll(data)) {
        .result => {},
        .err => |err| return err.toError(),
    };
}
```

## Why bun.sys?

| Aspect      | bun.sys                          | std.fs/std.posix    |
| ----------- | -------------------------------- | ------------------- |
| Return Type | `Maybe(T)` with detailed Error   | Generic error union |
| Windows     | Full support with libuv fallback | Limited/POSIX-only  |
| Error Info  | errno, syscall tag, path, fd     | errno only          |
| EINTR       | Automatic retry                  | Manual handling     |

## Error Handling with Maybe(T)

`bun.sys` functions return `Maybe(T)` - a tagged union:

```zig
const sys = bun.sys;

// Pattern 1: Switch on result/error
switch (sys.read(fd, buffer)) {
    .result => |bytes_read| {
        // use bytes_read
    },
    .err => |err| {
        // err.errno, err.syscall, err.fd, err.path
        if (err.getErrno() == .AGAIN) {
            // handle EAGAIN
        }
    },
}

// Pattern 2: Unwrap with try (converts to Zig error)
const bytes = try sys.read(fd, buffer).unwrap();

// Pattern 3: Unwrap with default
const value = sys.stat(path).unwrapOr(default_stat);
```

## Low-Level File Operations

Only use these when `bun.sys.File` doesn't meet your needs.

### Opening Files

```zig
const sys = bun.sys;

// Use bun.O flags (cross-platform normalized)
const fd = switch (sys.open(path, bun.O.RDONLY, 0)) {
    .result => |fd| fd,
    .err => |err| return .{ .err = err },
};
defer fd.close();

// Common flags
bun.O.RDONLY, bun.O.WRONLY, bun.O.RDWR
bun.O.CREAT, bun.O.TRUNC, bun.O.APPEND
bun.O.NONBLOCK, bun.O.DIRECTORY
```

### Reading & Writing

```zig
// Single read (may return less than buffer size)
switch (sys.read(fd, buffer)) {
    .result => |n| { /* n bytes read */ },
    .err => |err| { /* handle error */ },
}

// Read until EOF or buffer full
const total = try sys.readAll(fd, buffer).unwrap();

// Position-based read/write
sys.pread(fd, buffer, offset)
sys.pwrite(fd, data, offset)

// Vector I/O
sys.readv(fd, iovecs)
sys.writev(fd, iovecs)
```

### File Info

```zig
sys.stat(path) // Follow symlinks
sys.lstat(path) // Don't follow symlinks
sys.fstat(fd) // From file descriptor
sys.fstatat(fd, path)

// Linux-only: faster selective stat
sys.statx(path, &.{ .size, .mtime })
```

### Path Operations

```zig
sys.unlink(path)
sys.unlinkat(dir_fd, path)
sys.rename(from, to)
sys.renameat(from_dir, from, to_dir, to)
sys.readlink(path, buf)
sys.readlinkat(fd, path, buf)
sys.link(T, src, dest)
sys.linkat(src_fd, src, dest_fd, dest)
sys.symlink(target, dest)
sys.symlinkat(target, dirfd, dest)
sys.mkdir(path, mode)
sys.mkdirat(dir_fd, path, mode)
sys.rmdir(path)
```

### Permissions

```zig
sys.chmod(path, mode)
sys.fchmod(fd, mode)
sys.fchmodat(fd, path, mode, flags)
sys.chown(path, uid, gid)
sys.fchown(fd, uid, gid)
```

### Closing File Descriptors

Close is on `bun.FD`:

```zig
fd.close(); // Asserts on error (use in defer)

// Or if you need error info:
if (fd.closeAllowingBadFileDescriptor(null)) |err| {
    // handle error
}
```

## Directory Operations

```zig
var buf: bun.PathBuffer = undefined;
const cwd = try sys.getcwd(&buf).unwrap();
const cwdZ = try sys.getcwdZ(&buf).unwrap(); // Zero-terminated
sys.chdir(path, destination)
```

### Directory Iteration

Use `bun.DirIterator` instead of `std.fs.Dir.Iterator`:

```zig
var iter = bun.iterateDir(dir_fd);
while (true) {
    switch (iter.next()) {
        .result => |entry| {
            if (entry) |e| {
                const name = e.name.slice();
                const kind = e.kind; // .file, .directory, .sym_link, etc.
            } else {
                break; // End of directory
            }
        },
        .err => |err| return .{ .err = err },
    }
}
```

## Socket Operations

**Important**: `bun.sys` has limited socket support. For network I/O:

- **Non-blocking sockets**: Use `uws.Socket` (libuwebsockets) exclusively
- **Pipes/blocking I/O**: Use `PipeReader.zig` and `PipeWriter.zig`

Available in bun.sys:

```zig
sys.setsockopt(fd, level, optname, value)
sys.socketpair(domain, socktype, protocol, nonblocking_status)
```

Do NOT use `bun.sys` for socket read/write - use `uws.Socket` instead.

## Other Operations

```zig
sys.ftruncate(fd, size)
sys.lseek(fd, offset, whence)
sys.dup(fd)
sys.dupWithFlags(fd, flags)
sys.fcntl(fd, cmd, arg)
sys.pipe()
sys.mmap(...)
sys.munmap(memory)
sys.access(path, mode)
sys.futimens(fd, atime, mtime)
sys.utimens(path, atime, mtime)
```

## Error Type

```zig
const err: bun.sys.Error = ...;
err.errno // Raw errno value
err.getErrno() // As std.posix.E enum
err.syscall // Which syscall failed (Tag enum)
err.fd // Optional: file descriptor
err.path // Optional: path string
```

## Key Points

- Prefer `bun.sys.File` wrapper for most file operations
- Use low-level `bun.sys` functions only when needed
- Use `bun.O.*` flags instead of `std.os.O.*`
- Handle `Maybe(T)` with switch or `.unwrap()`
- Use `defer fd.close()` for cleanup
- EINTR is handled automatically in most functions
- For sockets, use `uws.Socket` not `bun.sys`
147
.coderabbit.yaml
147
.coderabbit.yaml
@@ -1,147 +0,0 @@
|
||||
language: en-US
|
||||
|
||||
issue_enrichment:
|
||||
auto_enrich:
|
||||
enabled: false
|
||||
|
||||
reviews:
|
||||
profile: assertive
|
||||
request_changes_workflow: false
|
||||
high_level_summary: false
|
||||
high_level_summary_placeholder: "@coderabbitai summary"
|
||||
high_level_summary_in_walkthrough: true
|
||||
auto_title_placeholder: "@coderabbitai"
|
||||
review_status: false
|
||||
commit_status: false
|
||||
fail_commit_status: false
|
||||
collapse_walkthrough: false
|
||||
changed_files_summary: true
|
||||
sequence_diagrams: false
|
||||
estimate_code_review_effort: false
|
||||
assess_linked_issues: true
|
||||
related_issues: true
|
||||
related_prs: true
|
||||
suggested_labels: false
|
||||
suggested_reviewers: true
|
||||
in_progress_fortune: false
|
||||
poem: false
|
||||
abort_on_close: true
|
||||
|
||||
path_filters:
|
||||
- "!test/js/node/test/"
|
||||
|
||||
auto_review:
|
||||
enabled: true
|
||||
auto_incremental_review: true
|
||||
drafts: false
|
||||
|
||||
finishing_touches:
|
||||
docstrings:
|
||||
enabled: false
|
||||
unit_tests:
|
||||
enabled: false
|
||||
|
||||
pre_merge_checks:
|
||||
docstrings:
|
||||
mode: off
|
||||
title:
|
||||
mode: warning
|
||||
description:
|
||||
mode: warning
|
||||
issue_assessment:
|
||||
mode: warning
|
||||
|
||||
tools:
|
||||
shellcheck:
|
||||
enabled: true
|
||||
ruff:
|
||||
enabled: true
|
||||
markdownlint:
|
||||
enabled: true
|
||||
github-checks:
|
||||
enabled: true
|
||||
timeout_ms: 90000
|
||||
languagetool:
|
||||
enabled: true
|
||||
enabled_only: false
|
||||
level: default
|
||||
biome:
|
||||
enabled: true
|
||||
hadolint:
|
||||
enabled: true
|
||||
swiftlint:
|
||||
enabled: true
|
||||
phpstan:
|
||||
enabled: true
|
||||
level: default
|
||||
phpmd:
|
||||
enabled: true
|
||||
phpcs:
|
||||
enabled: true
|
||||
golangci-lint:
|
||||
enabled: true
|
||||
yamllint:
|
||||
enabled: true
|
||||
gitleaks:
|
||||
enabled: true
|
||||
checkov:
|
||||
enabled: true
|
||||
detekt:
|
||||
enabled: true
|
||||
eslint:
|
||||
enabled: true
|
||||
flake8:
|
||||
enabled: true
|
||||
rubocop:
|
||||
enabled: true
|
||||
buf:
|
||||
enabled: true
|
||||
regal:
|
||||
enabled: true
|
||||
actionlint:
|
||||
enabled: true
|
||||
pmd:
|
||||
enabled: true
|
||||
clang:
|
||||
enabled: true
|
||||
cppcheck:
|
||||
enabled: true
|
||||
semgrep:
|
||||
enabled: true
|
||||
circleci:
|
||||
enabled: true
|
||||
clippy:
|
||||
enabled: true
|
||||
sqlfluff:
|
||||
enabled: true
|
||||
prismaLint:
|
||||
enabled: true
|
||||
pylint:
|
||||
enabled: true
|
||||
oxc:
|
||||
enabled: true
|
||||
shopifyThemeCheck:
|
||||
enabled: true
|
||||
luacheck:
|
||||
enabled: true
|
||||
brakeman:
|
||||
enabled: true
|
||||
dotenvLint:
|
||||
enabled: true
|
||||
htmlhint:
|
||||
enabled: true
|
||||
checkmake:
|
||||
enabled: true
|
||||
osvScanner:
|
||||
enabled: true
|
||||
|
||||
chat:
|
||||
auto_reply: true
|
||||
|
||||
knowledge_base:
|
||||
opt_out: false
|
||||
code_guidelines:
|
||||
enabled: true
|
||||
filePatterns:
|
||||
- "**/.cursor/rules/*.mdc"
|
||||
- "**/CLAUDE.md"
|
||||
@@ -1,10 +0,0 @@
|
||||
{
|
||||
"snapshot": "snapshot-20250706-71021aff-cc0d-4a7f-a468-d443b16c4bf1",
|
||||
"install": "bun install",
|
||||
"terminals": [
|
||||
{
|
||||
"name": "bun build",
|
||||
"command": "bun run build"
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -1,7 +0,0 @@
|
||||
# Add directories or file patterns to ignore during indexing (e.g. foo/ or *.csv)
|
||||
bench
|
||||
vendor
|
||||
*-fixture.{js,ts}
|
||||
zig-cache
|
||||
packages/bun-uws/fuzzing
|
||||
build
|
||||
1539
.docker/chrome.json
Normal file
1539
.docker/chrome.json
Normal file
File diff suppressed because it is too large
Load Diff
14
.docker/chromium.pref
Normal file
14
.docker/chromium.pref
Normal file
@@ -0,0 +1,14 @@
|
||||
# Note: 2 blank lines are required between entries
|
||||
Package: *
|
||||
Pin: release a=eoan
|
||||
Pin-Priority: 500
|
||||
|
||||
Package: *
|
||||
Pin: origin "ftp.debian.org"
|
||||
Pin-Priority: 300
|
||||
|
||||
# Pattern includes 'chromium', 'chromium-browser' and similarly
|
||||
# named dependencies:
|
||||
Package: chromium*
|
||||
Pin: origin "ftp.debian.org"
|
||||
Pin-Priority: 700
|
||||
8
.docker/copy-bun-binary.sh
Normal file
8
.docker/copy-bun-binary.sh
Normal file
@@ -0,0 +1,8 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -euxo pipefail
|
||||
|
||||
name=$(openssl rand -hex 12)
|
||||
id=$(docker create --name=bun-binary-$name $CONTAINER_TAG)
|
||||
docker container cp bun-binary-$name:$BUN_RELEASE_DIR bun-binary
|
||||
echo -e "bun-binary-$name"
|
||||
3
.docker/debian.list
Normal file
3
.docker/debian.list
Normal file
@@ -0,0 +1,3 @@
|
||||
deb http://deb.debian.org/debian buster main
|
||||
deb http://deb.debian.org/debian buster-updates main
|
||||
deb http://deb.debian.org/debian-security buster/updates main
|
||||
34
.docker/dockerfile-common.sh
Normal file
34
.docker/dockerfile-common.sh
Normal file
@@ -0,0 +1,34 @@
|
||||
export DOCKER_BUILDKIT=1
|
||||
|
||||
export BUILDKIT_ARCH=$(uname -m)
|
||||
export ARCH=${BUILDKIT_ARCH}
|
||||
|
||||
if [ "$BUILDKIT_ARCH" == "amd64" ]; then
|
||||
export BUILDKIT_ARCH="amd64"
|
||||
export ARCH=x64
|
||||
fi
|
||||
|
||||
if [ "$BUILDKIT_ARCH" == "x86_64" ]; then
|
||||
export BUILDKIT_ARCH="amd64"
|
||||
export ARCH=x64
|
||||
fi
|
||||
|
||||
if [ "$BUILDKIT_ARCH" == "arm64" ]; then
|
||||
export BUILDKIT_ARCH="arm64"
|
||||
export ARCH=aarch64
|
||||
fi
|
||||
|
||||
if [ "$BUILDKIT_ARCH" == "aarch64" ]; then
|
||||
export BUILDKIT_ARCH="arm64"
|
||||
export ARCH=aarch64
|
||||
fi
|
||||
|
||||
if [ "$BUILDKIT_ARCH" == "armv7l" ]; then
|
||||
echo "Unsupported platform: $BUILDKIT_ARCH"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
export BUILD_ID=$(cat build-id)
|
||||
export CONTAINER_NAME=bun-linux-$ARCH
|
||||
export DEBUG_CONTAINER_NAME=debug-bun-linux-$ARCH
|
||||
export TEMP=/tmp/bun-0.0.$BUILD_ID
|
||||
11
.docker/pull.sh
Normal file
11
.docker/pull.sh
Normal file
@@ -0,0 +1,11 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -euxo pipefail
|
||||
|
||||
docker pull bunbunbunbun/bun-test-base:latest --platform=linux/amd64
|
||||
docker pull bunbunbunbun/bun-base:latest --platform=linux/amd64
|
||||
docker pull bunbunbunbun/bun-base-with-zig-and-webkit:latest --platform=linux/amd64
|
||||
|
||||
docker tag bunbunbunbun/bun-test-base:latest bun-base:latest
|
||||
docker tag bunbunbunbun/bun-base:latest bun-base:latest
|
||||
docker tag bunbunbunbun/bun-base-with-zig-and-webkit:latest bun-base-with-zig-and-webkit:latest
|
||||
47
.docker/run-dockerfile.sh
Normal file
47
.docker/run-dockerfile.sh
Normal file
@@ -0,0 +1,47 @@
|
||||
#!/bin/bash
|
||||
|
||||
source "dockerfile-common.sh"
|
||||
|
||||
export $CONTAINER_NAME=$CONTAINER_NAME-local
|
||||
|
||||
rm -rf $TEMP
|
||||
mkdir -p $TEMP
|
||||
|
||||
docker build . --target release --progress=plain -t $CONTAINER_NAME:latest --build-arg BUILDKIT_INLINE_CACHE=1 --platform=linux/$BUILDKIT_ARCH --cache-from $CONTAINER_NAME:latest
|
||||
|
||||
if (($?)); then
|
||||
echo "Failed to build container"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
id=$(docker create $CONTAINER_NAME:latest)
|
||||
docker cp $id:/home/ubuntu/bun-release $TEMP/$CONTAINER_NAME
|
||||
if (($?)); then
|
||||
echo "Failed to cp container"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
cd $TEMP
|
||||
mkdir -p $TEMP/$CONTAINER_NAME $TEMP/$DEBUG_CONTAINER_NAME
|
||||
mv $CONTAINER_NAME/bun-profile $DEBUG_CONTAINER_NAME/bun
|
||||
zip -r $CONTAINER_NAME.zip $CONTAINER_NAME
|
||||
zip -r $DEBUG_CONTAINER_NAME.zip $DEBUG_CONTAINER_NAME
|
||||
docker rm -v $id
|
||||
abs=$(realpath $TEMP/$CONTAINER_NAME.zip)
|
||||
debug_abs=$(realpath $TEMP/$DEBUG_CONTAINER_NAME.zip)
|
||||
|
||||
case $(uname -s) in
|
||||
"Linux") target="linux" ;;
|
||||
*) target="other" ;;
|
||||
esac
|
||||
|
||||
if [ "$target" = "linux" ]; then
|
||||
if command -v bun --version >/dev/null; then
|
||||
cp $TEMP/$CONTAINER_NAME/bun $(which bun)
|
||||
cp $TEMP/$DEBUG_CONTAINER_NAME/bun $(which bun-profile)
|
||||
fi
|
||||
fi
|
||||
|
||||
echo "Saved to:"
|
||||
echo $debug_abs
|
||||
echo $abs
|
||||
9
.docker/run-test.sh
Executable file
9
.docker/run-test.sh
Executable file
@@ -0,0 +1,9 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -euxo pipefail
|
||||
|
||||
bun install
|
||||
bun install --cwd ./test/snippets
|
||||
bun install --cwd ./test/scripts
|
||||
|
||||
make $BUN_TEST_NAME
|
||||
5
.docker/runner.sh
Normal file
5
.docker/runner.sh
Normal file
@@ -0,0 +1,5 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -euxo pipefail
|
||||
|
||||
docker container run --security-opt seccomp=.docker/chrome.json --env GITHUB_WORKSPACE=$GITHUB_WORKSPACE --env BUN_TEST_NAME=$BUN_TEST_NAME --ulimit memlock=-1:-1 --init --rm bun-test:latest
|
||||
5
.docker/unit-tests.sh
Normal file
5
.docker/unit-tests.sh
Normal file
@@ -0,0 +1,5 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -euxo pipefail
|
||||
|
||||
docker container run --security-opt seccomp=.docker/chrome.json --env GITHUB_WORKSPACE=$GITHUB_WORKSPACE --ulimit memlock=-1:-1 --init --rm bun-unit-tests:latest
|
||||
@@ -1,21 +1,17 @@
|
||||
**/*.a
|
||||
**/*.o
|
||||
**/.next
|
||||
**/CMakeCache.txt
|
||||
**/node_modules
|
||||
.git
|
||||
examples
|
||||
node_modules
|
||||
**/node_modules
|
||||
src/bun.js/WebKit/LayoutTests
|
||||
zig-out
|
||||
zig-build
|
||||
**/*.o
|
||||
**/*.a
|
||||
|
||||
examples
|
||||
|
||||
**/.next
|
||||
.git
|
||||
src/bun.js/WebKit
|
||||
**/CMakeCache.txt
|
||||
packages/**/bun
|
||||
packages/**/bun-profile
|
||||
src/bun.js/WebKit
|
||||
src/bun.js/WebKit/LayoutTests
|
||||
zig-build
|
||||
.zig-cache
|
||||
zig-out
|
||||
build
|
||||
vendor
|
||||
node_modules
|
||||
*.trace
|
||||
|
||||
packages/bun-uws/fuzzing
|
||||
zig-cache
|
||||
|
||||
@@ -1,8 +0,0 @@
|
||||
# https://EditorConfig.org
|
||||
root = true
|
||||
|
||||
[*]
|
||||
charset = utf-8
|
||||
insert_final_newline = true
|
||||
trim_trailing_whitespace = true
|
||||
end_of_line = lf
|
||||
@@ -1,10 +0,0 @@
|
||||
# Add commits to ignore in `git blame`. This allows large stylistic refactors to
|
||||
# avoid mucking up blames.
|
||||
#
|
||||
# To configure git to use this, run:
|
||||
#
|
||||
# git config blame.ignoreRevsFile .git-blame-ignore-revs
|
||||
#
|
||||
4ec410e0d7c5f6a712c323444edbf56b48d432d8 # make @import("bun") work in zig (#19096)
|
||||
dedd433cbf2e2fe38e51bc166e08fbcc601ad42b # JSValue.undefined -> .jsUndefined()
|
||||
6b4662ff55f58247cc2fd22e85b4f9805b0950a5 # JSValue.jsUndefined() -> .js_undefined
|
||||
71
.gitattributes
vendored
71
.gitattributes
vendored
@@ -1,58 +1,33 @@
|
||||
*.css text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.js text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.jsx text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.tsx text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.ts text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.c text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.cpp text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.cc text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.yml text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.toml text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.zig text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.rs text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.h text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.json text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.lock text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.map text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.md text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.mdc text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.mdx text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.mjs text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.mts text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
|
||||
*.lockb binary diff=lockb
|
||||
|
||||
.vscode/launch.json linguist-generated
|
||||
src/api/schema.d.ts linguist-generated
|
||||
fixture.*.c linguist-generated
|
||||
src/api/schema.js linguist-generated
|
||||
*-fixture* linguist-generated
|
||||
src/bun.js/bindings/ZigGeneratedCode.h linguist-generated
|
||||
src/bun.js/bindings/ZigGeneratedCode.cpp linguist-generated
|
||||
src/bun.js/bindings/headers.h linguist-generated
|
||||
src/bun.js/bindings/headers.zig linguist-generated
|
||||
|
||||
packages/bun-uws/fuzzing/seed-corpus/**/* linguist-generated
|
||||
|
||||
src/bun.js/bindings/sqlite/sqlite3.c linguist-vendored
|
||||
src/bun.js/bindings/sqlite/sqlite3_local.h linguist-vendored
|
||||
*.lockb binary diff=lockb
|
||||
*.zig text eol=lf
|
||||
src/bun.js/bindings/simdutf.cpp linguist-vendored
|
||||
src/bun.js/bindings/simdutf.h linguist-vendored
|
||||
|
||||
src/js/out/WebCoreJSBuiltins.cpp linguist-generated
|
||||
src/js/out/WebCoreJSBuiltins.h linguist-generated
|
||||
src/js/out/WebCoreJSBuiltins.d.ts linguist-generated
|
||||
|
||||
src/bun.js/bindings/ZigGeneratedClasses.h linguist-generated
|
||||
src/bun.js/bindings/ZigGeneratedClasses.cpp linguist-generated
|
||||
|
||||
src/bun.js/bindings/ZigGeneratedCode.h linguist-generated
|
||||
src/bun.js/bindings/ZigGeneratedCode.cpp linguist-generated
|
||||
|
||||
src/bun.js/bindings/headers.h linguist-generated
|
||||
src/bun.js/bindings/headers.zig linguist-generated
|
||||
|
||||
src/bun.js/bindings/JSSink.h linguist-generated
|
||||
src/bun.js/bindings/JSSink.zig linguist-generated
|
||||
|
||||
src/bun.js/bindings/ZigGeneratedClasses+DOMClientIsoSubspaces.h linguist-generated
|
||||
src/bun.js/bindings/ZigGeneratedClasses+DOMIsoSubspaces.h linguist-generated
|
||||
src/bun.js/bindings/ZigGeneratedClasses+lazyStructureHeader.h linguist-generated
|
||||
src/bun.js/bindings/ZigGeneratedClasses+lazyStructureImpl.h linguist-generated
|
||||
|
||||
docs/**/* linguist-documentation
|
||||
|
||||
# Don't count tests in the language stats - https://github.com/github-linguist/linguist/blob/master/docs/overrides.md
|
||||
test/**/* linguist-documentation
|
||||
bench/**/* linguist-documentation
|
||||
examples/**/* linguist-documentation
|
||||
|
||||
vendor/*.c linguist-vendored
|
||||
vendor/brotli/** linguist-vendored
|
||||
|
||||
test/js/node/test/fixtures linguist-vendored
|
||||
test/js/node/test/common linguist-vendored
|
||||
|
||||
test/js/bun/css/files linguist-vendored
|
||||
|
||||
.vscode/*.json linguist-language=JSON-with-Comments
|
||||
src/cli/init/tsconfig.default.json linguist-language=JSON-with-Comments
|
||||
9
.github/CODEOWNERS
vendored
9
.github/CODEOWNERS
vendored
@@ -1,9 +0,0 @@
|
||||
# Project
|
||||
/.github/CODEOWNERS @Jarred-Sumner
|
||||
|
||||
# Tests
|
||||
/test/expectations.txt @Jarred-Sumner
|
||||
|
||||
# Types
|
||||
*.d.ts @alii
|
||||
/packages/bun-types/ @alii
|
||||
35
.github/ISSUE_TEMPLATE/1-install-problem.yml
vendored
Normal file
35
.github/ISSUE_TEMPLATE/1-install-problem.yml
vendored
Normal file
@@ -0,0 +1,35 @@
|
||||
name: 📥 Install Problem
|
||||
description: Report an issue during install or upgrade
|
||||
labels: [bug, install]
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Thank you for submitting a bug report. It helps make Bun better.
|
||||
|
||||
If you need help or support using Bun, and are not reporting an issue, please
|
||||
join our [Discord](https://discord.gg/CXdq2DP29u) server, where you can ask questions in the [`#help`](https://discord.gg/32EtH6p7HN) forum.
|
||||
|
||||
Please try to include as much information as possible.
|
||||
- type: input
|
||||
attributes:
|
||||
label: What platform is your computer?
|
||||
description: |
|
||||
For MacOS and Linux: copy the output of `uname -mprs`
|
||||
For Windows: copy the output of `"$([Environment]::OSVersion | ForEach-Object VersionString) $(if ([Environment]::Is64BitOperatingSystem) { "x64" } else { "x86" })"` in the PowerShell console
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: How did you attempt to install or upgrade?
|
||||
description: Please provide the commands you ran to install or upgrade.
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: What do you see instead?
|
||||
description: If possible, please provide text instead of a screenshot.
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Additional information
|
||||
description: Is there anything else you think we should know?
|
||||
8
.github/ISSUE_TEMPLATE/2-bug-report.yml
vendored
8
.github/ISSUE_TEMPLATE/2-bug-report.yml
vendored
@@ -1,8 +1,6 @@
|
||||
name: 🐛 Bug Report
|
||||
description: Report an issue that should be fixed
|
||||
labels:
|
||||
- bug
|
||||
- needs triage
|
||||
labels: [bug]
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
@@ -12,11 +10,7 @@ body:
|
||||
If you need help or support using Bun, and are not reporting a bug, please
|
||||
join our [Discord](https://discord.gg/CXdq2DP29u) server, where you can ask questions in the [`#help`](https://discord.gg/32EtH6p7HN) forum.
|
||||
|
||||
Make sure you are running the [latest](https://bun.com/docs/installation#upgrading) version of Bun.
|
||||
The bug you are experiencing may already have been fixed.
|
||||
|
||||
Please try to include as much information as possible.
|
||||
|
||||
- type: input
|
||||
attributes:
|
||||
label: What version of Bun is running?
|
||||
|
||||
24
.github/ISSUE_TEMPLATE/3-feature-request.yml
vendored
Normal file
24
.github/ISSUE_TEMPLATE/3-feature-request.yml
vendored
Normal file
@@ -0,0 +1,24 @@
|
||||
name: 🚀 Feature Request
|
||||
description: Suggest an idea, feature, or enhancement
|
||||
labels: [enhancement]
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Thank you for submitting an idea. It helps make Bun better.
|
||||
|
||||
If you want to discuss Bun, or learn how others are using Bun, please
|
||||
join our [Discord](https://discord.gg/CXdq2DP29u) server, where you can share in the [`#feedback-ideas`](https://discord.gg/unwUnHBNqy) channel.
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: What is the problem this feature would solve?
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: What is the feature you are proposing to solve the problem?
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: What alternatives have you considered?
|
||||
@@ -1,45 +0,0 @@
|
||||
name: 🇹 TypeScript Type Bug Report
|
||||
description: Report an issue with TypeScript types
|
||||
labels: [bug, types]
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Thank you for submitting a bug report. It helps make Bun better.
|
||||
|
||||
If you need help or support using Bun, and are not reporting a bug, please
|
||||
join our [Discord](https://discord.gg/CXdq2DP29u) server, where you can ask questions in the [`#help`](https://discord.gg/32EtH6p7HN) forum.
|
||||
|
||||
Make sure you are running the [latest](https://bun.com/docs/installation#upgrading) version of Bun.
|
||||
The bug you are experiencing may already have been fixed.
|
||||
|
||||
Please try to include as much information as possible.
|
||||
|
||||
- type: input
|
||||
attributes:
|
||||
label: What version of Bun is running?
|
||||
description: Copy the output of `bun --revision`
|
||||
- type: input
|
||||
attributes:
|
||||
label: What platform is your computer?
|
||||
description: |
|
||||
For MacOS and Linux: copy the output of `uname -mprs`
|
||||
For Windows: copy the output of `"$([Environment]::OSVersion | ForEach-Object VersionString) $(if ([Environment]::Is64BitOperatingSystem) { "x64" } else { "x86" })"` in the PowerShell console
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: What steps can reproduce the bug?
|
||||
description: Explain the bug and provide a code snippet that can reproduce it.
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: What is the expected behavior?
|
||||
description: If possible, please provide text instead of a screenshot.
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: What do you see instead?
|
||||
description: If possible, please provide text instead of a screenshot.
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Additional information
|
||||
description: Is there anything else you think we should know?
|
||||
24
.github/ISSUE_TEMPLATE/4-feature-request.yml
vendored
24
.github/ISSUE_TEMPLATE/4-feature-request.yml
vendored
@@ -1,24 +0,0 @@
|
||||
name: 🚀 Feature Request
|
||||
description: Suggest an idea, feature, or enhancement
|
||||
labels: [enhancement]
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Thank you for submitting an idea. It helps make Bun better.
|
||||
|
||||
If you want to discuss Bun, or learn how others are using Bun, please
|
||||
join our [Discord](https://discord.gg/CXdq2DP29u) server, where you can share in the [`#feedback`](https://discord.gg/unwUnHBNqy) channel.
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: What is the problem this feature would solve?
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: What is the feature you are proposing to solve the problem?
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: What alternatives have you considered?
|
||||
28
.github/ISSUE_TEMPLATE/6-crash-report.yml
vendored
28
.github/ISSUE_TEMPLATE/6-crash-report.yml
vendored
@@ -1,28 +0,0 @@
|
||||
name: Prefilled crash report
|
||||
description: Report a crash in Bun
|
||||
labels:
|
||||
- crash
|
||||
- needs triage
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
**Thank you so much** for submitting a crash report. You're helping us make Bun more reliable for everyone!
|
||||
- type: textarea
|
||||
id: code
|
||||
attributes:
|
||||
label: How can we reproduce the crash?
|
||||
description: Please provide a [minimal reproduction](https://stackoverflow.com/help/minimal-reproducible-example) using a GitHub repository, [Replit](https://replit.com/@replit/Bun) or [CodeSandbox](https://codesandbox.io/templates/bun)
|
||||
- type: textarea
|
||||
id: logs
|
||||
attributes:
|
||||
label: Relevant log output
|
||||
description: Please copy and paste any relevant log output. This will be
|
||||
automatically formatted into code, so no need for backticks.
|
||||
render: shell
|
||||
- type: textarea
|
||||
id: remapped_trace
|
||||
attributes:
|
||||
label: Stack Trace (bun.report)
|
||||
validations:
|
||||
required: true
|
||||
@@ -1,34 +0,0 @@
name: bun install crash report
description: Report a crash in bun install
labels:
- npm
- crash
body:
- type: markdown
attributes:
value: |
**Thank you so much** for submitting a crash report. You're helping us make Bun more reliable for everyone!
- type: textarea
id: package_json
attributes:
label: "`package.json` file"
description: "Can you upload your `package.json` file? This helps us reproduce the crash."
render: json
- type: textarea
id: repro
attributes:
label: How can we reproduce the crash?
description: Please provide a [minimal reproduction](https://stackoverflow.com/help/minimal-reproducible-example) using a GitHub repository, [Replit](https://replit.com/@replit/Bun) or [CodeSandbox](https://codesandbox.io/templates/bun)
- type: textarea
id: logs
attributes:
label: Relevant log output
description: Please copy and paste any relevant log output. This will be
automatically formatted into code, so no need for backticks.
render: shell
- type: textarea
id: remapped_trace
attributes:
label: Stack Trace (bun.report)
validations:
required: true
43 .github/actions/bump/action.yml vendored
@@ -1,43 +0,0 @@
name: Bump version
description: Bump the version of Bun

inputs:
version:
description: The most recent version of Bun.
required: true
type: string
token:
description: The GitHub token to use for creating a pull request.
required: true
type: string
default: ${{ github.token }}

runs:
using: composite
steps:
- name: Run Bump
shell: bash
id: bump
run: |
set -euo pipefail
MESSAGE=$(bun ./scripts/bump.ts patch --last-version=${{ inputs.version }})
LATEST=$(cat LATEST)
echo "version=$LATEST" >> $GITHUB_OUTPUT
echo "message=$MESSAGE" >> $GITHUB_OUTPUT
- name: Create Pull Request
uses: peter-evans/create-pull-request@v7
with:
add-paths: |
CMakeLists.txt
LATEST
token: ${{ inputs.token }}
commit-message: Bump version to ${{ steps.bump.outputs.version }}
title: Bump to ${{ steps.bump.outputs.version }}
delete-branch: true
branch: github-actions/bump-version-${{ steps.bump.outputs.version }}--${{ github.run_id }}
body: |
## What does this PR do?

${{ steps.bump.outputs.message }}

Auto-bumped by [this workflow](https://github.com/oven-sh/bun/actions/workflows/release.yml)
51 .github/actions/setup-bun/action.yml vendored
@@ -1,51 +0,0 @@
name: Setup Bun
description: An internal version of the 'oven-sh/setup-bun' action.

inputs:
bun-version:
type: string
description: "The version of bun to install: 'latest', 'canary', 'bun-v1.2.0', etc."
default: latest
required: false
baseline:
type: boolean
description: "Whether to use the baseline version of bun."
default: false
required: false
download-url:
type: string
description: "The base URL to download bun from."
default: "https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases"
required: false

runs:
using: composite
steps:
- name: Setup Bun
shell: bash
run: |
case "$(uname -s)" in
Linux*) os=linux;;
Darwin*) os=darwin;;
*) os=windows;;
esac
case "$(uname -m)" in
arm64 | aarch64) arch=aarch64;;
*) arch=x64;;
esac
case "${{ inputs.baseline }}" in
true | 1) target="bun-${os}-${arch}-baseline";;
*) target="bun-${os}-${arch}";;
esac
case "${{ inputs.bun-version }}" in
latest) release="latest";;
canary) release="canary";;
*) release="bun-v${{ inputs.bun-version }}";;
esac
curl -LO "${{ inputs.download-url }}/${release}/${target}.zip" --retry 5
unzip ${target}.zip
mkdir -p ${{ runner.temp }}/.bun/bin
mv ${target}/bun* ${{ runner.temp }}/.bun/bin/
chmod +x ${{ runner.temp }}/.bun/bin/*
ln -fs ${{ runner.temp }}/.bun/bin/bun ${{ runner.temp }}/.bun/bin/bunx
echo "${{ runner.temp }}/.bun/bin" >> ${GITHUB_PATH}
59 .github/pull_request_template.md vendored
@@ -1,3 +1,62 @@
### What does this PR do?

<!-- **Please explain what your changes do**, example: -->

<!--

This adds a new flag --bail to bun test. When set, it will stop running tests after the first failure. This is useful for CI environments where you want to fail fast.

-->

- [ ] Documentation or TypeScript types (it's okay to leave the rest blank in this case)
- [ ] Code changes

### How did you verify your code works?

<!-- **For code changes, please include automated tests**. Feel free to uncomment the line below -->

<!-- I wrote automated tests -->

<!-- If JavaScript/TypeScript modules or builtins changed:

- [ ] I ran `make js` and committed the transpiled changes
- [ ] I or my editor ran Prettier on the changed files (or I ran `bun fmt`)
- [ ] I included a test for the new code, or an existing test covers it

-->

<!-- If Zig files changed:

- [ ] I checked the lifetime of memory allocated to verify it's (1) freed and (2) only freed when it should be
- [ ] I or my editor ran `zig fmt` on the changed files
- [ ] I included a test for the new code, or an existing test covers it
- [ ] JSValue used outside of the stack is either wrapped in a JSC.Strong or is JSValueProtect'ed
-->

<!-- If new methods, getters, or setters were added to a publicly exposed class:

- [ ] I added TypeScript types for the new methods, getters, or setters
-->

<!-- If dependencies in tests changed:

- [ ] I made sure that specific versions of dependencies are used instead of ranged or tagged versions
-->

<!-- If functions were added to exports.zig or bindings.zig

- [ ] I ran `make headers` to regenerate the C header file

-->

<!-- If \*.classes.ts files were added or changed:

- [ ] I ran `make codegen` to regenerate the C++ and Zig code
-->

<!-- If a new builtin ESM/CJS module was added:

- [ ] I updated Aliases in `module_loader.zig` to include the new module
- [ ] I added a test that imports the module
- [ ] I added a test that require() the module

-->
103 .github/workflows/CLAUDE.md vendored
@@ -1,103 +0,0 @@
# GitHub Actions Workflow Maintenance Guide

This document provides guidance for maintaining the GitHub Actions workflows in this repository.

## format.yml Workflow

### Overview
The `format.yml` workflow runs code formatters (Prettier, clang-format, and Zig fmt) on pull requests and pushes to main. It's optimized for speed by running all formatters in parallel.

### Key Components

#### 1. Clang-format Script (`scripts/run-clang-format.sh`)
- **Purpose**: Formats C++ source and header files
- **What it does**:
  - Reads C++ files from `cmake/sources/CxxSources.txt`
  - Finds all header files in `src/` and `packages/`
  - Excludes third-party directories (libuv, napi, deps, vendor, sqlite, etc.)
  - Requires specific clang-format version (no fallbacks)

**Important exclusions**:
- `src/napi/` - Node API headers (third-party)
- `src/bun.js/bindings/libuv/` - libuv headers (third-party)
- `src/bun.js/bindings/sqlite/` - SQLite headers (third-party)
- `src/bun.js/api/ffi-*.h` - FFI headers (generated/third-party)
- `src/deps/` - Dependencies (third-party)
- Files in `vendor/`, `third_party/`, `generated/` directories

#### 2. Parallel Execution
The workflow runs all three formatters simultaneously (a sketch of the pattern follows this list):
- Each formatter outputs with a prefix (`[prettier]`, `[clang-format]`, `[zig]`)
- Output is streamed in real-time without blocking
- Uses GitHub Actions groups (`::group::`) for collapsible sections
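
As a rough illustration of this pattern (a minimal sketch, not the workflow's actual steps; the formatter commands, paths, and prefixes are assumptions), prefixed parallel execution in bash could look like:

```bash
#!/usr/bin/env bash
# Sketch: run three formatters concurrently, prefix every output line,
# and fail if any formatter failed.
pids=""

run_prefixed() {
  prefix="$1"; shift
  # pipefail inside the subshell so a formatter failure survives the sed pipe;
  # sed streams line by line, keeping output interleaved as it arrives
  ( set -o pipefail; "$@" 2>&1 | sed "s/^/[${prefix}] /" ) &
  pids="$pids $!"
}

run_prefixed prettier     bun run prettier --check .           # assumed invocation
run_prefixed clang-format ./scripts/run-clang-format.sh check
run_prefixed zig          zig fmt --check src                  # assumed path

status=0
for pid in $pids; do
  wait "$pid" || status=1
done
exit "$status"
```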

#### 3. Tool Installation

##### Clang-format-19
- Installs ONLY `clang-format-19` package (not the entire LLVM toolchain)
- Uses `--no-install-recommends --no-install-suggests` to skip unnecessary packages
- Quiet installation with `-qq` and `-o=Dpkg::Use-Pty=0`

##### Zig
- Downloads from `oven-sh/zig` releases (musl build for static linking)
- URL: `https://github.com/oven-sh/zig/releases/download/autobuild-{COMMIT}/bootstrap-x86_64-linux-musl.zip`
- Extracts to temp directory to avoid polluting the repository
- Directory structure: `bootstrap-x86_64-linux-musl/zig` (see the download sketch below)
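
For reference, fetching and using that archive by hand could look roughly like this (a sketch; the commit hash is a placeholder for the one pinned in format.yml):

```bash
# Sketch: download the musl Zig bootstrap build into a temp dir and run it from there.
ZIG_COMMIT="<commit-hash-from-format.yml>"   # placeholder, not a real value
tmp="$(mktemp -d)"
curl -fsSL -o "$tmp/zig.zip" \
  "https://github.com/oven-sh/zig/releases/download/autobuild-${ZIG_COMMIT}/bootstrap-x86_64-linux-musl.zip"
unzip -q "$tmp/zig.zip" -d "$tmp"
"$tmp/bootstrap-x86_64-linux-musl/zig" version   # binary lives under bootstrap-x86_64-linux-musl/
rm -rf "$tmp"
```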

### Updating the Workflow

#### To update Zig version:
1. Find the new commit hash from https://github.com/oven-sh/zig/releases
2. Replace the hash in the wget URL (line 65 of format.yml)
3. Test that the URL is valid and the binary works

#### To update clang-format version:
1. Update `LLVM_VERSION_MAJOR` environment variable at the top of format.yml
2. Update the version check in `scripts/run-clang-format.sh` (a sketch of such a check follows)
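
As an illustration only (an assumption about what such a check might look like, not the script's actual code), a version guard could be:

```bash
# Hypothetical guard: refuse to run unless the expected clang-format major version is installed.
want="${LLVM_VERSION_MAJOR:?LLVM_VERSION_MAJOR must be set}"
bin="clang-format-${want}"
command -v "$bin" >/dev/null 2>&1 || { echo "error: $bin not found" >&2; exit 1; }
have="$("$bin" --version | grep -oE '[0-9]+' | head -n 1)"
[ "$have" = "$want" ] || { echo "error: expected clang-format ${want}, found ${have}" >&2; exit 1; }
```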

#### To add/remove file exclusions:
1. Edit the exclusion patterns in `scripts/run-clang-format.sh` (lines 34-39)
2. Test locally to ensure the right files are being formatted

### Performance Optimizations
1. **Parallel execution**: All formatters run simultaneously
2. **Minimal installations**: Only required packages, no extras
3. **Temp directories**: Tools downloaded to temp dirs, cleaned up after use
4. **Streaming output**: Real-time feedback without buffering
5. **Early start**: Formatting begins immediately after each tool is ready

### Troubleshooting

**If formatters appear to run sequentially:**
- Check if output is being buffered (should use `sed` for line prefixing)
- Ensure background processes use `&` and proper wait commands

**If third-party files are being formatted:**
- Review exclusion patterns in `scripts/run-clang-format.sh`
- Check if new third-party directories were added that need exclusion

**If clang-format installation is slow:**
- Ensure using minimal package installation flags
- Check if apt cache needs updating
- Consider caching the clang-format binary between runs

### Testing Changes Locally

```bash
# Test the clang-format script
export LLVM_VERSION_MAJOR=19
./scripts/run-clang-format.sh format

# Test with check mode (no modifications)
./scripts/run-clang-format.sh check

# Test specific file exclusions
./scripts/run-clang-format.sh format 2>&1 | grep -E "(libuv|napi|deps)"
# Should return nothing if exclusions work correctly
```

### Important Notes
- The script defaults to **format** mode (modifies files)
- Always test locally before pushing workflow changes
- The musl Zig build works on glibc systems due to static linking
- Keep the exclusion list updated as new third-party code is added
19 .github/workflows/auto-assign-types.yml vendored
@@ -1,19 +0,0 @@
name: Auto Assign Types Issues

on:
issues:
types: [labeled]

jobs:
auto-assign:
runs-on: ubuntu-latest
if: github.event.label.name == 'types'
permissions:
issues: write
steps:
- name: Assign to alii
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GH_REPO: ${{ github.repository }}
run: |
gh issue edit ${{ github.event.issue.number }} --add-assignee alii
29 .github/workflows/auto-close-duplicates.yml vendored
@@ -1,29 +0,0 @@
name: Auto-close duplicate issues
on:
schedule:
- cron: "0 9 * * *"
workflow_dispatch:

jobs:
auto-close-duplicates:
runs-on: ubuntu-latest
timeout-minutes: 10
concurrency:
group: auto-close-duplicates-${{ github.repository }}
cancel-in-progress: true
permissions:
contents: read
issues: write

steps:
- name: Checkout repository
uses: actions/checkout@v4

- name: Setup Bun
uses: ./.github/actions/setup-bun

- name: Auto-close duplicate issues
run: bun run scripts/auto-close-duplicates.ts
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_REPOSITORY: ${{ github.repository }}
24 .github/workflows/auto-label-claude-prs.yml vendored
@@ -1,24 +0,0 @@
name: Auto-label Claude PRs

on:
pull_request:
types: [opened]

jobs:
auto-label:
if: github.event.pull_request.user.login == 'robobun' || contains(github.event.pull_request.body, '🤖 Generated with')
runs-on: ubuntu-latest
permissions:
contents: read
pull-requests: write
steps:
- name: Add claude label to PRs from robobun
uses: actions/github-script@v7
with:
script: |
github.rest.issues.addLabels({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.issue.number,
labels: ['claude']
});
18 .github/workflows/bun-deploy-site.yml vendored Normal file
@@ -0,0 +1,18 @@
# redeploy Vercel site when a file in `docs` changes
# using VERCEL_DEPLOY_HOOK environment variable

name: Deploy site
on:
push:
paths:
- "docs/**"
branches: [main]

jobs:
deploy:
name: Deploy site
runs-on: ubuntu-latest
if: github.repository_owner == 'oven-sh'
steps:
- name: Trigger Vercel build
run: curl ${{ secrets.VERCEL_DEPLOY_HOOK }}
50 .github/workflows/bun-ecosystem-test.yml vendored Normal file
@@ -0,0 +1,50 @@
name: bun-ecosystem-test

on:
schedule:
- cron: "0 15 * * *" # every day at 7am PST
workflow_dispatch:
inputs:
version:
description: "The version of Bun to run"
required: true
default: "canary"
type: string
jobs:
test:
name: ${{ matrix.tag }}
runs-on: ${{ matrix.os }}
if: github.repository_owner == 'oven-sh'
timeout-minutes: 10
strategy:
fail-fast: false
matrix:
include:
- os: ubuntu-latest
tag: linux-x64
url: linux/x64?avx2=true
- os: ubuntu-latest
tag: linux-x64-baseline
url: linux/x64?baseline=true
# FIXME: runner fails with "No tests found"?
#- os: macos-latest
# tag: darwin-x64
# url: darwin/x64?avx2=true
- os: macos-latest
tag: darwin-x64-baseline
url: darwin/x64?baseline=true
steps:
- id: checkout
name: Checkout
uses: Bhacaz/checkout-files@v2
with:
files: packages/bun-internal-test
- id: setup
name: Setup
uses: oven-sh/setup-bun@v1
with:
bun-download-url: https://bun.sh/download/${{ github.event.inputs.version }}/${{ matrix.url }}
- id: test
name: Test
working-directory: packages/bun-internal-test
run: bun run test:ecosystem
41 .github/workflows/bun-framework-next.yml vendored Normal file
@@ -0,0 +1,41 @@
name: bun-framework-next
on:
push:
paths:
- packages/bun-framework-next/**/*
branches: [main, bun-framework-next-actions]
pull_request:
paths:
- packages/bun-framework-next/**/*
branches: [main]

jobs:
build:
name: lint, test and build on Node ${{ matrix.node }} and ${{ matrix.os }}

runs-on: ${{ matrix.os }}

strategy:
matrix:
node: ["14.x"]
os: [macOS-latest]

steps:
- name: Checkout repo
uses: actions/checkout@v2

- name: Use Node ${{ matrix.node }}
uses: actions/setup-node@v2
with:
node-version: ${{ matrix.node }}

- name: Install PNPM
uses: pnpm/action-setup@v2.0.1
with:
version: 6.21.0

- name: Install dependencies
run: cd packages/bun-framework-next && pnpm install

- name: Type check bun-framework-next
run: cd packages/bun-framework-next && pnpm check
136 .github/workflows/bun-linux-aarch64.yml vendored Normal file
@@ -0,0 +1,136 @@
|
||||
name: bun-linux
|
||||
|
||||
concurrency:
|
||||
group: bun-linux-aarch64-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
REGISTRY: ghcr.io
|
||||
IMAGE_NAME: ${{ github.repository }}
|
||||
TEST_TAG: bun-test'
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
paths:
|
||||
- "src/**/*"
|
||||
- "test/**/*"
|
||||
- "build.zig"
|
||||
- "Makefile"
|
||||
- "Dockerfile"
|
||||
# Allows you to run this workflow manually from the Actions tab
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
linux:
|
||||
name: ${{matrix.tag}}
|
||||
runs-on: ${{matrix.runner}}
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
timeout-minutes: 90
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- cpu: native
|
||||
tag: linux-aarch64
|
||||
arch: aarch64
|
||||
build_arch: arm64
|
||||
runner: linux-arm64
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-linux-arm64-lto.tar.gz"
|
||||
webkit_basename: "bun-webkit-linux-arm64-lto"
|
||||
build_machine_arch: aarch64
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
submodules: recursive
|
||||
- uses: docker/setup-buildx-action@v2
|
||||
id: buildx
|
||||
with:
|
||||
install: true
|
||||
- name: Run
|
||||
run: |
|
||||
rm -rf ${{runner.temp}}/release
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
- run: |
|
||||
mkdir -p /tmp/.buildx-cache-${{matrix.tag}}
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v3
|
||||
with:
|
||||
context: .
|
||||
push: false
|
||||
cache-from: type=local,src=/tmp/.buildx-cache-${{matrix.tag}}
|
||||
cache-to: type=local,dest=/tmp/.buildx-cache-${{matrix.tag}}
|
||||
build-args: |
|
||||
ARCH=${{matrix.arch}}
|
||||
BUILDARCH=${{matrix.build_arch}}
|
||||
BUILD_MACHINE_ARCH=${{matrix.build_machine_arch}}
|
||||
CPU_TARGET=${{matrix.cpu}}
|
||||
WEBKIT_URL=${{matrix.webkit_url}}
|
||||
GIT_SHA=${{github.sha}}
|
||||
WEBKIT_BASENAME=${{matrix.webkit_basename}}
|
||||
platforms: linux/${{matrix.build_arch}}
|
||||
target: artifact
|
||||
outputs: type=local,dest=${{runner.temp}}/release
|
||||
- name: Zip
|
||||
run: |
|
||||
# if zip is not found
|
||||
if [ ! -x "$(command -v zip)" ]; then
|
||||
sudo apt-get update && sudo apt-get install -y zip --no-install-recommends
|
||||
fi
|
||||
|
||||
if [ ! -x "$(command -v strip)" ]; then
|
||||
sudo apt-get update && sudo apt-get install -y binutils --no-install-recommends
|
||||
fi
|
||||
|
||||
cd ${{runner.temp}}/release
|
||||
chmod +x bun-profile bun
|
||||
|
||||
mkdir bun-${{matrix.tag}}-profile
|
||||
mkdir bun-${{matrix.tag}}
|
||||
|
||||
strip bun
|
||||
|
||||
mv bun-profile bun-${{matrix.tag}}-profile/bun-profile
|
||||
mv bun bun-${{matrix.tag}}/bun
|
||||
|
||||
zip -r bun-${{matrix.tag}}-profile.zip bun-${{matrix.tag}}-profile
|
||||
zip -r bun-${{matrix.tag}}.zip bun-${{matrix.tag}}
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: bun-${{matrix.tag}}-profile
|
||||
path: ${{runner.temp}}/release/bun-${{matrix.tag}}-profile.zip
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: bun-${{matrix.tag}}
|
||||
path: ${{runner.temp}}/release/bun-${{matrix.tag}}.zip
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: bun-obj-${{matrix.tag}}
|
||||
path: ${{runner.temp}}/release/bun-obj
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ${{matrix.tag}}-dependencies
|
||||
path: ${{runner.temp}}/release/bun-dependencies
|
||||
- name: Release
|
||||
id: release
|
||||
uses: ncipollo/release-action@v1
|
||||
if: |
|
||||
github.repository_owner == 'oven-sh'
|
||||
&& github.ref == 'refs/heads/main'
|
||||
with:
|
||||
prerelease: true
|
||||
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
|
||||
allowUpdates: true
|
||||
replacesArtifacts: true
|
||||
generateReleaseNotes: true
|
||||
artifactErrorsFailBuild: true
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
name: "Canary (${{github.sha}})"
|
||||
tag: "canary"
|
||||
artifacts: "${{runner.temp}}/release/bun-${{matrix.tag}}.zip,${{runner.temp}}/release/bun-${{matrix.tag}}-profile.zip"
|
||||
232 .github/workflows/bun-linux-build.yml vendored Normal file
@@ -0,0 +1,232 @@
|
||||
name: bun-linux
|
||||
|
||||
concurrency:
|
||||
group: bun-linux-build-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
REGISTRY: ghcr.io
|
||||
IMAGE_NAME: ${{ github.repository }}
|
||||
TEST_TAG: bun-test'
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
paths:
|
||||
- "src/**/*"
|
||||
- "test/**/*"
|
||||
- "build.zig"
|
||||
- "Makefile"
|
||||
- "Dockerfile"
|
||||
pull_request:
|
||||
branches:
|
||||
- main
|
||||
paths:
|
||||
- "src/**/*"
|
||||
- "test/**/*"
|
||||
- "build.zig"
|
||||
- "Makefile"
|
||||
- "Dockerfile"
|
||||
# Allows you to run this workflow manually from the Actions tab
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
linux:
|
||||
name: ${{matrix.tag}}
|
||||
runs-on: ${{matrix.runner}}
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
timeout-minutes: 90
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- cpu: haswell
|
||||
tag: linux-x64
|
||||
arch: x86_64
|
||||
build_arch: amd64
|
||||
runner: big-ubuntu
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-linux-amd64-lto.tar.gz"
|
||||
webkit_basename: "bun-webkit-linux-amd64-lto"
|
||||
build_machine_arch: x86_64
|
||||
- cpu: nehalem
|
||||
tag: linux-x64-baseline
|
||||
arch: x86_64
|
||||
build_arch: amd64
|
||||
runner: big-ubuntu
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-linux-amd64-lto.tar.gz"
|
||||
webkit_basename: "bun-webkit-linux-amd64-lto"
|
||||
build_machine_arch: x86_64
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
submodules: recursive
|
||||
- uses: docker/setup-buildx-action@v2
|
||||
id: buildx
|
||||
with:
|
||||
install: true
|
||||
- name: Run
|
||||
run: |
|
||||
rm -rf ${{runner.temp}}/release
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
- run: |
|
||||
mkdir -p /tmp/.buildx-cache-${{matrix.tag}}
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v3
|
||||
with:
|
||||
context: .
|
||||
push: false
|
||||
cache-from: type=local,src=/tmp/.buildx-cache-${{matrix.tag}}
|
||||
cache-to: type=local,dest=/tmp/.buildx-cache-${{matrix.tag}}
|
||||
build-args: |
|
||||
ARCH=${{matrix.arch}}
|
||||
BUILDARCH=${{matrix.build_arch}}
|
||||
BUILD_MACHINE_ARCH=${{matrix.build_machine_arch}}
|
||||
CPU_TARGET=${{matrix.cpu}}
|
||||
WEBKIT_URL=${{matrix.webkit_url}}
|
||||
GIT_SHA=${{github.sha}}
|
||||
WEBKIT_BASENAME=${{matrix.webkit_basename}}
|
||||
platforms: linux/${{matrix.build_arch}}
|
||||
target: artifact
|
||||
outputs: type=local,dest=${{runner.temp}}/release
|
||||
- name: Zip
|
||||
run: |
|
||||
# if zip is not found
|
||||
if [ ! -x "$(command -v zip)" ]; then
|
||||
sudo apt-get update && sudo apt-get install -y zip --no-install-recommends
|
||||
fi
|
||||
|
||||
if [ ! -x "$(command -v strip)" ]; then
|
||||
sudo apt-get update && sudo apt-get install -y binutils --no-install-recommends
|
||||
fi
|
||||
|
||||
cd ${{runner.temp}}/release
|
||||
chmod +x bun-profile bun
|
||||
|
||||
mkdir bun-${{matrix.tag}}-profile
|
||||
mkdir bun-${{matrix.tag}}
|
||||
|
||||
strip bun
|
||||
|
||||
mv bun-profile bun-${{matrix.tag}}-profile/bun-profile
|
||||
mv bun bun-${{matrix.tag}}/bun
|
||||
|
||||
zip -r bun-${{matrix.tag}}-profile.zip bun-${{matrix.tag}}-profile
|
||||
zip -r bun-${{matrix.tag}}.zip bun-${{matrix.tag}}
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: bun-${{matrix.tag}}-profile
|
||||
path: ${{runner.temp}}/release/bun-${{matrix.tag}}-profile.zip
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: bun-${{matrix.tag}}
|
||||
path: ${{runner.temp}}/release/bun-${{matrix.tag}}.zip
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: bun-obj-${{matrix.tag}}
|
||||
path: ${{runner.temp}}/release/bun-obj
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ${{matrix.tag}}-dependencies
|
||||
path: ${{runner.temp}}/release/bun-dependencies
|
||||
- name: Release
|
||||
id: release
|
||||
uses: ncipollo/release-action@v1
|
||||
if: |
|
||||
github.repository_owner == 'oven-sh'
|
||||
&& github.ref == 'refs/heads/main'
|
||||
with:
|
||||
prerelease: true
|
||||
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
|
||||
allowUpdates: true
|
||||
replacesArtifacts: true
|
||||
generateReleaseNotes: true
|
||||
artifactErrorsFailBuild: true
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
name: "Canary (${{github.sha}})"
|
||||
tag: "canary"
|
||||
artifacts: "${{runner.temp}}/release/bun-${{matrix.tag}}.zip,${{runner.temp}}/release/bun-${{matrix.tag}}-profile.zip"
|
||||
|
||||
linux-test:
|
||||
name: Tests ${{matrix.tag}}
|
||||
runs-on: ubuntu-latest
|
||||
needs: [linux]
|
||||
if: github.event_name == 'pull_request'
|
||||
timeout-minutes: 20
|
||||
outputs:
|
||||
failing_tests: ${{ steps.test.outputs.failing_tests }}
|
||||
failing_tests_count: ${{ steps.test.outputs.failing_tests_count }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- tag: linux-x64
|
||||
- tag: linux-x64-baseline
|
||||
steps:
|
||||
- id: checkout
|
||||
name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
submodules: false
|
||||
- id: download
|
||||
name: Download
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: bun-${{matrix.tag}}
|
||||
path: ${{runner.temp}}/release
|
||||
- id: install
|
||||
name: Install
|
||||
run: |
|
||||
cd ${{runner.temp}}/release
|
||||
unzip bun-${{matrix.tag}}.zip
|
||||
cd bun-${{matrix.tag}}
|
||||
chmod +x bun
|
||||
pwd >> $GITHUB_PATH
|
||||
./bun --version
|
||||
- id: test
|
||||
name: Test (node runner)
|
||||
env:
|
||||
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
|
||||
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
|
||||
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
|
||||
# if: ${{github.event.inputs.use_bun == 'false'}}
|
||||
run: |
|
||||
sudo apt-get update && sudo apt-get install -y openssl
|
||||
bun install
|
||||
bun install --cwd test
|
||||
bun install --cwd packages/bun-internal-test
|
||||
node packages/bun-internal-test/src/runner.node.mjs || true
|
||||
- name: Comment on PR
|
||||
if: steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
|
||||
uses: thollander/actions-comment-pull-request@v2
|
||||
with:
|
||||
comment_tag: test-failures-${{matrix.tag}}
|
||||
message: |
|
||||
❌ @${{ github.actor }} ${{ steps.test.outputs.failing_tests_count }} files with test failures on ${{ matrix.tag }}:
|
||||
|
||||
${{ steps.test.outputs.failing_tests }}
|
||||
|
||||
**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
|
||||
|
||||
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
|
||||
- name: Uncomment on PR
|
||||
if: steps.test.outputs.failing_tests == '' && github.event_name == 'pull_request'
|
||||
uses: thollander/actions-comment-pull-request@v2
|
||||
with:
|
||||
comment_tag: test-failures-${{matrix.tag}}
|
||||
mode: upsert
|
||||
create_if_not_exists: false
|
||||
message: |
|
||||
✅ test failures on ${{ matrix.tag }} have been resolved.
|
||||
|
||||
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
|
||||
- id: fail
|
||||
name: Fail the build
|
||||
if: steps.test.outputs.failing_tests != ''
|
||||
run: exit 1
|
||||
470 .github/workflows/bun-mac-aarch64.yml vendored Normal file
@@ -0,0 +1,470 @@
|
||||
name: bun-macOS-aarch64
|
||||
|
||||
concurrency:
|
||||
group: bun-macOS-aarch64-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
REGISTRY: ghcr.io
|
||||
IMAGE_NAME: ${{ github.repository }}
|
||||
TEST_TAG: bun-test'
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main]
|
||||
paths:
|
||||
- "src/**/*"
|
||||
- "test/**/*"
|
||||
- "build.zig"
|
||||
- "Makefile"
|
||||
- "Dockerfile"
|
||||
pull_request:
|
||||
branches: [main]
|
||||
paths:
|
||||
- "src/**/*"
|
||||
- "test/**/*"
|
||||
- "build.zig"
|
||||
- "Makefile"
|
||||
- "Dockerfile"
|
||||
# Allows you to run this workflow manually from the Actions tab
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
macos-object-files:
|
||||
name: macOS Object
|
||||
runs-on: med-ubuntu
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
# - cpu: nehalem
|
||||
# arch: x86_64
|
||||
# tag: bun-obj-darwin-x64-baseline
|
||||
# - cpu: haswell
|
||||
# arch: x86_64
|
||||
# tag: bun-obj-darwin-x64
|
||||
- cpu: native
|
||||
arch: aarch64
|
||||
tag: bun-obj-darwin-aarch64
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
submodules: recursive
|
||||
- uses: docker/setup-buildx-action@v2
|
||||
id: buildx
|
||||
with:
|
||||
install: true
|
||||
- name: Run
|
||||
run: |
|
||||
rm -rf ${{runner.temp}}/release
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v3
|
||||
if: runner.arch == 'X64'
|
||||
with:
|
||||
context: .
|
||||
push: false
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=min
|
||||
build-args: |
|
||||
ARCH=${{ matrix.arch }}
|
||||
BUILDARCH=amd64
|
||||
BUILD_MACHINE_ARCH=x86_64
|
||||
CPU_TARGET=${{ matrix.cpu }}
|
||||
TRIPLET=${{matrix.arch}}-macos-none
|
||||
GIT_SHA=${{github.sha}}
|
||||
platforms: linux/amd64
|
||||
target: build_release_obj
|
||||
outputs: type=local,dest=${{runner.temp}}/release
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v3
|
||||
if: runner.arch == 'ARM64'
|
||||
with:
|
||||
context: .
|
||||
push: false
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=min
|
||||
build-args: |
|
||||
ARCH=${{ matrix.arch }}
|
||||
BUILDARCH=arm64
|
||||
BUILD_MACHINE_ARCH=aarch64
|
||||
CPU_TARGET=${{ matrix.cpu }}
|
||||
TRIPLET=${{matrix.arch}}-macos-none
|
||||
GIT_SHA=${{github.sha}}
|
||||
platforms: linux/arm64
|
||||
target: build_release_obj
|
||||
outputs: type=local,dest=${{runner.temp}}/release
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ${{ matrix.tag }}
|
||||
path: ${{runner.temp}}/release/bun.o
|
||||
macOS-cpp:
|
||||
name: macOS C++
|
||||
runs-on: ${{ matrix.runner }}
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
timeout-minutes: 90
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
# - cpu: nehalem
|
||||
# arch: x86_64
|
||||
# tag: bun-darwin-x64-baseline
|
||||
# obj: bun-obj-darwin-x64-baseline
|
||||
# runner: macos-11
|
||||
# artifact: bun-obj-darwin-x64-baseline
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# dependencies: true
|
||||
# compile_obj: false
|
||||
# - cpu: haswell
|
||||
# arch: x86_64
|
||||
# tag: bun-darwin-x64
|
||||
# obj: bun-obj-darwin-x64
|
||||
# runner: macos-11
|
||||
# artifact: bun-obj-darwin-x64
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# dependencies: true
|
||||
# compile_obj: false
|
||||
# - cpu: nehalem
|
||||
# arch: x86_64
|
||||
# tag: bun-darwin-x64-baseline
|
||||
# obj: bun-obj-darwin-x64-baseline
|
||||
# runner: macos-11
|
||||
# artifact: bun-obj-darwin-x64-baseline
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# dependencies: false
|
||||
# compile_obj: true
|
||||
# - cpu: haswell
|
||||
# arch: x86_64
|
||||
# tag: bun-darwin-x64
|
||||
# obj: bun-obj-darwin-x64
|
||||
# runner: macos-11
|
||||
# artifact: bun-obj-darwin-x64
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# dependencies: false
|
||||
# compile_obj: true
|
||||
- cpu: native
|
||||
arch: aarch64
|
||||
tag: bun-darwin-aarch64
|
||||
obj: bun-obj-darwin-aarch64
|
||||
artifact: bun-obj-darwin-aarch64
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-arm64-lto.tar.gz"
|
||||
runner: macos-arm64
|
||||
dependencies: true
|
||||
compile_obj: true
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Checkout submodules
|
||||
run: git submodule update --init --recursive --depth=1 --progress -j $(sysctl -n hw.ncpu)
|
||||
- name: Install dependencies
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
|
||||
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
|
||||
HOMEBREW_NO_AUTO_UPDATE: 1
|
||||
HOMEBREW_NO_INSTALL_CLEANUP: 1
|
||||
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
|
||||
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
run: |
|
||||
brew install ccache rust llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
|
||||
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix llvm@15)/bin" >> $GITHUB_PATH
|
||||
brew link --overwrite llvm@15
|
||||
- name: ccache
|
||||
uses: hendrikmuhs/ccache-action@v1.2
|
||||
with:
|
||||
key: ${{ runner.os }}-ccache-${{ matrix.tag }}
|
||||
restore-keys: ${{ runner.os }}-ccache-${{ matrix.tag }}
|
||||
- name: Download WebKit
|
||||
if: matrix.compile_obj
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
|
||||
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
|
||||
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
BUN_RELEASE_DIR: ${{ runner.temp }}/release
|
||||
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
|
||||
run: |
|
||||
rm -rf $JSC_BASE_DIR
|
||||
mkdir -p $JSC_BASE_DIR
|
||||
curl -L ${{ matrix.webkit_url }} | tar -xz -C $JSC_BASE_DIR --strip-components=1
|
||||
- name: Compile dependencies
|
||||
if: matrix.dependencies
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
|
||||
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
|
||||
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
BUN_RELEASE_DIR: ${{ runner.temp }}/release
|
||||
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
|
||||
run: |
|
||||
mkdir -p $BUN_DEPS_OUT_DIR
|
||||
make vendor-without-check
|
||||
- name: Compile C++
|
||||
if: matrix.compile_obj
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
|
||||
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
|
||||
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
BUN_RELEASE_DIR: ${{ runner.temp }}/release
|
||||
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
|
||||
run: |
|
||||
mkdir -p $OBJ_DIR $BUN_DEPS_OUT_DIR
|
||||
make clean-bindings
|
||||
make -j $(sysctl -n hw.ncpu) release-bindings
|
||||
- name: Upload C++
|
||||
if: matrix.compile_obj
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ${{ matrix.tag }}-cpp
|
||||
path: ${{ runner.temp }}/bun-cpp-obj
|
||||
- name: Upload Dependencies
|
||||
if: matrix.dependencies
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ${{ matrix.tag }}-deps
|
||||
path: ${{runner.temp}}/bun-deps
|
||||
macOS:
|
||||
name: macOS Link
|
||||
runs-on: ${{ matrix.runner }}
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
needs: [macOS-cpp, macos-object-files]
|
||||
timeout-minutes: 90
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
# - cpu: nehalem
|
||||
# arch: x86_64
|
||||
# tag: bun-darwin-x64-baseline
|
||||
# obj: bun-obj-darwin-x64-baseline
|
||||
# package: bun-darwin-x64
|
||||
# runner: macos-11
|
||||
# artifact: bun-obj-darwin-x64-baseline
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# - cpu: haswell
|
||||
# arch: x86_64
|
||||
# tag: bun-darwin-x64
|
||||
# obj: bun-obj-darwin-x64
|
||||
# package: bun-darwin-x64
|
||||
# runner: macos-11
|
||||
# artifact: bun-obj-darwin-x64
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
- cpu: native
|
||||
arch: aarch64
|
||||
tag: bun-darwin-aarch64
|
||||
obj: bun-obj-darwin-aarch64
|
||||
package: bun-darwin-aarch64
|
||||
artifact: bun-obj-darwin-aarch64
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-arm64-lto.tar.gz"
|
||||
runner: macos-arm64
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Checkout submodules
|
||||
run: git submodule update --init --recursive --depth=1 --progress -j $(sysctl -n hw.ncpu)
|
||||
- name: Install dependencies
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
|
||||
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
|
||||
HOMEBREW_NO_AUTO_UPDATE: 1
|
||||
HOMEBREW_NO_INSTALL_CLEANUP: 1
|
||||
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
|
||||
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
run: |
|
||||
brew install rust ccache llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
|
||||
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix llvm@15)/bin" >> $GITHUB_PATH
|
||||
brew link --overwrite llvm@15
|
||||
- name: ccache
|
||||
uses: hendrikmuhs/ccache-action@v1.2
|
||||
with:
|
||||
key: ${{ runner.os }}-ccache-${{ matrix.tag }}-link
|
||||
restore-keys: ${{ runner.os }}-ccache-${{ matrix.tag }}-link
|
||||
- name: Download WebKit
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
|
||||
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
|
||||
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
run: |
|
||||
rm -rf $JSC_BASE_DIR
|
||||
mkdir -p $JSC_BASE_DIR
|
||||
curl -L ${{ matrix.webkit_url }} | tar -xz -C $JSC_BASE_DIR --strip-components=1
|
||||
- name: Download C++
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: ${{ matrix.tag }}-cpp
|
||||
path: ${{ runner.temp }}/bun-cpp-obj
|
||||
- name: Download Dependencies
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: ${{ matrix.tag }}-deps
|
||||
path: ${{ runner.temp }}/bun-deps
|
||||
- name: Download Object
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: ${{ matrix.obj }}
|
||||
path: ${{ runner.temp }}/release
|
||||
- name: Link
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
|
||||
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
|
||||
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
BUN_RELEASE_DIR: ${{ runner.temp }}/release
|
||||
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
|
||||
run: |
|
||||
rm -rf packages/${{ matrix.package }}
|
||||
mkdir -p packages/${{ matrix.package }}
|
||||
mv ${{ runner.temp }}/release/* packages/${{ matrix.package }}/
|
||||
make bun-link-lld-release copy-to-bun-release-dir-bin
|
||||
- name: Zip
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
|
||||
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
|
||||
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
BUN_RELEASE_DIR: ${{ runner.temp }}/release
|
||||
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
|
||||
run: |
|
||||
cd ${{runner.temp}}/release
|
||||
chmod +x bun-profile bun
|
||||
|
||||
mkdir ${{matrix.tag}}-profile
|
||||
mkdir ${{matrix.tag}}
|
||||
|
||||
/usr/bin/strip -S bun
|
||||
|
||||
mv bun-profile ${{matrix.tag}}-profile/bun-profile
|
||||
mv bun ${{matrix.tag}}/bun
|
||||
|
||||
zip -r ${{matrix.tag}}-profile.zip ${{matrix.tag}}-profile
|
||||
zip -r ${{matrix.tag}}.zip ${{matrix.tag}}
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ${{matrix.tag}}-profile
|
||||
path: ${{runner.temp}}/release/${{matrix.tag}}-profile.zip
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ${{matrix.tag}}
|
||||
path: ${{runner.temp}}/release/${{matrix.tag}}.zip
|
||||
- name: Release
|
||||
id: release
|
||||
uses: ncipollo/release-action@v1
|
||||
if: |
|
||||
github.repository_owner == 'oven-sh'
|
||||
&& github.ref == 'refs/heads/main'
|
||||
with:
|
||||
prerelease: true
|
||||
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
|
||||
allowUpdates: true
|
||||
replacesArtifacts: true
|
||||
generateReleaseNotes: true
|
||||
artifactErrorsFailBuild: true
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
name: "Canary (${{github.sha}})"
|
||||
tag: "canary"
|
||||
artifacts: "${{runner.temp}}/release/${{matrix.tag}}.zip,${{runner.temp}}/release/${{matrix.tag}}-profile.zip"
|
||||
macOS-test:
|
||||
name: Tests ${{matrix.tag}}
|
||||
runs-on: ${{ matrix.runner }}
|
||||
needs: [macOS]
|
||||
if: github.event_name == 'pull_request' && github.repository_owner == 'oven-sh'
|
||||
timeout-minutes: 30
|
||||
outputs:
|
||||
failing_tests: ${{ steps.test.outputs.failing_tests }}
|
||||
failing_tests_count: ${{ steps.test.outputs.failing_tests_count }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- tag: bun-darwin-aarch64
|
||||
runner: macos-arm64
|
||||
steps:
|
||||
- id: checkout
|
||||
name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
submodules: false
|
||||
- id: download
|
||||
name: Download
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: ${{matrix.tag}}
|
||||
path: ${{runner.temp}}/release
|
||||
- id: install
|
||||
name: Install
|
||||
run: |
|
||||
cd ${{runner.temp}}/release
|
||||
unzip ${{matrix.tag}}.zip
|
||||
cd ${{matrix.tag}}
|
||||
chmod +x bun
|
||||
pwd >> $GITHUB_PATH
|
||||
./bun --version
|
||||
- id: test
|
||||
name: Test (node runner)
|
||||
env:
|
||||
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
|
||||
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
|
||||
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
|
||||
# if: ${{github.event.inputs.use_bun == 'false'}}
|
||||
run: |
|
||||
bun install
|
||||
bun install --cwd test
|
||||
bun install --cwd packages/bun-internal-test
|
||||
node packages/bun-internal-test/src/runner.node.mjs || true
|
||||
- name: Comment on PR
|
||||
if: steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
|
||||
uses: thollander/actions-comment-pull-request@v2
|
||||
with:
|
||||
comment_tag: test-failures-${{matrix.tag}}
|
||||
message: |
|
||||
❌ @${{ github.actor }} ${{ steps.test.outputs.failing_tests_count }} files with test failures on ${{ matrix.tag }}:
|
||||
|
||||
${{ steps.test.outputs.failing_tests }}
|
||||
|
||||
**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
|
||||
|
||||
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
|
||||
- name: Uncomment on PR
|
||||
if: steps.test.outputs.failing_tests == '' && github.event_name == 'pull_request'
|
||||
uses: thollander/actions-comment-pull-request@v2
|
||||
with:
|
||||
comment_tag: test-failures-${{matrix.tag}}
|
||||
mode: upsert
|
||||
create_if_not_exists: false
|
||||
message: |
|
||||
✅ test failures on ${{ matrix.tag }} have been resolved.
|
||||
|
||||
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
|
||||
- id: fail
|
||||
name: Fail the build
|
||||
if: steps.test.outputs.failing_tests != ''
|
||||
run: exit 1
|
||||
474 .github/workflows/bun-mac-x64-baseline.yml vendored Normal file
@@ -0,0 +1,474 @@
|
||||
name: bun-macOS-x64-baseline
|
||||
|
||||
concurrency:
|
||||
group: bun-macOS-x64-baseline-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
REGISTRY: ghcr.io
|
||||
IMAGE_NAME: ${{ github.repository }}
|
||||
TEST_TAG: bun-test'
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main]
|
||||
paths:
|
||||
- "src/**/*"
|
||||
- "test/**/*"
|
||||
- "build.zig"
|
||||
- "Makefile"
|
||||
- "Dockerfile"
|
||||
pull_request:
|
||||
branches: [main]
|
||||
paths:
|
||||
- "src/**/*"
|
||||
- "test/**/*"
|
||||
- "build.zig"
|
||||
- "Makefile"
|
||||
- "Dockerfile"
|
||||
# Allows you to run this workflow manually from the Actions tab
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
macos-object-files:
|
||||
name: macOS Object
|
||||
runs-on: med-ubuntu
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- cpu: nehalem
|
||||
arch: x86_64
|
||||
tag: bun-obj-darwin-x64-baseline
|
||||
# - cpu: haswell
|
||||
# arch: x86_64
|
||||
# tag: bun-obj-darwin-x64
|
||||
# - cpu: native
|
||||
# arch: aarch64
|
||||
# tag: bun-obj-darwin-aarch64
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
submodules: recursive
|
||||
- uses: docker/setup-buildx-action@v2
|
||||
id: buildx
|
||||
with:
|
||||
install: true
|
||||
- name: Run
|
||||
run: |
|
||||
rm -rf ${{runner.temp}}/release
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v3
|
||||
if: runner.arch == 'X64'
|
||||
with:
|
||||
context: .
|
||||
push: false
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=min
|
||||
build-args: |
|
||||
ARCH=${{ matrix.arch }}
|
||||
BUILDARCH=amd64
|
||||
BUILD_MACHINE_ARCH=x86_64
|
||||
CPU_TARGET=${{ matrix.cpu }}
|
||||
TRIPLET=${{matrix.arch}}-macos-none
|
||||
GIT_SHA=${{github.sha}}
|
||||
platforms: linux/amd64
|
||||
target: build_release_obj
|
||||
outputs: type=local,dest=${{runner.temp}}/release
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v3
|
||||
if: runner.arch == 'ARM64'
|
||||
with:
|
||||
context: .
|
||||
push: false
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=min
|
||||
build-args: |
|
||||
ARCH=${{ matrix.arch }}
|
||||
BUILDARCH=arm64
|
||||
BUILD_MACHINE_ARCH=aarch64
|
||||
CPU_TARGET=${{ matrix.cpu }}
|
||||
TRIPLET=${{matrix.arch}}-macos-none
|
||||
GIT_SHA=${{github.sha}}
|
||||
platforms: linux/arm64
|
||||
target: build_release_obj
|
||||
outputs: type=local,dest=${{runner.temp}}/release
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ${{ matrix.tag }}
|
||||
path: ${{runner.temp}}/release/bun.o
|
||||
macOS-cpp:
|
||||
name: macOS C++
|
||||
runs-on: ${{ matrix.runner }}
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
timeout-minutes: 90
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- cpu: nehalem
|
||||
arch: x86_64
|
||||
tag: bun-darwin-x64-baseline
|
||||
obj: bun-obj-darwin-x64-baseline
|
||||
runner: macos-11
|
||||
artifact: bun-obj-darwin-x64-baseline
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
dependencies: true
|
||||
compile_obj: false
|
||||
# - cpu: haswell
|
||||
# arch: x86_64
|
||||
# tag: bun-darwin-x64
|
||||
# obj: bun-obj-darwin-x64
|
||||
# runner: macos-11
|
||||
# artifact: bun-obj-darwin-x64
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# dependencies: true
|
||||
# compile_obj: false
|
||||
- cpu: nehalem
|
||||
arch: x86_64
|
||||
tag: bun-darwin-x64-baseline
|
||||
obj: bun-obj-darwin-x64-baseline
|
||||
runner: macos-11
|
||||
artifact: bun-obj-darwin-x64-baseline
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
dependencies: false
|
||||
compile_obj: true
|
||||
# - cpu: haswell
|
||||
# arch: x86_64
|
||||
# tag: bun-darwin-x64
|
||||
# obj: bun-obj-darwin-x64
|
||||
# runner: macos-11
|
||||
# artifact: bun-obj-darwin-x64
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# dependencies: false
|
||||
# compile_obj: true
|
||||
# - cpu: native
|
||||
# arch: aarch64
|
||||
# tag: bun-darwin-aarch64
|
||||
# obj: bun-obj-darwin-aarch64
|
||||
# artifact: bun-obj-darwin-aarch64
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# runner: macos-arm64
|
||||
# dependencies: true
|
||||
# compile_obj: true
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Checkout submodules
|
||||
run: git submodule update --init --recursive --depth=1 --progress -j $(sysctl -n hw.ncpu)
|
||||
- name: Install dependencies
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
|
||||
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
|
||||
HOMEBREW_NO_AUTO_UPDATE: 1
|
||||
HOMEBREW_NO_INSTALL_CLEANUP: 1
|
||||
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
|
||||
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
run: |
|
||||
brew install ccache rust llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
|
||||
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix llvm@15)/bin" >> $GITHUB_PATH
|
||||
brew link --overwrite llvm@15
|
||||
- name: ccache (dependencies)
|
||||
uses: hendrikmuhs/ccache-action@v1.2
|
||||
if: matrix.dependencies
|
||||
with:
|
||||
key: ${{ runner.os }}-ccache-${{ matrix.tag }}-dependencies
|
||||
restore-keys: ${{ runner.os }}-ccache-${{ matrix.tag }}-dependencies
|
||||
- name: ccache (c++)
|
||||
uses: hendrikmuhs/ccache-action@v1.2
|
||||
if: matrix.compile_obj
|
||||
with:
|
||||
key: ${{ runner.os }}-ccache-${{ matrix.tag }}-obj
|
||||
restore-keys: ${{ runner.os }}-ccache-${{ matrix.tag }}-obj
|
||||
- name: Download WebKit
|
||||
if: matrix.compile_obj
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
|
||||
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
|
||||
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
run: |
|
||||
rm -rf $JSC_BASE_DIR
|
||||
mkdir -p $JSC_BASE_DIR
|
||||
curl -L ${{ matrix.webkit_url }} | tar -xz -C $JSC_BASE_DIR --strip-components=1
|
||||
- name: Compile dependencies
|
||||
if: matrix.dependencies
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
|
||||
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
|
||||
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
|
||||
run: |
|
||||
mkdir -p $OBJ_DIR $BUN_DEPS_OUT_DIR
|
||||
make vendor-without-check
|
||||
- name: Compile C++
|
||||
if: matrix.compile_obj
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
|
||||
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
|
||||
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
|
||||
run: |
|
||||
mkdir -p $OBJ_DIR $BUN_DEPS_OUT_DIR
|
||||
make -j $(sysctl -n hw.ncpu) release-bindings
|
||||
- name: Upload C++
|
||||
if: matrix.compile_obj
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ${{ matrix.tag }}-cpp
|
||||
path: ${{ runner.temp }}/bun-cpp-obj
|
||||
- name: Upload Dependencies
|
||||
if: matrix.dependencies
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ${{ matrix.tag }}-deps
|
||||
path: ${{runner.temp}}/bun-deps
|
||||
macOS:
|
||||
name: macOS Link
|
||||
runs-on: ${{ matrix.runner }}
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
needs: [macOS-cpp, macos-object-files]
|
||||
timeout-minutes: 90
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- cpu: nehalem
|
||||
arch: x86_64
|
||||
tag: bun-darwin-x64-baseline
|
||||
obj: bun-obj-darwin-x64-baseline
|
||||
package: bun-darwin-x64
|
||||
runner: macos-11
|
||||
artifact: bun-obj-darwin-x64-baseline
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# - cpu: haswell
|
||||
# arch: x86_64
|
||||
# tag: bun-darwin-x64
|
||||
# obj: bun-obj-darwin-x64
|
||||
# package: bun-darwin-x64
|
||||
# runner: macos-11
|
||||
# artifact: bun-obj-darwin-x64
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# - cpu: native
|
||||
# arch: aarch64
|
||||
# tag: bun-darwin-aarch64
|
||||
# obj: bun-obj-darwin-aarch64
|
||||
# package: bun-darwin-aarch64
|
||||
# artifact: bun-obj-darwin-aarch64
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# runner: macos-arm64
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Checkout submodules
|
||||
run: git submodule update --init --recursive --depth=1 --progress -j $(sysctl -n hw.ncpu)
|
||||
- name: Install dependencies
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
|
||||
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
|
||||
HOMEBREW_NO_AUTO_UPDATE: 1
|
||||
HOMEBREW_NO_INSTALL_CLEANUP: 1
|
||||
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
|
||||
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
run: |
|
||||
brew install ccache rust llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
|
||||
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix llvm@15)/bin" >> $GITHUB_PATH
|
||||
brew link --overwrite llvm@15
|
||||
- name: ccache (link)
|
||||
uses: hendrikmuhs/ccache-action@v1.2
|
||||
with:
|
||||
key: ${{ runner.os }}-ccache-${{ matrix.tag }}-link
|
||||
restore-keys: ${{ runner.os }}-ccache-${{ matrix.tag }}-link
|
||||
- name: Download WebKit
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
|
||||
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
|
||||
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
BUN_RELEASE_DIR: ${{ runner.temp }}/release
|
||||
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
|
||||
run: |
|
||||
rm -rf $JSC_BASE_DIR
|
||||
mkdir -p $JSC_BASE_DIR
|
||||
curl -L ${{ matrix.webkit_url }} | tar -xz -C $JSC_BASE_DIR --strip-components=1
|
||||
- name: Download C++
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: ${{ matrix.tag }}-cpp
|
||||
path: ${{ runner.temp }}/bun-cpp-obj
|
||||
- name: Download Dependencies
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: ${{ matrix.tag }}-deps
|
||||
path: ${{ runner.temp }}/bun-deps
|
||||
- name: Download Object
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: ${{ matrix.obj }}
|
||||
path: ${{ runner.temp }}/release
|
||||
- name: Link
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
|
||||
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
|
||||
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
BUN_RELEASE_DIR: ${{ runner.temp }}/release
|
||||
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
|
||||
run: |
|
||||
rm -rf packages/${{ matrix.package }}
|
||||
mkdir -p packages/${{ matrix.package }}
|
||||
mv ${{ runner.temp }}/release/* packages/${{ matrix.package }}/
|
||||
make bun-link-lld-release copy-to-bun-release-dir-bin
|
||||
- name: Zip
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
|
||||
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
|
||||
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
BUN_RELEASE_DIR: ${{ runner.temp }}/release
|
||||
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
|
||||
run: |
|
||||
cd ${{runner.temp}}/release
|
||||
chmod +x bun-profile bun
|
||||
|
||||
mkdir ${{matrix.tag}}-profile
|
||||
mkdir ${{matrix.tag}}
|
||||
|
||||
/usr/bin/strip -S bun
|
||||
|
||||
mv bun-profile ${{matrix.tag}}-profile/bun-profile
|
||||
mv bun ${{matrix.tag}}/bun
|
||||
|
||||
zip -r ${{matrix.tag}}-profile.zip ${{matrix.tag}}-profile
|
||||
zip -r ${{matrix.tag}}.zip ${{matrix.tag}}
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ${{matrix.tag}}-profile
|
||||
path: ${{runner.temp}}/release/${{matrix.tag}}-profile.zip
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ${{matrix.tag}}
|
||||
path: ${{runner.temp}}/release/${{matrix.tag}}.zip
|
||||
- name: Release
|
||||
id: release
|
||||
uses: ncipollo/release-action@v1
|
||||
if: |
|
||||
github.repository_owner == 'oven-sh'
|
||||
&& github.ref == 'refs/heads/main'
|
||||
with:
|
||||
prerelease: true
|
||||
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
|
||||
allowUpdates: true
|
||||
replacesArtifacts: true
|
||||
generateReleaseNotes: true
|
||||
artifactErrorsFailBuild: true
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
name: "Canary (${{github.sha}})"
|
||||
tag: "canary"
|
||||
artifacts: "${{runner.temp}}/release/${{matrix.tag}}.zip,${{runner.temp}}/release/${{matrix.tag}}-profile.zip"
|
||||
macOS-test:
|
||||
name: Tests ${{matrix.tag}}
|
||||
runs-on: ${{ matrix.runner }}
|
||||
needs: [macOS]
|
||||
if: github.event_name == 'pull_request' && github.repository_owner == 'oven-sh'
|
||||
timeout-minutes: 30
|
||||
outputs:
|
||||
failing_tests: ${{ steps.test.outputs.failing_tests }}
|
||||
failing_tests_count: ${{ steps.test.outputs.failing_tests_count }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- tag: bun-darwin-x64-baseline
|
||||
runner: macos-11
|
||||
steps:
|
||||
- id: checkout
|
||||
name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
submodules: false
|
||||
- id: download
|
||||
name: Download
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: ${{matrix.tag}}
|
||||
path: ${{runner.temp}}/release
|
||||
- id: install
|
||||
name: Install
|
||||
run: |
|
||||
cd ${{runner.temp}}/release
|
||||
unzip ${{matrix.tag}}.zip
|
||||
cd ${{matrix.tag}}
|
||||
chmod +x bun
|
||||
pwd >> $GITHUB_PATH
|
||||
./bun --version
|
||||
- id: test
|
||||
name: Test (node runner)
|
||||
env:
|
||||
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
|
||||
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
|
||||
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
|
||||
# if: ${{github.event.inputs.use_bun == 'false'}}
|
||||
run: |
|
||||
bun install
|
||||
bun install --cwd test
|
||||
bun install --cwd packages/bun-internal-test
|
||||
node packages/bun-internal-test/src/runner.node.mjs || true
|
||||
- name: Comment on PR
|
||||
if: steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
|
||||
uses: thollander/actions-comment-pull-request@v2
|
||||
with:
|
||||
comment_tag: test-failures-${{matrix.tag}}
|
||||
message: |
|
||||
❌ @${{ github.actor }} ${{ steps.test.outputs.failing_tests_count }} files with test failures on ${{ matrix.tag }}:
|
||||
|
||||
${{ steps.test.outputs.failing_tests }}
|
||||
|
||||
**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
|
||||
|
||||
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
|
||||
- name: Uncomment on PR
|
||||
if: steps.test.outputs.failing_tests == '' && github.event_name == 'pull_request'
|
||||
uses: thollander/actions-comment-pull-request@v2
|
||||
with:
|
||||
comment_tag: test-failures-${{matrix.tag}}
|
||||
mode: upsert
|
||||
create_if_not_exists: false
|
||||
message: |
|
||||
✅ test failures on ${{ matrix.tag }} have been resolved.
|
||||
|
||||
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
|
||||
- id: fail
|
||||
name: Fail the build
|
||||
if: steps.test.outputs.failing_tests != ''
|
||||
run: exit 1
|
||||
476 .github/workflows/bun-mac-x64.yml vendored Normal file
@@ -0,0 +1,476 @@
|
||||
name: bun-macOS-x64
|
||||
|
||||
concurrency:
|
||||
group: bun-macOS-x64-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
REGISTRY: ghcr.io
|
||||
IMAGE_NAME: ${{ github.repository }}
|
||||
TEST_TAG: bun-test
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main]
|
||||
paths:
|
||||
- "src/**/*"
|
||||
- "test/**/*"
|
||||
- "build.zig"
|
||||
- "Makefile"
|
||||
- "Dockerfile"
|
||||
pull_request:
|
||||
branches: [main]
|
||||
paths:
|
||||
- "src/**/*"
|
||||
- "test/**/*"
|
||||
- "build.zig"
|
||||
- "Makefile"
|
||||
- "Dockerfile"
|
||||
# Allows you to run this workflow manually from the Actions tab
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
macos-object-files:
|
||||
name: macOS Object
|
||||
runs-on: med-ubuntu
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
# - cpu: nehalem
|
||||
# arch: x86_64
|
||||
# tag: bun-obj-darwin-x64-baseline
|
||||
- cpu: haswell
|
||||
arch: x86_64
|
||||
tag: bun-obj-darwin-x64
|
||||
# - cpu: native
|
||||
# arch: aarch64
|
||||
# tag: bun-obj-darwin-aarch64
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
submodules: recursive
|
||||
- uses: docker/setup-buildx-action@v2
|
||||
id: buildx
|
||||
with:
|
||||
install: true
|
||||
- name: Run
|
||||
run: |
|
||||
rm -rf ${{runner.temp}}/release
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v3
|
||||
if: runner.arch == 'X64'
|
||||
with:
|
||||
context: .
|
||||
push: false
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=min
|
||||
build-args: |
|
||||
ARCH=${{ matrix.arch }}
|
||||
BUILDARCH=amd64
|
||||
BUILD_MACHINE_ARCH=x86_64
|
||||
CPU_TARGET=${{ matrix.cpu }}
|
||||
TRIPLET=${{matrix.arch}}-macos-none
|
||||
GIT_SHA=${{github.sha}}
|
||||
platforms: linux/amd64
|
||||
target: build_release_obj
|
||||
outputs: type=local,dest=${{runner.temp}}/release
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v3
|
||||
if: runner.arch == 'ARM64'
|
||||
with:
|
||||
context: .
|
||||
push: false
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=min
|
||||
build-args: |
|
||||
ARCH=${{ matrix.arch }}
|
||||
BUILDARCH=arm64
|
||||
BUILD_MACHINE_ARCH=aarch64
|
||||
CPU_TARGET=${{ matrix.cpu }}
|
||||
TRIPLET=${{matrix.arch}}-macos-none
|
||||
GIT_SHA=${{github.sha}}
|
||||
platforms: linux/arm64
|
||||
target: build_release_obj
|
||||
outputs: type=local,dest=${{runner.temp}}/release
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ${{ matrix.tag }}
|
||||
path: ${{runner.temp}}/release/bun.o
|
||||
macOS-cpp:
|
||||
name: macOS C++
|
||||
runs-on: ${{ matrix.runner }}
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
timeout-minutes: 90
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
# - cpu: nehalem
|
||||
# arch: x86_64
|
||||
# tag: bun-darwin-x64-baseline
|
||||
# obj: bun-obj-darwin-x64-baseline
|
||||
# runner: macos-11
|
||||
# artifact: bun-obj-darwin-x64-baseline
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# dependencies: true
|
||||
# compile_obj: false
|
||||
- cpu: haswell
|
||||
arch: x86_64
|
||||
tag: bun-darwin-x64
|
||||
obj: bun-obj-darwin-x64
|
||||
runner: macos-11
|
||||
artifact: bun-obj-darwin-x64
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
dependencies: true
|
||||
compile_obj: false
|
||||
# - cpu: nehalem
|
||||
# arch: x86_64
|
||||
# tag: bun-darwin-x64-baseline
|
||||
# obj: bun-obj-darwin-x64-baseline
|
||||
# runner: macos-11
|
||||
# artifact: bun-obj-darwin-x64-baseline
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# dependencies: false
|
||||
# compile_obj: true
|
||||
- cpu: haswell
|
||||
arch: x86_64
|
||||
tag: bun-darwin-x64
|
||||
obj: bun-obj-darwin-x64
|
||||
runner: macos-11
|
||||
artifact: bun-obj-darwin-x64
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
dependencies: false
|
||||
compile_obj: true
|
||||
# - cpu: native
|
||||
# arch: aarch64
|
||||
# tag: bun-darwin-aarch64
|
||||
# obj: bun-obj-darwin-aarch64
|
||||
# artifact: bun-obj-darwin-aarch64
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-arm64-lto.tar.gz"
|
||||
# runner: macos-arm64
|
||||
# dependencies: true
|
||||
# compile_obj: true
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Checkout submodules
|
||||
run: git submodule update --init --recursive --depth=1 --progress -j $(sysctl -n hw.ncpu)
|
||||
- name: Install dependencies
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
|
||||
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
|
||||
HOMEBREW_NO_AUTO_UPDATE: 1
|
||||
HOMEBREW_NO_INSTALL_CLEANUP: 1
|
||||
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
|
||||
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
run: |
|
||||
brew install rust ccache llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
|
||||
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix llvm@15)/bin" >> $GITHUB_PATH
|
||||
brew link --overwrite llvm@15
|
||||
- name: Download WebKit
|
||||
if: matrix.compile_obj
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
|
||||
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
|
||||
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
|
||||
run: |
|
||||
rm -rf $JSC_BASE_DIR
|
||||
mkdir -p $JSC_BASE_DIR
|
||||
curl -L ${{ matrix.webkit_url }} | tar -xz -C $JSC_BASE_DIR --strip-components=1
|
||||
- name: ccache (dependencies)
|
||||
uses: hendrikmuhs/ccache-action@v1.2
|
||||
if: matrix.dependencies
|
||||
with:
|
||||
key: ${{ runner.os }}-ccache-${{ matrix.tag }}-dependencies
|
||||
restore-keys: ${{ runner.os }}-ccache-${{ matrix.tag }}-dependencies
|
||||
- name: ccache (c++)
|
||||
uses: hendrikmuhs/ccache-action@v1.2
|
||||
if: matrix.compile_obj
|
||||
with:
|
||||
key: ${{ runner.os }}-ccache-${{ matrix.tag }}-obj
|
||||
restore-keys: ${{ runner.os }}-ccache-${{ matrix.tag }}-obj
|
||||
- name: Compile dependencies
|
||||
if: matrix.dependencies
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
|
||||
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
|
||||
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
|
||||
run: |
|
||||
mkdir -p $OBJ_DIR $BUN_DEPS_OUT_DIR
|
||||
make vendor-without-check
|
||||
- name: Compile C++
|
||||
if: matrix.compile_obj
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
|
||||
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
|
||||
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
|
||||
run: |
|
||||
mkdir -p $OBJ_DIR $BUN_DEPS_OUT_DIR
|
||||
make -j $(sysctl -n hw.ncpu) release-bindings
|
||||
- name: Upload C++
|
||||
if: matrix.compile_obj
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ${{ matrix.tag }}-cpp
|
||||
path: ${{ runner.temp }}/bun-cpp-obj
|
||||
- name: Upload Dependencies
|
||||
if: matrix.dependencies
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ${{ matrix.tag }}-deps
|
||||
path: ${{runner.temp}}/bun-deps
|
||||
|
||||
macOS:
|
||||
name: macOS Link
|
||||
runs-on: ${{ matrix.runner }}
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
needs: [macOS-cpp, macos-object-files]
|
||||
timeout-minutes: 90
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
# - cpu: nehalem
|
||||
# arch: x86_64
|
||||
# tag: bun-darwin-x64-baseline
|
||||
# obj: bun-obj-darwin-x64-baseline
|
||||
# package: bun-darwin-x64
|
||||
# runner: macos-11
|
||||
# artifact: bun-obj-darwin-x64-baseline
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
- cpu: haswell
|
||||
arch: x86_64
|
||||
tag: bun-darwin-x64
|
||||
obj: bun-obj-darwin-x64
|
||||
package: bun-darwin-x64
|
||||
runner: macos-11
|
||||
artifact: bun-obj-darwin-x64
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# - cpu: native
|
||||
# arch: aarch64
|
||||
# tag: bun-darwin-aarch64
|
||||
# obj: bun-obj-darwin-aarch64
|
||||
# package: bun-darwin-aarch64
|
||||
# artifact: bun-obj-darwin-aarch64
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-arm64-lto.tar.gz"
|
||||
# runner: macos-arm64
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Checkout submodules
|
||||
run: git submodule update --init --recursive --depth=1 --progress -j $(sysctl -n hw.ncpu)
|
||||
- name: Install dependencies
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
|
||||
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
|
||||
HOMEBREW_NO_AUTO_UPDATE: 1
|
||||
HOMEBREW_NO_INSTALL_CLEANUP: 1
|
||||
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
|
||||
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
run: |
|
||||
brew install rust ccache llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
|
||||
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix llvm@15)/bin" >> $GITHUB_PATH
|
||||
brew link --overwrite llvm@15
|
||||
- name: Download WebKit
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
|
||||
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
|
||||
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
BUN_RELEASE_DIR: ${{ runner.temp }}/release
|
||||
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
|
||||
run: |
|
||||
rm -rf $JSC_BASE_DIR
|
||||
mkdir -p $JSC_BASE_DIR
|
||||
curl -L ${{ matrix.webkit_url }} | tar -xz -C $JSC_BASE_DIR --strip-components=1
|
||||
- name: Download C++
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: ${{ matrix.tag }}-cpp
|
||||
path: ${{ runner.temp }}/bun-cpp-obj
|
||||
- name: Download Dependencies
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: ${{ matrix.tag }}-deps
|
||||
path: ${{ runner.temp }}/bun-deps
|
||||
- name: Download Object
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: ${{ matrix.obj }}
|
||||
path: ${{ runner.temp }}/release
|
||||
- name: ccache (link)
|
||||
uses: hendrikmuhs/ccache-action@v1.2
|
||||
with:
|
||||
key: ${{ runner.os }}-ccache-${{ matrix.tag }}-link
|
||||
restore-keys: ${{ runner.os }}-ccache-${{ matrix.tag }}-link
|
||||
- name: Link
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
|
||||
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
|
||||
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
BUN_RELEASE_DIR: ${{ runner.temp }}/release
|
||||
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
|
||||
run: |
|
||||
rm -rf packages/${{ matrix.package }}
|
||||
mkdir -p packages/${{ matrix.package }}
|
||||
mv ${{ runner.temp }}/release/* packages/${{ matrix.package }}/
|
||||
make bun-link-lld-release copy-to-bun-release-dir-bin
|
||||
- name: Zip
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
|
||||
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
|
||||
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
BUN_RELEASE_DIR: ${{ runner.temp }}/release
|
||||
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
|
||||
run: |
|
||||
cd ${{runner.temp}}/release
|
||||
chmod +x bun-profile bun
|
||||
|
||||
mkdir ${{matrix.tag}}-profile
|
||||
mkdir ${{matrix.tag}}
|
||||
|
||||
/usr/bin/strip -S bun
|
||||
|
||||
mv bun-profile ${{matrix.tag}}-profile/bun-profile
|
||||
mv bun ${{matrix.tag}}/bun
|
||||
|
||||
zip -r ${{matrix.tag}}-profile.zip ${{matrix.tag}}-profile
|
||||
zip -r ${{matrix.tag}}.zip ${{matrix.tag}}
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ${{matrix.tag}}-profile
|
||||
path: ${{runner.temp}}/release/${{matrix.tag}}-profile.zip
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ${{matrix.tag}}
|
||||
path: ${{runner.temp}}/release/${{matrix.tag}}.zip
|
||||
- name: Release
|
||||
id: release
|
||||
uses: ncipollo/release-action@v1
|
||||
if: |
|
||||
github.repository_owner == 'oven-sh'
|
||||
&& github.ref == 'refs/heads/main'
|
||||
with:
|
||||
prerelease: true
|
||||
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
|
||||
allowUpdates: true
|
||||
replacesArtifacts: true
|
||||
generateReleaseNotes: true
|
||||
artifactErrorsFailBuild: true
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
name: "Canary (${{github.sha}})"
|
||||
tag: "canary"
|
||||
artifacts: "${{runner.temp}}/release/${{matrix.tag}}.zip,${{runner.temp}}/release/${{matrix.tag}}-profile.zip"
|
||||
macOS-test:
|
||||
name: Tests ${{matrix.tag}}
|
||||
runs-on: ${{ matrix.runner }}
|
||||
needs: [macOS]
|
||||
if: github.event_name == 'pull_request' && github.repository_owner == 'oven-sh'
|
||||
timeout-minutes: 30
|
||||
outputs:
|
||||
failing_tests: ${{ steps.test.outputs.failing_tests }}
|
||||
failing_tests_count: ${{ steps.test.outputs.failing_tests_count }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- tag: bun-darwin-x64
|
||||
runner: macos-11
|
||||
steps:
|
||||
- id: checkout
|
||||
name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
submodules: false
|
||||
- id: download
|
||||
name: Download
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: ${{matrix.tag}}
|
||||
path: ${{runner.temp}}/release
|
||||
- id: install
|
||||
name: Install
|
||||
run: |
|
||||
cd ${{runner.temp}}/release
|
||||
unzip ${{matrix.tag}}.zip
|
||||
cd ${{matrix.tag}}
|
||||
chmod +x bun
|
||||
pwd >> $GITHUB_PATH
|
||||
./bun --version
|
||||
- id: test
|
||||
name: Test (node runner)
|
||||
env:
|
||||
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
|
||||
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
|
||||
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
|
||||
# if: ${{github.event.inputs.use_bun == 'false'}}
|
||||
run: |
|
||||
bun install
|
||||
bun install --cwd test
|
||||
bun install --cwd packages/bun-internal-test
|
||||
node packages/bun-internal-test/src/runner.node.mjs || true
|
||||
- name: Comment on PR
|
||||
if: steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
|
||||
uses: thollander/actions-comment-pull-request@v2
|
||||
with:
|
||||
comment_tag: test-failures-${{matrix.tag}}
|
||||
message: |
|
||||
❌ @${{ github.actor }} ${{ steps.test.outputs.failing_tests_count }} files with test failures on ${{ matrix.tag }}:
|
||||
|
||||
${{ steps.test.outputs.failing_tests }}
|
||||
|
||||
**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
|
||||
|
||||
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
|
||||
- name: Uncomment on PR
|
||||
if: steps.test.outputs.failing_tests == '' && github.event_name == 'pull_request'
|
||||
uses: thollander/actions-comment-pull-request@v2
|
||||
with:
|
||||
comment_tag: test-failures-${{matrix.tag}}
|
||||
mode: upsert
|
||||
create_if_not_exists: false
|
||||
message: |
|
||||
✅ test failures on ${{ matrix.tag }} have been resolved.
|
||||
|
||||
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
|
||||
- id: fail
|
||||
name: Fail the build
|
||||
if: steps.test.outputs.failing_tests != ''
|
||||
run: exit 1
|
||||
179 .github/workflows/bun-release-canary.yml vendored Normal file
@@ -0,0 +1,179 @@
name: bun-release-canary
concurrency: release-canary
on:
  schedule:
    - cron: "0 14 * * *" # every day at 6am PST
  workflow_dispatch:
jobs:
  sign:
    name: Sign Release
    runs-on: ubuntu-latest
    if: github.repository_owner == 'oven-sh'
    defaults:
      run:
        working-directory: packages/bun-release
    steps:
      - id: checkout
        name: Checkout
        uses: actions/checkout@v3
      - id: setup-gpg
        name: Setup GPG
        uses: crazy-max/ghaction-import-gpg@v5
        with:
          gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
          passphrase: ${{ secrets.GPG_PASSPHRASE }}
      - id: setup-bun
        name: Setup Bun
        uses: oven-sh/setup-bun@v1
        with:
          bun-version: canary
      - id: bun-install
        name: Install Dependencies
        run: bun install
      - id: bun-run
        name: Sign Release
        run: |
          echo "$GPG_PASSPHRASE" | bun upload-assets -- "canary"
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
  npm:
    name: Release to NPM
    runs-on: ubuntu-latest
    needs: sign
    if: github.repository_owner == 'oven-sh'
    defaults:
      run:
        working-directory: packages/bun-release
    steps:
      - id: checkout
        name: Checkout
        uses: actions/checkout@v3
      - id: setup-bun
        name: Setup Bun
        uses: oven-sh/setup-bun@v1
        with:
          bun-version: canary
      - id: bun-install
        name: Install Dependencies
        run: bun install
      - id: bun-run
        name: Release
        run: bun upload-npm -- canary publish
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
  # npm-types:
  #   name: Release types to NPM
  #   runs-on: ubuntu-latest
  #   defaults:
  #     run:
  #       working-directory: packages/bun-types
  #   steps:
  #     - id: checkout
  #       name: Checkout
  #       uses: actions/checkout@v3
  #     - id: setup-node
  #       name: Setup Node.js
  #       uses: actions/setup-node@v3
  #       with:
  #         node-version: latest
  #     - id: setup-bun
  #       name: Setup Bun
  #       uses: oven-sh/setup-bun@v1
  #       with:
  #         bun-version: canary
  #     - id: bun-install
  #       name: Install Dependencies
  #       run: bun install
  #     - id: setup-env
  #       name: Setup Environment
  #       run: |
  #         SHA=$(git rev-parse --short "$GITHUB_SHA")
  #         VERSION=$(bun --version)
  #         TAG="${VERSION}-canary.$(date '+%Y%m%d').1+${SHA}"
  #         echo "Setup tag: ${TAG}"
  #         echo "TAG=${TAG}" >> ${GITHUB_ENV}
  #     - id: bun-run
  #       name: Build
  #       run: bun run build
  #       env:
  #         BUN_VERSION: ${{ env.TAG }}
  #     - id: npm-publish
  #       name: Release
  #       uses: JS-DevTools/npm-publish@v1
  #       with:
  #         package: packages/bun-types/dist/package.json
  #         token: ${{ secrets.NPM_TOKEN }}
  #         tag: canary
  docker:
    name: Release to Dockerhub
    runs-on: ubuntu-latest
    needs: sign
    if: github.repository_owner == 'oven-sh'
    steps:
      - id: checkout
        name: Checkout
        uses: actions/checkout@v3
      - id: qemu
        name: Setup Docker QEMU
        uses: docker/setup-qemu-action@v2
      - id: buildx
        name: Setup Docker buildx
        uses: docker/setup-buildx-action@v2
        with:
          platforms: linux/amd64,linux/arm64
      - id: metadata
        name: Setup Docker metadata
        uses: docker/metadata-action@v4
        with:
          images: oven/bun
          tags: canary
      - id: login
        name: Login to Docker
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}
      - id: push
        name: Push to Docker
        uses: docker/build-push-action@v3
        with:
          context: ./dockerhub
          file: ./dockerhub/Dockerfile-debian
          platforms: linux/amd64,linux/arm64
          builder: ${{ steps.buildx.outputs.name }}
          push: true
          tags: ${{ steps.metadata.outputs.tags }}
          labels: ${{ steps.metadata.outputs.labels }}
          build-args: |
            BUN_VERSION=canary
  s3:
    name: Upload to S3
    runs-on: ubuntu-latest
    needs: sign
    if: github.repository_owner == 'oven-sh'
    defaults:
      run:
        working-directory: packages/bun-release
    steps:
      - id: checkout
        name: Checkout
        uses: actions/checkout@v3
      - id: setup-bun
        name: Setup Bun
        uses: oven-sh/setup-bun@v1
        with:
          bun-version: canary
      - id: bun-install
        name: Install Dependencies
        run: bun install
      - id: bun-run
        name: Release
        run: bun upload-s3 -- canary
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY}}
          AWS_ENDPOINT: ${{ secrets.AWS_ENDPOINT }}
          AWS_BUCKET: bun
54 .github/workflows/bun-release-types-canary.yml vendored Normal file
@@ -0,0 +1,54 @@
name: bun-release-types-canary
concurrency: release-canary
on:
  push:
    branches:
      - main
    paths:
      - "packages/bun-types/**"
  workflow_dispatch:
jobs:
  npm-types:
    name: Release types to NPM
    runs-on: ubuntu-latest
    if: github.repository_owner == 'oven-sh'
    defaults:
      run:
        working-directory: packages/bun-types
    steps:
      - id: checkout
        name: Checkout
        uses: actions/checkout@v3
      - id: setup-node
        name: Setup Node.js
        uses: actions/setup-node@v3
        with:
          node-version: latest
      - id: setup-bun
        name: Setup Bun
        uses: oven-sh/setup-bun@v1
        with:
          bun-version: canary
      - id: bun-install
        name: Install Dependencies
        run: bun install
      - id: setup-env
        name: Setup Environment
        run: |
          SHA=$(git rev-parse --short "$GITHUB_SHA")
          VERSION=$(bun --version)
          TAG="${VERSION}-canary.$(date +'%Y%m%dT%H%M%S')"
          echo "Setup tag: ${TAG}"
          echo "TAG=${TAG}" >> ${GITHUB_ENV}
      - id: bun-run
        name: Build
        run: bun run build
        env:
          BUN_VERSION: ${{ env.TAG }}
      - id: npm-publish
        name: Release
        uses: JS-DevTools/npm-publish@v1
        with:
          package: packages/bun-types/dist/package.json
          token: ${{ secrets.NPM_TOKEN }}
          tag: canary
257 .github/workflows/bun-release.yml vendored Normal file
@@ -0,0 +1,257 @@
|
||||
name: bun-release
|
||||
concurrency: release
|
||||
on:
|
||||
release:
|
||||
types:
|
||||
- published
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
tag:
|
||||
type: string
|
||||
description: The tag to publish
|
||||
required: true
|
||||
jobs:
|
||||
sign:
|
||||
name: Sign Release
|
||||
runs-on: ubuntu-latest
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
defaults:
|
||||
run:
|
||||
working-directory: packages/bun-release
|
||||
steps:
|
||||
- id: checkout
|
||||
name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
- id: setup-env
|
||||
name: Setup Environment
|
||||
run: |
|
||||
TAG="${{ github.event.inputs.tag }}"
|
||||
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
|
||||
echo "Setup tag: ${TAG}"
|
||||
echo "TAG=${TAG}" >> ${GITHUB_ENV}
|
||||
- id: setup-gpg
|
||||
name: Setup GPG
|
||||
uses: crazy-max/ghaction-import-gpg@v5
|
||||
with:
|
||||
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
|
||||
passphrase: ${{ secrets.GPG_PASSPHRASE }}
|
||||
- id: setup-bun
|
||||
name: Setup Bun
|
||||
uses: oven-sh/setup-bun@v1
|
||||
with:
|
||||
bun-version: canary
|
||||
- id: bun-install
|
||||
name: Install Dependencies
|
||||
run: bun install
|
||||
- id: bun-run
|
||||
name: Sign Release
|
||||
run: |
|
||||
echo "$GPG_PASSPHRASE" | bun upload-assets -- "${{ env.TAG }}"
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
|
||||
npm:
|
||||
name: Release to NPM
|
||||
runs-on: ubuntu-latest
|
||||
needs: sign
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
defaults:
|
||||
run:
|
||||
working-directory: packages/bun-release
|
||||
steps:
|
||||
- id: checkout
|
||||
name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
- id: setup-env
|
||||
name: Setup Environment
|
||||
run: |
|
||||
TAG="${{ github.event.inputs.tag }}"
|
||||
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
|
||||
echo "Setup tag: ${TAG}"
|
||||
echo "TAG=${TAG}" >> ${GITHUB_ENV}
|
||||
- id: setup-bun
|
||||
name: Setup Bun
|
||||
uses: oven-sh/setup-bun@v1
|
||||
with:
|
||||
bun-version: canary
|
||||
- id: bun-install
|
||||
name: Install Dependencies
|
||||
run: bun install
|
||||
- id: bun-run
|
||||
name: Release
|
||||
run: bun upload-npm -- "${{ env.TAG }}" publish
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
|
||||
npm-types:
|
||||
name: Release types to NPM
|
||||
runs-on: ubuntu-latest
|
||||
needs: sign
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
defaults:
|
||||
run:
|
||||
working-directory: packages/bun-types
|
||||
steps:
|
||||
- id: checkout
|
||||
name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
- id: setup-env
|
||||
name: Setup Environment
|
||||
run: |
|
||||
TAG="${{ github.event.inputs.tag }}"
|
||||
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
|
||||
echo "Setup tag: ${TAG}"
|
||||
echo "TAG=${TAG}" >> ${GITHUB_ENV}
|
||||
- id: setup-node
|
||||
name: Setup Node.js
|
||||
uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: latest
|
||||
- id: setup-bun
|
||||
name: Setup Bun
|
||||
uses: oven-sh/setup-bun@v1
|
||||
with:
|
||||
bun-version: canary
|
||||
- id: bun-install
|
||||
name: Install Dependencies
|
||||
run: bun install
|
||||
- id: bun-run
|
||||
name: Build
|
||||
run: bun run build
|
||||
env:
|
||||
BUN_VERSION: ${{ env.TAG }}
|
||||
- id: npm-publish
|
||||
name: Release
|
||||
uses: JS-DevTools/npm-publish@v1
|
||||
with:
|
||||
package: packages/bun-types/dist/package.json
|
||||
token: ${{ secrets.NPM_TOKEN }}
|
||||
docker:
|
||||
name: Release to Dockerhub
|
||||
runs-on: ubuntu-latest
|
||||
needs: sign
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
steps:
|
||||
- id: checkout
|
||||
name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
- id: environment
|
||||
name: Setup Environment
|
||||
run: |
|
||||
TAG="${{ github.event.inputs.tag }}"
|
||||
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
|
||||
echo "Setup tag: ${TAG}"
|
||||
echo "TAG=${TAG}" >> ${GITHUB_ENV}
|
||||
- id: qemu
|
||||
name: Setup Docker QEMU
|
||||
uses: docker/setup-qemu-action@v2
|
||||
- id: buildx
|
||||
name: Setup Docker buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
with:
|
||||
platforms: linux/amd64,linux/arm64
|
||||
- id: metadata
|
||||
name: Setup Docker metadata
|
||||
uses: docker/metadata-action@v4
|
||||
with:
|
||||
images: oven/bun
|
||||
tags: |
|
||||
type=match,pattern=(bun-v)?(\d+.\d+.\d+),group=2,value=${{ env.TAG }}
|
||||
type=match,pattern=(bun-v)?(\d+.\d+),group=2,value=${{ env.TAG }}
|
||||
- id: login
|
||||
name: Login to Docker
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||
- id: push
|
||||
name: Push to Docker
|
||||
uses: docker/build-push-action@v3
|
||||
with:
|
||||
context: ./dockerhub
|
||||
file: ./dockerhub/Dockerfile-debian
|
||||
platforms: linux/amd64,linux/arm64
|
||||
builder: ${{ steps.buildx.outputs.name }}
|
||||
push: true
|
||||
tags: ${{ steps.metadata.outputs.tags }}
|
||||
labels: ${{ steps.metadata.outputs.labels }}
|
||||
build-args: |
|
||||
BUN_VERSION=${{ env.TAG }}
|
||||
homebrew:
|
||||
name: Release to Homebrew
|
||||
runs-on: ubuntu-latest
|
||||
needs: sign
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
steps:
|
||||
- id: checkout
|
||||
name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
repository: oven-sh/homebrew-bun
|
||||
token: ${{ secrets.ROBOBUN_TOKEN }}
|
||||
- id: setup-gpg
|
||||
name: Setup GPG
|
||||
uses: crazy-max/ghaction-import-gpg@v5
|
||||
with:
|
||||
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
|
||||
passphrase: ${{ secrets.GPG_PASSPHRASE }}
|
||||
- id: setup-env
|
||||
name: Setup Environment
|
||||
run: |
|
||||
TAG="${{ github.event.inputs.tag }}"
|
||||
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
|
||||
echo "Setup tag: ${TAG}"
|
||||
echo "TAG=${TAG}" >> ${GITHUB_ENV}
|
||||
- id: setup-ruby
|
||||
name: Setup Ruby
|
||||
uses: ruby/setup-ruby@v1
|
||||
with:
|
||||
ruby-version: "2.6"
|
||||
- id: update-tap
|
||||
name: Update Tap
|
||||
run: ruby scripts/release.rb "${{ env.TAG }}"
|
||||
- id: commit-tap
|
||||
name: Commit Tap
|
||||
uses: stefanzweifel/git-auto-commit-action@v4
|
||||
with:
|
||||
commit_options: --gpg-sign=${{ steps.setup-gpg.outputs.keyid }}
|
||||
commit_message: Release ${{ env.TAG }}
|
||||
commit_user_name: robobun
|
||||
commit_user_email: robobun@oven.sh
|
||||
commit_author: robobun <robobun@oven.sh>
|
||||
s3:
|
||||
name: Upload to S3
|
||||
runs-on: ubuntu-latest
|
||||
needs: sign
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
defaults:
|
||||
run:
|
||||
working-directory: packages/bun-release
|
||||
steps:
|
||||
- id: checkout
|
||||
name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
- id: setup-env
|
||||
name: Setup Environment
|
||||
run: |
|
||||
TAG="${{ github.event.inputs.tag }}"
|
||||
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
|
||||
echo "Setup tag: ${TAG}"
|
||||
echo "TAG=${TAG}" >> ${GITHUB_ENV}
|
||||
- id: setup-bun
|
||||
name: Setup Bun
|
||||
uses: oven-sh/setup-bun@v1
|
||||
with:
|
||||
bun-version: canary
|
||||
- id: bun-install
|
||||
name: Install Dependencies
|
||||
run: bun install
|
||||
- id: bun-run
|
||||
name: Release
|
||||
run: bun upload-s3 -- "${{ env.TAG }}"
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY}}
|
||||
AWS_ENDPOINT: ${{ secrets.AWS_ENDPOINT }}
|
||||
AWS_BUCKET: bun
|
||||
41 .github/workflows/bun-types-tests.yml vendored Normal file
@@ -0,0 +1,41 @@
name: bun-types
on:
  push:
    paths:
      - "packages/bun-types/**"
    branches: [main]
  pull_request:
    paths:
      - "packages/bun-types/**"

jobs:
  tests:
    name: type-tests
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: packages/bun-types

    steps:
      - name: Checkout repo
        uses: actions/checkout@v3

      - name: Install bun
        uses: oven-sh/setup-bun@v1
        with:
          bun-version: canary

      - name: Install node
        uses: actions/setup-node@v3
        with:
          node-version: latest

      - name: Install dependencies
        run: |
          bun install

      - name: Generate package
        run: bun run build

      - name: Tests
        run: bun run test
34 .github/workflows/claude-dedupe-issues.yml vendored
@@ -1,34 +0,0 @@
name: Claude Issue Dedupe
on:
  issues:
    types: [opened]
  workflow_dispatch:
    inputs:
      issue_number:
        description: 'Issue number to process for duplicate detection'
        required: true
        type: string

jobs:
  claude-dedupe-issues:
    runs-on: ubuntu-latest
    timeout-minutes: 10
    concurrency:
      group: claude-dedupe-issues-${{ github.event.issue.number || inputs.issue_number }}
      cancel-in-progress: true
    permissions:
      contents: read
      issues: write

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Run Claude Code slash command
        uses: anthropics/claude-code-base-action@beta
        with:
          prompt: "/dedupe ${{ github.repository }}/issues/${{ github.event.issue.number || inputs.issue_number }}"
          anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}
          claude_args: "--model claude-sonnet-4-5-20250929"
          claude_env: |
            GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
85 .github/workflows/comment-lint.yml.disabled vendored
@@ -1,85 +0,0 @@
name: C++ Linter comment

permissions:
  actions: read
  pull-requests: write

on:
  workflow_run:
    workflows:
      - lint-cpp
    types:
      - completed

jobs:
  comment-lint:
    if: ${{ github.repository_owner == 'oven-sh' }}
    name: Comment
    runs-on: ubuntu-latest
    steps:
      - name: Download Comment
        uses: actions/download-artifact@v4
        with:
          name: format.log
          github-token: ${{ github.token }}
          run-id: ${{ github.event.workflow_run.id }}
      - name: PR Number
        uses: actions/download-artifact@v4
        with:
          name: pr-number.txt
          github-token: ${{ github.token }}
          run-id: ${{ github.event.workflow_run.id }}
      - name: Did Fail
        uses: actions/download-artifact@v4
        with:
          name: did_fail.txt
          github-token: ${{ github.token }}
          run-id: ${{ github.event.workflow_run.id }}
      - name: Setup Environment
        id: env
        shell: bash
        run: |
          # Copy to outputs
          echo "pr-number=$(cat pr-number.txt)" >> $GITHUB_OUTPUT
          {
            echo 'text_output<<EOF'
            cat format.log
            echo EOF
          } >> "$GITHUB_OUTPUT"
          echo "did_fail=$(cat did_fail.txt)" >> $GITHUB_OUTPUT

      - name: Find Comment
        id: comment
        uses: peter-evans/find-comment@v3
        with:
          issue-number: ${{ steps.env.outputs.pr-number }}
          comment-author: github-actions[bot]
          body-includes: <!-- generated-comment lint-cpp-workflow=${{ github.workflow }} -->
      - name: Update Comment
        uses: peter-evans/create-or-update-comment@v4
        if: steps.env.outputs.did_fail != '0'
        with:
          comment-id: ${{ steps.comment.outputs.comment-id }}
          issue-number: ${{ steps.env.outputs.pr-number }}
          body: |
            @${{ github.actor }}, `clang-tidy` had something to share with you about your code:

            ```cpp
            ${{ steps.env.outputs.text_output }}
            ```

            Commit: ${{ github.event.workflow_run.head_sha || github.sha }}

            <!-- generated-comment lint-cpp-workflow=${{ github.workflow }} -->
          edit-mode: replace
      - name: Update Previous Comment
        uses: peter-evans/create-or-update-comment@v4
        if: steps.env.outputs.did_fail == '0' && steps.comment.outputs.comment-id != ''
        with:
          comment-id: ${{ steps.comment.outputs.comment-id }}
          issue-number: ${{ steps.env.outputs.pr-number }}
          body: |
            clang-tidy nits are fixed! Thank you.

            <!-- generated-comment lint-cpp-workflow=${{ github.workflow }} -->
          edit-mode: replace
109 .github/workflows/format.yml vendored
@@ -1,109 +0,0 @@
name: autofix.ci

permissions:
  contents: read

on:
  workflow_call:
  workflow_dispatch:
  pull_request:
  merge_group:
env:
  BUN_VERSION: "1.3.2"
  LLVM_VERSION: "19.1.7"
  LLVM_VERSION_MAJOR: "19"

jobs:
  autofix:
    name: Format
    runs-on: ubuntu-latest
    concurrency:
      group: ${{ github.workflow }}-${{ github.ref }}
      cancel-in-progress: true
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Configure Git
        run: |
          git config --global core.autocrlf true
          git config --global core.ignorecase true
          git config --global core.precomposeUnicode true
      - name: Setup Bun
        uses: ./.github/actions/setup-bun
        with:
          bun-version: ${{ env.BUN_VERSION }}
      - name: Setup Dependencies
        run: |
          bun install
          bun scripts/glob-sources.mjs
      - name: Format Code
        run: |
          # Start prettier in background with prefixed output
          echo "::group::Prettier"
          (bun run prettier 2>&1 | sed 's/^/[prettier] /' || echo "[prettier] Failed with exit code $?") &
          PRETTIER_PID=$!

          # Start clang-format installation and formatting in background with prefixed output
          echo "::group::Clang-format"
          (
            echo "[clang-format] Installing clang-format-${{ env.LLVM_VERSION_MAJOR }}..."
            wget -qO- https://apt.llvm.org/llvm-snapshot.gpg.key | sudo tee /etc/apt/trusted.gpg.d/apt.llvm.org.asc > /dev/null
            echo "deb http://apt.llvm.org/$(lsb_release -cs)/ llvm-toolchain-$(lsb_release -cs)-${{ env.LLVM_VERSION_MAJOR }} main" | sudo tee /etc/apt/sources.list.d/llvm.list > /dev/null
            sudo apt-get update -qq
            sudo apt-get install -y -qq --no-install-recommends --no-install-suggests -o=Dpkg::Use-Pty=0 clang-format-${{ env.LLVM_VERSION_MAJOR }}
            echo "[clang-format] Running clang-format..."
            LLVM_VERSION_MAJOR=${{ env.LLVM_VERSION_MAJOR }} ./scripts/run-clang-format.sh format 2>&1 | sed 's/^/[clang-format] /'
          ) &
          CLANG_PID=$!

          # Setup Zig in temp directory and run zig fmt in background with prefixed output
          echo "::group::Zig fmt"
          (
            ZIG_TEMP=$(mktemp -d)
            echo "[zig] Downloading Zig (musl build)..."
            wget -q -O "$ZIG_TEMP/zig.zip" https://github.com/oven-sh/zig/releases/download/autobuild-e0b7c318f318196c5f81fdf3423816a7b5bb3112/bootstrap-x86_64-linux-musl.zip
            unzip -q -d "$ZIG_TEMP" "$ZIG_TEMP/zig.zip"
            export PATH="$ZIG_TEMP/bootstrap-x86_64-linux-musl:$PATH"
            echo "[zig] Running zig fmt..."
            zig fmt src 2>&1 | sed 's/^/[zig] /'
            ./scripts/sort-imports.ts src 2>&1 | sed 's/^/[zig] /'
            zig fmt src 2>&1 | sed 's/^/[zig] /'
            rm -rf "$ZIG_TEMP"
          ) &
          ZIG_PID=$!

          # Wait for all formatting tasks to complete
          echo ""
          echo "Running formatters in parallel..."
          FAILED=0

          if ! wait $PRETTIER_PID; then
            echo "::error::Prettier failed"
            FAILED=1
          fi
          echo "::endgroup::"

          if ! wait $CLANG_PID; then
            echo "::error::Clang-format failed"
            FAILED=1
          fi
          echo "::endgroup::"

          if ! wait $ZIG_PID; then
            echo "::error::Zig fmt failed"
            FAILED=1
          fi
          echo "::endgroup::"

          # Exit with error if any formatter failed
          if [ $FAILED -eq 1 ]; then
            echo "::error::One or more formatters failed"
            exit 1
          fi

          echo "✅ All formatters completed successfully"
      - name: Ban Words
        run: |
          bun ./test/internal/ban-words.test.ts
          git rm -f cmake/sources/*.txt || true
      - uses: autofix-ci/action@635ffb0c9798bd160680f18fd73371e355b85f27
278 .github/workflows/labeled.yml.disabled vendored
@@ -1,278 +0,0 @@
|
||||
name: Issue Labeled
|
||||
env:
|
||||
BUN_VERSION: 1.1.44
|
||||
|
||||
on:
|
||||
issues:
|
||||
types: [labeled]
|
||||
pull_request_target:
|
||||
types: [labeled, opened, reopened, synchronize, unlabeled]
|
||||
|
||||
jobs:
|
||||
# on-bug:
|
||||
# runs-on: ubuntu-latest
|
||||
# if: github.event.label.name == 'bug' || github.event.label.name == 'crash'
|
||||
# permissions:
|
||||
# issues: write
|
||||
# steps:
|
||||
# - name: Checkout
|
||||
# uses: actions/checkout@v4
|
||||
# with:
|
||||
# sparse-checkout: |
|
||||
# scripts
|
||||
# .github
|
||||
# CMakeLists.txt
|
||||
# - name: Setup Bun
|
||||
# uses: ./.github/actions/setup-bun
|
||||
# with:
|
||||
# bun-version: ${{ env.BUN_VERSION }}
|
||||
# - name: "categorize bug"
|
||||
# id: add-labels
|
||||
# env:
|
||||
# GITHUB_ISSUE_BODY: ${{ github.event.issue.body }}
|
||||
# GITHUB_ISSUE_TITLE: ${{ github.event.issue.title }}
|
||||
# ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
|
||||
# shell: bash
|
||||
# run: |
|
||||
# echo '{"dependencies": { "@anthropic-ai/sdk": "latest" }}' > scripts/package.json && bun install --cwd=./scripts
|
||||
# LABELS=$(bun scripts/label-issue.ts)
|
||||
# echo "labels=$LABELS" >> $GITHUB_OUTPUT
|
||||
# - name: Add labels
|
||||
# uses: actions-cool/issues-helper@v3
|
||||
# if: steps.add-labels.outputs.labels != ''
|
||||
# with:
|
||||
# actions: "add-labels"
|
||||
# token: ${{ secrets.GITHUB_TOKEN }}
|
||||
# issue-number: ${{ github.event.issue.number }}
|
||||
# labels: ${{ steps.add-labels.outputs.labels }}
|
||||
on-slop:
|
||||
runs-on: ubuntu-latest
|
||||
if: github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'slop')
|
||||
permissions:
|
||||
issues: write
|
||||
pull-requests: write
|
||||
contents: write
|
||||
steps:
|
||||
- name: Update PR title and body for slop and close
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
const pr = await github.rest.pulls.get({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
pull_number: context.issue.number
|
||||
});
|
||||
|
||||
await github.rest.pulls.update({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
pull_number: context.issue.number,
|
||||
title: 'ai slop',
|
||||
body: 'This PR has been marked as AI slop and the description has been updated to avoid confusion or misleading reviewers.\n\nMany AI PRs are fine, but sometimes they submit a PR too early, fail to test if the problem is real, fail to reproduce the problem, or fail to test that the problem is fixed. If you think this PR is not AI slop, please leave a comment.',
|
||||
state: 'closed'
|
||||
});
|
||||
|
||||
// Delete the branch if it's from a fork or if it's not a protected branch
|
||||
try {
|
||||
await github.rest.git.deleteRef({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
ref: `heads/${pr.data.head.ref}`
|
||||
});
|
||||
} catch (error) {
|
||||
console.log('Could not delete branch:', error.message);
|
||||
}
|
||||
on-labeled:
|
||||
runs-on: ubuntu-latest
|
||||
if: github.event_name == 'issues' && (github.event.label.name == 'crash' || github.event.label.name == 'needs repro')
|
||||
permissions:
|
||||
issues: write
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
sparse-checkout: |
|
||||
scripts
|
||||
.github
|
||||
CMakeLists.txt
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: ${{ env.BUN_VERSION }}
|
||||
- name: "add platform and command label"
|
||||
id: add-labels
|
||||
if: github.event.label.name == 'crash'
|
||||
env:
|
||||
GITHUB_ISSUE_BODY: ${{ github.event.issue.body }}
|
||||
GITHUB_ISSUE_TITLE: ${{ github.event.issue.title }}
|
||||
GITHUB_ISSUE_NUMBER: ${{ github.event.issue.number }}
|
||||
shell: bash
|
||||
run: |
|
||||
LABELS=$(bun scripts/read-issue.ts)
|
||||
bun scripts/is-outdated.ts
|
||||
|
||||
# Check for patterns that should close the issue
|
||||
CLOSE_ACTION=$(bun scripts/handle-crash-patterns.ts)
|
||||
echo "close-action=$CLOSE_ACTION" >> $GITHUB_OUTPUT
|
||||
|
||||
if [[ -f "is-outdated.txt" ]]; then
|
||||
echo "is-outdated=true" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
if [[ -f "outdated.txt" ]]; then
|
||||
echo "outdated=$(cat outdated.txt)" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
if [[ -f "is-standalone.txt" ]]; then
|
||||
echo "is-standalone=true" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
if [[ -f "is-very-outdated.txt" ]]; then
|
||||
echo "is-very-outdated=true" >> $GITHUB_OUTPUT
|
||||
LABELS="$LABELS,old-version"
|
||||
else
|
||||
echo "is-very-outdated=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
echo "latest=$(cat LATEST)" >> $GITHUB_OUTPUT
|
||||
echo "labels=$LABELS" >> $GITHUB_OUTPUT
|
||||
rm -rf is-outdated.txt outdated.txt latest.txt is-very-outdated.txt is-standalone.txt
|
||||
- name: Close issue if pattern detected
|
||||
if: github.event.label.name == 'crash' && fromJson(steps.add-labels.outputs.close-action).close == true
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
const closeAction = ${{ fromJson(steps.add-labels.outputs.close-action) }};
|
||||
|
||||
// Comment with the reason
|
||||
await github.rest.issues.createComment({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
issue_number: context.issue.number,
|
||||
body: closeAction.comment
|
||||
});
|
||||
|
||||
// Close the issue
|
||||
await github.rest.issues.update({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
issue_number: context.issue.number,
|
||||
state: 'closed',
|
||||
state_reason: closeAction.reason
|
||||
});
|
||||
- name: Generate comment text with Sentry Link
|
||||
if: github.event.label.name == 'crash' && fromJson(steps.add-labels.outputs.close-action).close != true
|
||||
# ignore if fail
|
||||
continue-on-error: true
|
||||
id: generate-comment-text
|
||||
env:
|
||||
GITHUB_ISSUE_BODY: ${{ github.event.issue.body }}
|
||||
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_EVENTS_SECRET }}
|
||||
shell: bash
|
||||
run: |
|
||||
bun scripts/associate-issue-with-sentry.ts
|
||||
|
||||
if [[ -f "sentry-link.txt" ]]; then
|
||||
echo "sentry-link=$(cat sentry-link.txt)" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
if [[ -f "sentry-id.txt" ]]; then
|
||||
echo "sentry-id=$(cat sentry-id.txt)" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
- name: Remove old labels
|
||||
uses: actions-cool/issues-helper@v3
|
||||
if: github.event.label.name == 'crash' && steps.add-labels.outputs.is-very-outdated == 'false'
|
||||
with:
|
||||
actions: "remove-labels"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.issue.number }}
|
||||
labels: old-version
|
||||
- name: Add labels
|
||||
uses: actions-cool/issues-helper@v3
|
||||
if: github.event.label.name == 'crash'
|
||||
with:
|
||||
actions: "add-labels"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.issue.number }}
|
||||
labels: ${{ steps.add-labels.outputs.labels }}
|
||||
- name: Comment outdated (standalone executable)
|
||||
if: steps.add-labels.outputs.is-outdated == 'true' && steps.add-labels.outputs.is-standalone == 'true' && github.event.label.name == 'crash' && steps.generate-comment-text.outputs.sentry-link == ''
|
||||
uses: actions-cool/issues-helper@v3
|
||||
with:
|
||||
actions: "create-comment"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.issue.number }}
|
||||
body: |
|
||||
@${{ github.event.issue.user.login }}, the latest version of Bun is v${{ steps.add-labels.outputs.latest }}, but the standalone executable is running Bun v${{ steps.add-labels.outputs.outdated }}. When the CLI using Bun's single-file executable next updates it might be fixed.
|
||||
- name: Comment outdated
|
||||
if: steps.add-labels.outputs.is-outdated == 'true' && steps.add-labels.outputs.is-standalone != 'true' && github.event.label.name == 'crash' && steps.generate-comment-text.outputs.sentry-link == ''
|
||||
uses: actions-cool/issues-helper@v3
|
||||
with:
|
||||
actions: "create-comment"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.issue.number }}
|
||||
body: |
|
||||
@${{ github.event.issue.user.login }}, the latest version of Bun is v${{ steps.add-labels.outputs.latest }}, but this crash was reported on Bun v${{ steps.add-labels.outputs.outdated }}.
|
||||
|
||||
Are you able to reproduce this crash on the latest version of Bun?
|
||||
|
||||
```sh
|
||||
bun upgrade
|
||||
```
|
||||
- name: Comment with Sentry Link and outdated version (standalone executable)
|
||||
if: steps.generate-comment-text.outputs.sentry-link != '' && github.event.label.name == 'crash' && steps.add-labels.outputs.is-outdated == 'true' && steps.add-labels.outputs.is-standalone == 'true'
|
||||
uses: actions-cool/issues-helper@v3
|
||||
with:
|
||||
actions: "create-comment"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.issue.number }}
|
||||
body: |
|
||||
@${{ github.event.issue.user.login }}, thank you for reporting this crash. The latest version of Bun is v${{ steps.add-labels.outputs.latest }}, but the standalone executable is running Bun v${{ steps.add-labels.outputs.outdated }}. When the CLI that uses Bun's single-file executable next updates, this crash may be fixed.
|
||||
|
||||
For Bun's internal tracking, this issue is [${{ steps.generate-comment-text.outputs.sentry-id }}](${{ steps.generate-comment-text.outputs.sentry-link }}).
|
||||
|
||||
<!-- sentry-id: ${{ steps.generate-comment-text.outputs.sentry-id }} -->
|
||||
<!-- sentry-link: ${{ steps.generate-comment-text.outputs.sentry-link }} -->
|
||||
- name: Comment with Sentry Link and outdated version
|
||||
if: steps.generate-comment-text.outputs.sentry-link != '' && github.event.label.name == 'crash' && steps.add-labels.outputs.is-outdated == 'true' && steps.add-labels.outputs.is-standalone != 'true'
|
||||
uses: actions-cool/issues-helper@v3
|
||||
with:
|
||||
actions: "create-comment"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.issue.number }}
|
||||
body: |
|
||||
@${{ github.event.issue.user.login }}, thank you for reporting this crash. The latest version of Bun is v${{ steps.add-labels.outputs.latest }}, but this crash was reported on Bun v${{ steps.add-labels.outputs.outdated }}.
|
||||
|
||||
Are you able to reproduce this crash on the latest version of Bun?
|
||||
|
||||
```sh
|
||||
bun upgrade
|
||||
```
|
||||
|
||||
For Bun's internal tracking, this issue is [${{ steps.generate-comment-text.outputs.sentry-id }}](${{ steps.generate-comment-text.outputs.sentry-link }}).
|
||||
|
||||
<!-- sentry-id: ${{ steps.generate-comment-text.outputs.sentry-id }} -->
|
||||
<!-- sentry-link: ${{ steps.generate-comment-text.outputs.sentry-link }} -->
|
||||
- name: Comment with Sentry Link
|
||||
if: steps.generate-comment-text.outputs.sentry-link != '' && github.event.label.name == 'crash' && steps.add-labels.outputs.is-outdated != 'true'
|
||||
uses: actions-cool/issues-helper@v3
|
||||
with:
|
||||
actions: "create-comment"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.issue.number }}
|
||||
body: |
|
||||
Thank you for reporting this crash.
|
||||
|
||||
For Bun's internal tracking, this issue is [${{ steps.generate-comment-text.outputs.sentry-id }}](${{ steps.generate-comment-text.outputs.sentry-link }}).
|
||||
|
||||
<!-- sentry-id: ${{ steps.generate-comment-text.outputs.sentry-id }} -->
|
||||
<!-- sentry-link: ${{ steps.generate-comment-text.outputs.sentry-link }} -->
|
||||
- name: Comment needs repro
|
||||
if: github.event.label.name == 'needs repro'
|
||||
uses: actions-cool/issues-helper@v3
|
||||
with:
|
||||
actions: "create-comment"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.issue.number }}
|
||||
body: |
|
||||
Hello @${{ github.event.issue.user.login }}. Please provide a [minimal reproduction](https://stackoverflow.com/help/minimal-reproducible-example) using a GitHub repository, [Replit](https://replit.com/@replit/Bun), [CodeSandbox](https://codesandbox.io/templates/bun), or provide a bulleted list of commands to run that reproduce this issue. Issues marked with `needs repro` will be closed if they have no activity within 3 days.
21 .github/workflows/lint.yml (vendored)
@@ -1,21 +0,0 @@
name: Lint

on:
  pull_request:
  workflow_dispatch:

env:
  BUN_VERSION: "1.2.10"

jobs:
  lint-js:
    name: "Lint JavaScript"
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Setup Bun
        uses: ./.github/actions/setup-bun
        with:
          bun-version: ${{ env.BUN_VERSION }}
      - name: Lint
        run: bun lint
55 .github/workflows/packages-ci.yml (vendored)
@@ -1,55 +0,0 @@
name: Packages CI

on:
  push:
    branches:
      - main
    paths:
      - "packages/**"
      - .prettierrc
      - .prettierignore
      - tsconfig.json
      - oxlint.json
      - "!**/*.md"
  pull_request:
    branches:
      - main
    paths:
      - "packages/**"
      - .prettierrc
      - .prettierignore
      - tsconfig.json
      - oxlint.json
      - "!**/*.md"

env:
  BUN_VERSION: "canary"

jobs:
  bun-plugin-svelte:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Setup Bun
        uses: ./.github/actions/setup-bun
        with:
          bun-version: ${{ env.BUN_VERSION }}

      - name: Install dependencies
        run: |
          bun install
          pushd ./packages/bun-plugin-svelte && bun install

      - name: Lint
        run: |
          bunx oxlint@0.15 --format github --deny-warnings
          bunx prettier --config ../../.prettierrc --check .
        working-directory: ./packages/bun-plugin-svelte

      - name: Check types
        run: bun check:types
        working-directory: ./packages/bun-plugin-svelte

      - name: Test
        run: bun test
        working-directory: ./packages/bun-plugin-svelte
76 .github/workflows/prettier-fmt.yml (vendored, new file)
@@ -0,0 +1,76 @@
name: prettier

on:
  pull_request:
    branches:
      - main
      - jarred/test-actions
  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:

jobs:
  prettier-fmt:
    name: prettier
    runs-on: ubuntu-latest
    outputs:
      prettier_fmt_errs: ${{ steps.fmt.outputs.prettier_fmt_errs }}
    steps:
      - uses: actions/checkout@v3
        with:
          submodules: recursive

      - id: setup
        name: Setup
        uses: oven-sh/setup-bun@v1
        with:
          bun-version: latest
      - id: install
        name: Install prettier
        run: bun install
      - name: Run prettier
        id: fmt
        run: |
          rm -f .failed
          bun prettier --check "./bench/**/*.{ts,tsx,js,jsx,mjs}" "./test/**/*.{ts,tsx,js,jsx,mjs}" "./src/**/*.{ts,tsx,js,jsx}" --config .prettierrc.cjs 2> prettier-fmt.err > prettier-fmt1.err || echo 'failed' > .failed

          if [ -s .failed ]; then
            delimiter="$(openssl rand -hex 8)"
            echo "prettier_fmt_errs<<${delimiter}" >> "${GITHUB_OUTPUT}"
            cat prettier-fmt.err >> "${GITHUB_OUTPUT}"
            cat prettier-fmt1.err >> "${GITHUB_OUTPUT}"
            echo "${delimiter}" >> "${GITHUB_OUTPUT}"
          fi
      - name: Comment on PR
        if: steps.fmt.outputs.prettier_fmt_errs != ''
        uses: thollander/actions-comment-pull-request@v2
        with:
          comment_tag: prettier-fmt
          message: |
            ❌ @${{ github.actor }} `prettier` reported errors

            ```js
            ${{ steps.fmt.outputs.prettier_fmt_errs }}
            ```

            To one-off fix this manually, run:
            ```sh
            bun fmt
            ```

            You might need to run `bun install` locally and configure your text editor to [auto-format on save](https://marketplace.visualstudio.com/items?itemName=esbenp.prettier-vscode).

            <sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
      - name: Uncomment on PR
        if: steps.fmt.outputs.prettier_fmt_errs == ''
        uses: thollander/actions-comment-pull-request@v2
        with:
          comment_tag: prettier-fmt
          mode: upsert
          create_if_not_exists: false
          message: |
            ✅ `prettier` errors have been resolved. Thank you.

            <sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
      - name: Fail the job
        if: steps.fmt.outputs.prettier_fmt_errs != ''
        run: exit 1
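The "Run prettier" step above exposes prettier's multi-line error report to later steps through `$GITHUB_OUTPUT`, using a random heredoc-style delimiter so the payload can never terminate the value early. A minimal standalone sketch of that pattern, assuming an illustrative output name `lint_errs` and a local file standing in for `GITHUB_OUTPUT`:

```sh
#!/usr/bin/env bash
# Sketch of the multi-line $GITHUB_OUTPUT pattern used in the "Run prettier" step.
# Outside of Actions, point GITHUB_OUTPUT at any writable file to experiment.
set -euo pipefail
GITHUB_OUTPUT="${GITHUB_OUTPUT:-./github-output.txt}"

# Stand-in for prettier's captured stderr/stdout.
printf 'src/a.ts\nsrc/b.ts\n' > lint.err

# A random delimiter guarantees the payload cannot contain the terminator.
delimiter="$(openssl rand -hex 8)"
{
  echo "lint_errs<<${delimiter}"
  cat lint.err
  echo "${delimiter}"
} >> "${GITHUB_OUTPUT}"
```

Downstream steps then read the value as `${{ steps.<id>.outputs.lint_errs }}`, which is how the "Comment on PR" step above interpolates `prettier_fmt_errs` into the comment body.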
368 .github/workflows/release.yml (vendored)
@@ -1,368 +0,0 @@
|
||||
# TODO: Move this to bash scripts instead of GitHub Actions
|
||||
# so it can be run from Buildkite, see: .buildkite/scripts/release.sh
|
||||
|
||||
name: Release
|
||||
concurrency: release
|
||||
|
||||
env:
|
||||
BUN_VERSION: ${{ github.event.inputs.tag || github.event.release.tag_name || 'canary' }}
|
||||
BUN_LATEST: ${{ (github.event.inputs.is-latest || github.event.release.tag_name) && 'true' || 'false' }}
|
||||
|
||||
on:
|
||||
release:
|
||||
types:
|
||||
- published
|
||||
schedule:
|
||||
- cron: "0 14 * * *" # every day at 6am PST
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
is-latest:
|
||||
description: Is this the latest release?
|
||||
type: boolean
|
||||
default: false
|
||||
tag:
|
||||
type: string
|
||||
description: What is the release tag? (e.g. "1.0.2", "canary")
|
||||
required: true
|
||||
use-docker:
|
||||
description: Should Docker images be released?
|
||||
type: boolean
|
||||
default: false
|
||||
use-npm:
|
||||
description: Should npm packages be published?
|
||||
type: boolean
|
||||
default: false
|
||||
use-homebrew:
|
||||
description: Should binaries be released to Homebrew?
|
||||
type: boolean
|
||||
default: false
|
||||
use-s3:
|
||||
description: Should binaries be uploaded to S3?
|
||||
type: boolean
|
||||
default: false
|
||||
use-types:
|
||||
description: Should types be released to npm?
|
||||
type: boolean
|
||||
default: false
|
||||
use-definitelytyped:
|
||||
description: "Should types be PR'd to DefinitelyTyped?"
|
||||
type: boolean
|
||||
default: false
|
||||
|
||||
jobs:
|
||||
sign:
|
||||
name: Sign Release
|
||||
runs-on: ubuntu-latest
|
||||
if: ${{ github.repository_owner == 'oven-sh' }}
|
||||
permissions:
|
||||
contents: write
|
||||
defaults:
|
||||
run:
|
||||
working-directory: packages/bun-release
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
- name: Setup GPG
|
||||
uses: crazy-max/ghaction-import-gpg@v5
|
||||
with:
|
||||
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
|
||||
passphrase: ${{ secrets.GPG_PASSPHRASE }}
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: "1.2.3"
|
||||
- name: Install Dependencies
|
||||
run: bun install
|
||||
- name: Sign Release
|
||||
run: |
|
||||
echo "$GPG_PASSPHRASE" | bun upload-assets -- "${{ env.BUN_VERSION }}"
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
|
||||
npm:
|
||||
name: Release to NPM
|
||||
runs-on: ubuntu-latest
|
||||
needs: sign
|
||||
if: ${{ github.event_name != 'workflow_dispatch' || github.event.inputs.use-npm == 'true' }}
|
||||
permissions:
|
||||
contents: read
|
||||
defaults:
|
||||
run:
|
||||
working-directory: packages/bun-release
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
# To workaround issue
|
||||
ref: main
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: "1.2.3"
|
||||
- name: Install Dependencies
|
||||
run: bun install
|
||||
- name: Release
|
||||
run: bun upload-npm -- "${{ env.BUN_VERSION }}" publish
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
|
||||
npm-types:
|
||||
name: Release types to NPM
|
||||
runs-on: ubuntu-latest
|
||||
needs: sign
|
||||
if: ${{ github.event_name != 'workflow_dispatch' || github.event.inputs.use-types == 'true' }}
|
||||
permissions:
|
||||
contents: read
|
||||
defaults:
|
||||
run:
|
||||
working-directory: packages/bun-types
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: latest
|
||||
- name: Setup Bun
|
||||
if: ${{ env.BUN_VERSION != 'canary' }}
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: "1.2.3"
|
||||
- name: Setup Bun
|
||||
if: ${{ env.BUN_VERSION == 'canary' }}
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: "canary" # Must be 'canary' so tag is correct
|
||||
- name: Install Dependencies
|
||||
run: bun install
|
||||
- name: Setup Tag
|
||||
if: ${{ env.BUN_VERSION == 'canary' }}
|
||||
run: |
|
||||
VERSION=$(bun --version)
|
||||
TAG="${VERSION}-canary.$(date +'%Y%m%dT%H%M%S')"
|
||||
echo "Setup tag: ${TAG}"
|
||||
echo "TAG=${TAG}" >> ${GITHUB_ENV}
|
||||
- name: Build
|
||||
run: bun run build
|
||||
env:
|
||||
BUN_VERSION: ${{ env.TAG || env.BUN_VERSION }}
|
||||
- name: Release (canary)
|
||||
if: ${{ env.BUN_VERSION == 'canary' }}
|
||||
uses: JS-DevTools/npm-publish@v1
|
||||
with:
|
||||
package: packages/bun-types/package.json
|
||||
token: ${{ secrets.NPM_TOKEN }}
|
||||
tag: canary
|
||||
- name: Release (latest)
|
||||
if: ${{ env.BUN_LATEST == 'true' }}
|
||||
uses: JS-DevTools/npm-publish@v1
|
||||
with:
|
||||
package: packages/bun-types/package.json
|
||||
token: ${{ secrets.NPM_TOKEN }}
|
||||
definitelytyped:
|
||||
name: Make PR to DefinitelyTyped to update `bun-types` version
|
||||
runs-on: ubuntu-latest
|
||||
needs: npm-types
|
||||
if: ${{ github.event_name == 'release' || github.event.inputs.use-definitelytyped == 'true' }}
|
||||
permissions:
|
||||
contents: read
|
||||
steps:
|
||||
- name: Checkout (DefinitelyTyped)
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
repository: DefinitelyTyped/DefinitelyTyped
|
||||
- name: Checkout (bun)
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
path: bun
|
||||
- name: Setup Bun
|
||||
uses: ./bun/.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: "1.2.0"
|
||||
- id: bun-version
|
||||
run: echo "BUN_VERSION=${BUN_VERSION#bun-v}" >> "$GITHUB_OUTPUT"
|
||||
- name: Update bun-types version in package.json
|
||||
run: |
|
||||
bun -e '
|
||||
const file = Bun.file("./types/bun/package.json");
|
||||
const json = await file.json();
|
||||
const version = "${{ steps.bun-version.outputs.BUN_VERSION }}";
|
||||
json.dependencies["bun-types"] = version;
|
||||
json.version = version.slice(0, version.lastIndexOf(".")) + ".9999";
|
||||
await file.write(JSON.stringify(json, null, 4) + "\n");
|
||||
'
|
||||
- name: Create Pull Request
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
if: ${{ env.BUN_LATEST == 'true' && env.BUN_VERSION != 'canary'}}
|
||||
with:
|
||||
token: ${{ secrets.ROBOBUN_TOKEN }}
|
||||
add-paths: ./types/bun/package.json
|
||||
title: "[bun] update to ${{ steps.bun-version.outputs.BUN_VERSION }}"
|
||||
commit-message: "[bun] update to ${{ steps.bun-version.outputs.BUN_VERSION }}"
|
||||
body: |
|
||||
Update `bun-types` version to ${{ steps.bun-version.outputs.BUN_VERSION }}
|
||||
|
||||
https://bun.com/blog/${{ env.BUN_VERSION }}
|
||||
push-to-fork: oven-sh/DefinitelyTyped
|
||||
branch: ${{env.BUN_VERSION}}
|
||||
docker:
|
||||
name: Release to Dockerhub
|
||||
runs-on: ubuntu-latest
|
||||
needs: sign
|
||||
if: ${{ github.event_name != 'workflow_dispatch' || github.event.inputs.use-docker == 'true' }}
|
||||
permissions:
|
||||
contents: read
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- variant: debian
|
||||
suffix: ""
|
||||
- variant: debian
|
||||
suffix: -debian
|
||||
- variant: slim
|
||||
suffix: -slim
|
||||
dir: debian-slim
|
||||
- variant: alpine
|
||||
suffix: -alpine
|
||||
- variant: distroless
|
||||
suffix: -distroless
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
- name: Setup Docker emulator
|
||||
uses: docker/setup-qemu-action@v3
|
||||
- id: buildx
|
||||
name: Setup Docker buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
with:
|
||||
platforms: linux/amd64,linux/arm64
|
||||
- id: metadata
|
||||
name: Setup Docker metadata
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: oven/bun
|
||||
flavor: |
|
||||
latest=false
|
||||
tags: |
|
||||
type=raw,value=latest,enable=${{ env.BUN_LATEST == 'true' && matrix.suffix == '' }}
|
||||
type=raw,value=${{ matrix.variant }},enable=${{ env.BUN_LATEST == 'true' }}
|
||||
type=match,pattern=(bun-v)?(canary|\d+.\d+.\d+),group=2,value=${{ env.BUN_VERSION }},suffix=${{ matrix.suffix }}
|
||||
type=match,pattern=(bun-v)?(canary|\d+.\d+),group=2,value=${{ env.BUN_VERSION }},suffix=${{ matrix.suffix }}
|
||||
type=match,pattern=(bun-v)?(canary|\d+),group=2,value=${{ env.BUN_VERSION }},suffix=${{ matrix.suffix }}
|
||||
- name: Login to Docker
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||
- name: Push to Docker
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: ./dockerhub/${{ matrix.dir || matrix.variant }}
|
||||
platforms: linux/amd64,linux/arm64
|
||||
builder: ${{ steps.buildx.outputs.name }}
|
||||
push: true
|
||||
tags: ${{ steps.metadata.outputs.tags }}
|
||||
labels: ${{ steps.metadata.outputs.labels }}
|
||||
build-args: |
|
||||
BUN_VERSION=${{ env.BUN_VERSION }}
|
||||
homebrew:
|
||||
name: Release to Homebrew
|
||||
runs-on: ubuntu-latest
|
||||
needs: sign
|
||||
permissions:
|
||||
contents: read
|
||||
if: ${{ github.event_name == 'release' || github.event.inputs.use-homebrew == 'true' }}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
repository: oven-sh/homebrew-bun
|
||||
token: ${{ secrets.ROBOBUN_TOKEN }}
|
||||
- id: gpg
|
||||
name: Setup GPG
|
||||
uses: crazy-max/ghaction-import-gpg@v5
|
||||
with:
|
||||
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
|
||||
passphrase: ${{ secrets.GPG_PASSPHRASE }}
|
||||
- name: Setup Ruby
|
||||
uses: ruby/setup-ruby@v1
|
||||
with:
|
||||
ruby-version: "2.6"
|
||||
- name: Update Tap
|
||||
run: ruby scripts/release.rb "${{ env.BUN_VERSION }}"
|
||||
- name: Commit Tap
|
||||
uses: stefanzweifel/git-auto-commit-action@v4
|
||||
with:
|
||||
commit_options: --gpg-sign=${{ steps.gpg.outputs.keyid }}
|
||||
commit_message: Release ${{ env.BUN_VERSION }}
|
||||
commit_user_name: robobun
|
||||
commit_user_email: robobun@oven.sh
|
||||
commit_author: robobun <robobun@oven.sh>
|
||||
s3:
|
||||
name: Upload to S3
|
||||
runs-on: ubuntu-latest
|
||||
needs: sign
|
||||
if: ${{ github.event_name != 'workflow_dispatch' || github.event.inputs.use-s3 == 'true' }}
|
||||
permissions:
|
||||
contents: read
|
||||
defaults:
|
||||
run:
|
||||
working-directory: packages/bun-release
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: "1.2.0"
|
||||
- name: Install Dependencies
|
||||
run: bun install
|
||||
- name: Release
|
||||
run: bun upload-s3 -- "${{ env.BUN_VERSION }}"
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY}}
|
||||
AWS_ENDPOINT: ${{ secrets.AWS_ENDPOINT }}
|
||||
AWS_BUCKET: bun
|
||||
|
||||
notify-sentry:
|
||||
name: Notify Sentry
|
||||
runs-on: ubuntu-latest
|
||||
needs: s3
|
||||
steps:
|
||||
- name: Notify Sentry
|
||||
uses: getsentry/action-release@v1.7.0
|
||||
env:
|
||||
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
|
||||
SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
|
||||
with:
|
||||
ignore_missing: true
|
||||
ignore_empty: true
|
||||
version: ${{ env.BUN_VERSION }}
|
||||
environment: production
|
||||
|
||||
bump:
|
||||
name: "Bump version"
|
||||
runs-on: ubuntu-latest
|
||||
if: ${{ github.event_name != 'schedule' }}
|
||||
permissions:
|
||||
pull-requests: write
|
||||
contents: write
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
if: ${{ env.BUN_LATEST == 'true' }}
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
if: ${{ env.BUN_LATEST == 'true' }}
|
||||
with:
|
||||
bun-version: "1.2.0"
|
||||
- name: Bump version
|
||||
uses: ./.github/actions/bump
|
||||
if: ${{ env.BUN_LATEST == 'true' }}
|
||||
with:
|
||||
version: ${{ env.BUN_VERSION }}
|
||||
token: ${{ github.token }}
30 .github/workflows/stale.yaml (vendored)
@@ -1,30 +0,0 @@
name: Close inactive issues
on:
  # schedule:
  #   - cron: "15 * * * *"
  workflow_dispatch:

jobs:
  close-issues:
    runs-on: ubuntu-latest
    permissions:
      issues: write
      pull-requests: write
    steps:
      - uses: actions/stale@v5
        with:
          days-before-issue-close: 5
          any-of-issue-labels: "needs repro,waiting-for-author"
          exempt-issue-labels: "neverstale"
          exempt-pr-labels: "neverstale"
          remove-stale-when-updated: true
          stale-issue-label: "stale"
          stale-pr-label: "stale"
          stale-issue-message: "This issue is stale and may be closed due to inactivity. If you're still running into this, please leave a comment."
          close-issue-message: "This issue was closed because it has been inactive for 5 days since being marked as stale."
          days-before-pr-stale: 30
          days-before-pr-close: 14
          stale-pr-message: "This pull request is stale and may be closed due to inactivity."
          close-pr-message: "This pull request has been closed due to inactivity."
          repo-token: ${{ github.token }}
          operations-per-run: 1000
32 .github/workflows/test-bump.yml (vendored)
@@ -1,32 +0,0 @@
name: Test Bump version

on:
  workflow_dispatch:
    inputs:
      version:
        type: string
        description: What is the release tag? (e.g. "1.0.2", "canary")
        required: true

env:
  BUN_VERSION: "1.2.0"

jobs:
  bump:
    name: "Bump version"
    runs-on: ubuntu-latest
    permissions:
      pull-requests: write
      contents: write
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Setup Bun
        uses: ./.github/actions/setup-bun
        with:
          bun-version: ${{ env.BUN_VERSION }}
      - name: Bump version
        uses: ./.github/actions/bump
        with:
          version: ${{ inputs.version }}
          token: ${{ github.token }}
99 .github/workflows/update-cares.yml (vendored)
@@ -1,99 +0,0 @@
|
||||
name: Update c-ares
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 4 * * 0"
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
check-update:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Check c-ares version
|
||||
id: check-version
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Extract the commit hash from the line after COMMIT
|
||||
CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildCares.cmake)
|
||||
|
||||
if [ -z "$CURRENT_VERSION" ]; then
|
||||
echo "Error: Could not find COMMIT line in BuildCares.cmake"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Validate that it looks like a git hash
|
||||
if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
|
||||
echo "Error: Invalid git hash format in BuildCares.cmake"
|
||||
echo "Found: $CURRENT_VERSION"
|
||||
echo "Expected: 40 character hexadecimal string"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
|
||||
|
||||
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/c-ares/c-ares/releases/latest)
|
||||
if [ -z "$LATEST_RELEASE" ]; then
|
||||
echo "Error: Failed to fetch latest release from GitHub API"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
|
||||
if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
|
||||
echo "Error: Could not extract tag name from GitHub API response"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/c-ares/c-ares/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
|
||||
exit 1
|
||||
fi
|
||||
LATEST_SHA=$(curl -sL "https://api.github.com/repos/c-ares/c-ares/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
|
||||
echo "Error: Invalid SHA format received from GitHub"
|
||||
echo "Found: $LATEST_SHA"
|
||||
echo "Expected: 40 character hexadecimal string"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
|
||||
echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Update version if needed
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
run: |
|
||||
set -euo pipefail
|
||||
# Handle multi-line format where COMMIT and its value are on separate lines
|
||||
sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildCares.cmake
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
cmake/targets/BuildCares.cmake
|
||||
commit-message: "deps: update c-ares to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
|
||||
title: "deps: update c-ares to ${{ steps.check-version.outputs.tag }}"
|
||||
delete-branch: true
|
||||
branch: deps/update-cares
|
||||
body: |
|
||||
## What does this PR do?
|
||||
|
||||
Updates c-ares to version ${{ steps.check-version.outputs.tag }}
|
||||
|
||||
Compare: https://github.com/c-ares/c-ares/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
|
||||
|
||||
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-cares.yml)
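Each of these dependency-update workflows reads the pinned commit from a `cmake/targets/Build*.cmake` file in which `COMMIT` and its value sit on consecutive lines, which is why the `awk` call reads one line ahead. A minimal sketch of that extraction against a made-up fixture (the file name and hash below are illustrative, not taken from the repository):

```sh
#!/usr/bin/env bash
set -euo pipefail

# Illustrative fixture mimicking the two-line COMMIT format the workflows parse.
cat > BuildExample.cmake <<'EOF'
  COMMIT
    0123456789abcdef0123456789abcdef01234567
EOF

# Same awk as the workflows: on the COMMIT line, read the next line and trim whitespace.
CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' BuildExample.cmake)

# A pinned commit must be a full 40-character lowercase hex SHA.
if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
  echo "Unexpected COMMIT value: $CURRENT_VERSION" >&2
  exit 1
fi
echo "$CURRENT_VERSION"
```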
102 .github/workflows/update-hdrhistogram.yml (vendored)
@@ -1,102 +0,0 @@
|
||||
name: Update hdrhistogram
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 4 * * 0"
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
check-update:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Check hdrhistogram version
|
||||
id: check-version
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Extract the commit hash from the line after COMMIT
|
||||
CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildHdrHistogram.cmake)
|
||||
|
||||
if [ -z "$CURRENT_VERSION" ]; then
|
||||
echo "Error: Could not find COMMIT line in BuildHdrHistogram.cmake"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Validate that it looks like a git hash
|
||||
if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
|
||||
echo "Error: Invalid git hash format in BuildHdrHistogram.cmake"
|
||||
echo "Found: $CURRENT_VERSION"
|
||||
echo "Expected: 40 character hexadecimal string"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
|
||||
|
||||
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/HdrHistogram/HdrHistogram_c/releases/latest)
|
||||
if [ -z "$LATEST_RELEASE" ]; then
|
||||
echo "Error: Failed to fetch latest release from GitHub API"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
|
||||
if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
|
||||
echo "Error: Could not extract tag name from GitHub API response"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/HdrHistogram/HdrHistogram_c/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Try to get commit SHA from tag object (for annotated tags)
|
||||
# If it fails, assume it's a lightweight tag pointing directly to commit
|
||||
LATEST_SHA=$(curl -sL "https://api.github.com/repos/HdrHistogram/HdrHistogram_c/git/tags/$LATEST_TAG_SHA" 2>/dev/null | jq -r '.object.sha // empty')
|
||||
if [ -z "$LATEST_SHA" ]; then
|
||||
# Lightweight tag - SHA points directly to commit
|
||||
LATEST_SHA="$LATEST_TAG_SHA"
|
||||
fi
|
||||
|
||||
if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
|
||||
echo "Error: Invalid SHA format received from GitHub"
|
||||
echo "Found: $LATEST_SHA"
|
||||
echo "Expected: 40 character hexadecimal string"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
|
||||
echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Update version if needed
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
run: |
|
||||
set -euo pipefail
|
||||
# Handle multi-line format where COMMIT and its value are on separate lines
|
||||
sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildHdrHistogram.cmake
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
cmake/targets/BuildHdrHistogram.cmake
|
||||
commit-message: "deps: update hdrhistogram to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
|
||||
title: "deps: update hdrhistogram to ${{ steps.check-version.outputs.tag }}"
|
||||
delete-branch: true
|
||||
branch: deps/update-hdrhistogram
|
||||
body: |
|
||||
## What does this PR do?
|
||||
|
||||
Updates hdrhistogram to version ${{ steps.check-version.outputs.tag }}
|
||||
|
||||
Compare: https://github.com/HdrHistogram/HdrHistogram_c/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
|
||||
|
||||
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-hdrhistogram.yml)
118 .github/workflows/update-highway.yml (vendored)
@@ -1,118 +0,0 @@
|
||||
name: Update highway
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 4 * * 0"
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
check-update:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Check highway version
|
||||
id: check-version
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Extract the commit hash from the line after COMMIT
|
||||
CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildHighway.cmake)
|
||||
|
||||
if [ -z "$CURRENT_VERSION" ]; then
|
||||
echo "Error: Could not find COMMIT line in BuildHighway.cmake"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Validate that it looks like a git hash
|
||||
if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
|
||||
echo "Error: Invalid git hash format in BuildHighway.cmake"
|
||||
echo "Found: $CURRENT_VERSION"
|
||||
echo "Expected: 40 character hexadecimal string"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
|
||||
|
||||
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/google/highway/releases/latest)
|
||||
if [ -z "$LATEST_RELEASE" ]; then
|
||||
echo "Error: Failed to fetch latest release from GitHub API"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
|
||||
if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
|
||||
echo "Error: Could not extract tag name from GitHub API response"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
TAG_REF=$(curl -sL "https://api.github.com/repos/google/highway/git/refs/tags/$LATEST_TAG")
|
||||
if [ -z "$TAG_REF" ]; then
|
||||
echo "Error: Could not fetch tag reference for $LATEST_TAG"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
TAG_OBJECT_SHA=$(echo "$TAG_REF" | jq -r '.object.sha')
|
||||
TAG_OBJECT_TYPE=$(echo "$TAG_REF" | jq -r '.object.type')
|
||||
|
||||
if [ -z "$TAG_OBJECT_SHA" ] || [ "$TAG_OBJECT_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Handle both lightweight tags (type: commit) and annotated tags (type: tag)
|
||||
if [ "$TAG_OBJECT_TYPE" = "commit" ]; then
|
||||
# Lightweight tag - object.sha is already the commit SHA
|
||||
LATEST_SHA="$TAG_OBJECT_SHA"
|
||||
elif [ "$TAG_OBJECT_TYPE" = "tag" ]; then
|
||||
# Annotated tag - need to fetch the tag object to get the commit SHA
|
||||
LATEST_SHA=$(curl -sL "https://api.github.com/repos/google/highway/git/tags/$TAG_OBJECT_SHA" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch commit SHA for annotated tag $LATEST_TAG @ $TAG_OBJECT_SHA"
|
||||
exit 1
|
||||
fi
|
||||
else
|
||||
echo "Error: Unexpected tag object type: $TAG_OBJECT_TYPE"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
|
||||
echo "Error: Invalid SHA format received from GitHub"
|
||||
echo "Found: $LATEST_SHA"
|
||||
echo "Expected: 40 character hexadecimal string"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
|
||||
echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Update version if needed
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
run: |
|
||||
set -euo pipefail
|
||||
# Handle multi-line format where COMMIT and its value are on separate lines
|
||||
sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildHighway.cmake
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
cmake/targets/BuildHighway.cmake
|
||||
commit-message: "deps: update highway to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
|
||||
title: "deps: update highway to ${{ steps.check-version.outputs.tag }}"
|
||||
delete-branch: true
|
||||
branch: deps/update-highway
|
||||
body: |
|
||||
## What does this PR do?
|
||||
|
||||
Updates highway to version ${{ steps.check-version.outputs.tag }}
|
||||
|
||||
Compare: https://github.com/google/highway/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
|
||||
|
||||
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-highway.yml)
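The extra branching above exists because `git/refs/tags/<tag>` can point either directly at a commit (a lightweight tag) or at an annotated tag object that has to be dereferenced once more via `git/tags/<sha>`. A condensed sketch of that resolution as a standalone function; the repository and tag in the usage line are only examples, and the script assumes network access and `jq`:

```sh
#!/usr/bin/env bash
# Resolve a GitHub tag to the commit SHA it ultimately points at,
# handling both lightweight and annotated tags (same logic as the steps above).
set -euo pipefail

resolve_tag_commit() {
  local repo="$1" tag="$2" ref sha type
  ref=$(curl -sL "https://api.github.com/repos/${repo}/git/refs/tags/${tag}")
  sha=$(echo "$ref" | jq -r '.object.sha')
  type=$(echo "$ref" | jq -r '.object.type')
  if [ "$type" = "tag" ]; then
    # Annotated tag: dereference the tag object to reach the commit it wraps.
    sha=$(curl -sL "https://api.github.com/repos/${repo}/git/tags/${sha}" | jq -r '.object.sha')
  fi
  if ! [[ $sha =~ ^[0-9a-f]{40}$ ]]; then
    echo "Could not resolve ${tag} in ${repo}" >&2
    return 1
  fi
  echo "$sha"
}

# Example usage (any public repository and existing tag will do):
resolve_tag_commit google/highway 1.2.0
```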
99 .github/workflows/update-libarchive.yml (vendored)
@@ -1,99 +0,0 @@
|
||||
name: Update libarchive
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 3 * * 0"
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
check-update:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Check libarchive version
|
||||
id: check-version
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Extract the commit hash from the line after COMMIT
|
||||
CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildLibArchive.cmake)
|
||||
|
||||
if [ -z "$CURRENT_VERSION" ]; then
|
||||
echo "Error: Could not find COMMIT line in BuildLibArchive.cmake"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Validate that it looks like a git hash
|
||||
if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
|
||||
echo "Error: Invalid git hash format in BuildLibArchive.cmake"
|
||||
echo "Found: $CURRENT_VERSION"
|
||||
echo "Expected: 40 character hexadecimal string"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
|
||||
|
||||
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/libarchive/libarchive/releases/latest)
|
||||
if [ -z "$LATEST_RELEASE" ]; then
|
||||
echo "Error: Failed to fetch latest release from GitHub API"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
|
||||
if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
|
||||
echo "Error: Could not extract tag name from GitHub API response"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/libarchive/libarchive/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
|
||||
exit 1
|
||||
fi
|
||||
LATEST_SHA=$(curl -sL "https://api.github.com/repos/libarchive/libarchive/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
|
||||
echo "Error: Invalid SHA format received from GitHub"
|
||||
echo "Found: $LATEST_SHA"
|
||||
echo "Expected: 40 character hexadecimal string"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
|
||||
echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Update version if needed
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
run: |
|
||||
set -euo pipefail
|
||||
# Handle multi-line format where COMMIT and its value are on separate lines
|
||||
sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildLibArchive.cmake
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
cmake/targets/BuildLibArchive.cmake
|
||||
commit-message: "deps: update libarchive to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
|
||||
title: "deps: update libarchive to ${{ steps.check-version.outputs.tag }}"
|
||||
delete-branch: true
|
||||
branch: deps/update-libarchive
|
||||
body: |
|
||||
## What does this PR do?
|
||||
|
||||
Updates libarchive to version ${{ steps.check-version.outputs.tag }}
|
||||
|
||||
Compare: https://github.com/libarchive/libarchive/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
|
||||
|
||||
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-libarchive.yml)
99 .github/workflows/update-libdeflate.yml (vendored)
@@ -1,99 +0,0 @@
|
||||
name: Update libdeflate
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 2 * * 0"
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
check-update:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Check libdeflate version
|
||||
id: check-version
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Extract the commit hash from the line after COMMIT
|
||||
CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildLibDeflate.cmake)
|
||||
|
||||
if [ -z "$CURRENT_VERSION" ]; then
|
||||
echo "Error: Could not find COMMIT line in BuildLibDeflate.cmake"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Validate that it looks like a git hash
|
||||
if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
|
||||
echo "Error: Invalid git hash format in BuildLibDeflate.cmake"
|
||||
echo "Found: $CURRENT_VERSION"
|
||||
echo "Expected: 40 character hexadecimal string"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
|
||||
|
||||
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/ebiggers/libdeflate/releases/latest)
|
||||
if [ -z "$LATEST_RELEASE" ]; then
|
||||
echo "Error: Failed to fetch latest release from GitHub API"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
|
||||
if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
|
||||
echo "Error: Could not extract tag name from GitHub API response"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/ebiggers/libdeflate/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
|
||||
exit 1
|
||||
fi
|
||||
LATEST_SHA=$(curl -sL "https://api.github.com/repos/ebiggers/libdeflate/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
|
||||
echo "Error: Invalid SHA format received from GitHub"
|
||||
echo "Found: $LATEST_SHA"
|
||||
echo "Expected: 40 character hexadecimal string"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
|
||||
echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Update version if needed
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
run: |
|
||||
set -euo pipefail
|
||||
# Handle multi-line format where COMMIT and its value are on separate lines
|
||||
sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildLibDeflate.cmake
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
cmake/targets/BuildLibDeflate.cmake
|
||||
commit-message: "deps: update libdeflate to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
|
||||
title: "deps: update libdeflate to ${{ steps.check-version.outputs.tag }}"
|
||||
delete-branch: true
|
||||
branch: deps/update-libdeflate
|
||||
body: |
|
||||
## What does this PR do?
|
||||
|
||||
Updates libdeflate to version ${{ steps.check-version.outputs.tag }}
|
||||
|
||||
Compare: https://github.com/ebiggers/libdeflate/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
|
||||
|
||||
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-libdeflate.yml)
111 .github/workflows/update-lolhtml.yml (vendored)
@@ -1,111 +0,0 @@
|
||||
name: Update lolhtml
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 1 * * 0"
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
check-update:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Check lolhtml version
|
||||
id: check-version
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Extract the commit hash from the line after COMMIT
|
||||
CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildLolHtml.cmake)
|
||||
|
||||
if [ -z "$CURRENT_VERSION" ]; then
|
||||
echo "Error: Could not find COMMIT line in BuildLolHtml.cmake"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Validate that it looks like a git hash
|
||||
if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
|
||||
echo "Error: Invalid git hash format in BuildLolHtml.cmake"
|
||||
echo "Found: $CURRENT_VERSION"
|
||||
echo "Expected: 40 character hexadecimal string"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
|
||||
|
||||
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/cloudflare/lol-html/releases/latest)
|
||||
if [ -z "$LATEST_RELEASE" ]; then
|
||||
echo "Error: Failed to fetch latest release from GitHub API"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
|
||||
if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
|
||||
echo "Error: Could not extract tag name from GitHub API response"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Get the commit SHA that the tag points to
|
||||
# This handles both lightweight tags (direct commit refs) and annotated tags (tag objects)
|
||||
TAG_REF_RESPONSE=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/refs/tags/$LATEST_TAG")
|
||||
LATEST_TAG_SHA=$(echo "$TAG_REF_RESPONSE" | jq -r '.object.sha')
|
||||
TAG_OBJECT_TYPE=$(echo "$TAG_REF_RESPONSE" | jq -r '.object.type')
|
||||
|
||||
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ "$TAG_OBJECT_TYPE" = "tag" ]; then
|
||||
# This is an annotated tag, we need to get the commit it points to
|
||||
LATEST_SHA=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch commit SHA for annotated tag $LATEST_TAG @ $LATEST_TAG_SHA"
|
||||
exit 1
|
||||
fi
|
||||
else
|
||||
# This is a lightweight tag pointing directly to a commit
|
||||
LATEST_SHA="$LATEST_TAG_SHA"
|
||||
fi
|
||||
|
||||
if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
|
||||
echo "Error: Invalid SHA format received from GitHub"
|
||||
echo "Found: $LATEST_SHA"
|
||||
echo "Expected: 40 character hexadecimal string"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
|
||||
echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Update version if needed
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
run: |
|
||||
set -euo pipefail
|
||||
# Handle multi-line format where COMMIT and its value are on separate lines
|
||||
sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildLolHtml.cmake
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
cmake/targets/BuildLolHtml.cmake
|
||||
commit-message: "deps: update lolhtml to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
|
||||
title: "deps: update lolhtml to ${{ steps.check-version.outputs.tag }}"
|
||||
delete-branch: true
|
||||
branch: deps/update-lolhtml
|
||||
body: |
|
||||
## What does this PR do?
|
||||
|
||||
Updates lolhtml to version ${{ steps.check-version.outputs.tag }}
|
||||
|
||||
Compare: https://github.com/cloudflare/lol-html/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
|
||||
|
||||
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-lolhtml.yml)
116 .github/workflows/update-lshpack.yml (vendored)
@@ -1,116 +0,0 @@
|
||||
name: Update lshpack
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 5 * * 0"
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
check-update:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Check lshpack version
|
||||
id: check-version
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Extract the commit hash from the line after COMMIT
|
||||
CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildLshpack.cmake)
|
||||
|
||||
if [ -z "$CURRENT_VERSION" ]; then
|
||||
echo "Error: Could not find COMMIT line in BuildLshpack.cmake"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Validate that it looks like a git hash
|
||||
if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
|
||||
echo "Error: Invalid git hash format in BuildLshpack.cmake"
|
||||
echo "Found: $CURRENT_VERSION"
|
||||
echo "Expected: 40 character hexadecimal string"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
|
||||
|
||||
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/litespeedtech/ls-hpack/releases/latest)
|
||||
if [ -z "$LATEST_RELEASE" ]; then
|
||||
echo "Error: Failed to fetch latest release from GitHub API"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
|
||||
if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
|
||||
echo "Error: Could not extract tag name from GitHub API response"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Get the tag reference, which contains both SHA and type
|
||||
TAG_REF=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/refs/tags/$LATEST_TAG")
|
||||
if [ -z "$TAG_REF" ]; then
|
||||
echo "Error: Could not fetch tag reference for $LATEST_TAG"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_TAG_SHA=$(echo "$TAG_REF" | jq -r '.object.sha')
|
||||
TAG_TYPE=$(echo "$TAG_REF" | jq -r '.object.type')
|
||||
|
||||
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# If it's an annotated tag, we need to dereference it to get the commit SHA
|
||||
# If it's a lightweight tag, the SHA already points to the commit
|
||||
if [ "$TAG_TYPE" = "tag" ]; then
|
||||
LATEST_SHA=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch commit SHA for annotated tag $LATEST_TAG"
|
||||
exit 1
|
||||
fi
|
||||
else
|
||||
# For lightweight tags, the SHA is already the commit SHA
|
||||
LATEST_SHA="$LATEST_TAG_SHA"
|
||||
fi
|
||||
|
||||
if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
|
||||
echo "Error: Invalid SHA format received from GitHub"
|
||||
echo "Found: $LATEST_SHA"
|
||||
echo "Expected: 40 character hexadecimal string"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
|
||||
echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Update version if needed
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
run: |
|
||||
set -euo pipefail
|
||||
# Handle multi-line format where COMMIT and its value are on separate lines
|
||||
sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildLshpack.cmake
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
cmake/targets/BuildLshpack.cmake
|
||||
commit-message: "deps: update lshpack to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
|
||||
title: "deps: update lshpack to ${{ steps.check-version.outputs.tag }}"
|
||||
delete-branch: true
|
||||
branch: deps/update-lshpack
|
||||
body: |
|
||||
## What does this PR do?
|
||||
|
||||
Updates lshpack to version ${{ steps.check-version.outputs.tag }}
|
||||
|
||||
Compare: https://github.com/litespeedtech/ls-hpack/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
|
||||
|
||||
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-lshpack.yml)
82 .github/workflows/update-root-certs.yml (vendored)
@@ -1,82 +0,0 @@
|
||||
name: Daily Root Certs Update Check
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 0 * * *" # Runs at 00:00 UTC every day
|
||||
workflow_dispatch: # Allows manual trigger
|
||||
|
||||
jobs:
|
||||
check-and-update:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Bun
|
||||
uses: oven-sh/setup-bun@v1
|
||||
with:
|
||||
bun-version: latest
|
||||
|
||||
- name: Generate root certs and capture output
|
||||
id: generate-certs
|
||||
run: |
|
||||
cd packages/bun-usockets/
|
||||
OUTPUT=$(bun generate-root-certs.mjs -v)
|
||||
echo "cert_output<<EOF" >> $GITHUB_ENV
|
||||
echo "$OUTPUT" >> $GITHUB_ENV
|
||||
echo "EOF" >> $GITHUB_ENV
|
||||
|
||||
- name: Check for changes and stage files
|
||||
id: check-changes
|
||||
run: |
|
||||
if [[ -n "$(git status --porcelain)" ]]; then
|
||||
echo "Found changes, staging modified files..."
|
||||
git config --global user.name "github-actions[bot]"
|
||||
git config --global user.email "github-actions[bot]@users.noreply.github.com"
|
||||
|
||||
# Get list of modified files and add them
|
||||
git status --porcelain | while read -r status file; do
|
||||
# Remove leading status and whitespace
|
||||
file=$(echo "$file" | sed 's/^.* //')
|
||||
echo "Adding changed file: $file"
|
||||
git add "$file"
|
||||
done
|
||||
|
||||
echo "changes=true" >> $GITHUB_OUTPUT
|
||||
|
||||
# Store the list of changed files
|
||||
CHANGED_FILES=$(git status --porcelain)
|
||||
echo "changed_files<<EOF" >> $GITHUB_ENV
|
||||
echo "$CHANGED_FILES" >> $GITHUB_ENV
|
||||
echo "EOF" >> $GITHUB_ENV
|
||||
else
|
||||
echo "No changes detected"
|
||||
echo "changes=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Create Pull Request
|
||||
if: steps.check-changes.outputs.changes == 'true'
|
||||
uses: peter-evans/create-pull-request@v5
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
commit-message: "update(root_certs): Update root certificates $(date +'%Y-%m-%d')"
|
||||
title: "update(root_certs) $(date +'%Y-%m-%d')"
|
||||
body: |
|
||||
Automated root certificates update
|
||||
|
||||
${{ env.cert_output }}
|
||||
|
||||
## Changed Files:
|
||||
```
|
||||
${{ env.changed_files }}
|
||||
```
|
||||
branch: certs/update-root-certs
|
||||
base: main
|
||||
delete-branch: true
|
||||
labels:
|
||||
- "automation"
|
||||
- "root-certs"
94 .github/workflows/update-sqlite3.yml (vendored)
@@ -1,94 +0,0 @@
|
||||
name: Update SQLite3
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 6 * * 0" # Run weekly
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
check-update:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Check SQLite version
|
||||
id: check-version
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Get current version from the header file using SQLITE_VERSION_NUMBER
|
||||
CURRENT_VERSION_NUM=$(grep -o '#define SQLITE_VERSION_NUMBER [0-9]\+' src/bun.js/bindings/sqlite/sqlite3_local.h | awk '{print $3}' | tr -d '\n\r')
|
||||
if [ -z "$CURRENT_VERSION_NUM" ]; then
|
||||
echo "Error: Could not find SQLITE_VERSION_NUMBER in sqlite3_local.h"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Convert numeric version to semantic version for display
|
||||
CURRENT_MAJOR=$((CURRENT_VERSION_NUM / 1000000))
|
||||
CURRENT_MINOR=$((($CURRENT_VERSION_NUM / 1000) % 1000))
|
||||
CURRENT_PATCH=$((CURRENT_VERSION_NUM % 1000))
|
||||
CURRENT_VERSION="$CURRENT_MAJOR.$CURRENT_MINOR.$CURRENT_PATCH"
|
||||
|
||||
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
|
||||
echo "current_num=$CURRENT_VERSION_NUM" >> $GITHUB_OUTPUT
|
||||
|
||||
# Fetch SQLite download page
|
||||
DOWNLOAD_PAGE=$(curl -sL https://sqlite.org/download.html)
|
||||
if [ -z "$DOWNLOAD_PAGE" ]; then
|
||||
echo "Error: Failed to fetch SQLite download page"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Extract latest version and year from the amalgamation link
|
||||
LATEST_INFO=$(echo "$DOWNLOAD_PAGE" | grep -o 'sqlite-amalgamation-[0-9]\{7\}.zip' | head -n1)
|
||||
LATEST_YEAR=$(echo "$DOWNLOAD_PAGE" | grep -o '[0-9]\{4\}/sqlite-amalgamation-[0-9]\{7\}.zip' | head -n1 | cut -d'/' -f1 | tr -d '\n\r')
|
||||
LATEST_VERSION_NUM=$(echo "$LATEST_INFO" | grep -o '[0-9]\{7\}' | tr -d '\n\r')
|
||||
|
||||
if [ -z "$LATEST_VERSION_NUM" ] || [ -z "$LATEST_YEAR" ]; then
|
||||
echo "Error: Could not extract latest version info"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Convert numeric version to semantic version for display
|
||||
LATEST_MAJOR=$((10#$LATEST_VERSION_NUM / 1000000))
|
||||
LATEST_MINOR=$((($LATEST_VERSION_NUM / 10000) % 100))
|
||||
LATEST_PATCH=$((10#$LATEST_VERSION_NUM % 1000))
|
||||
LATEST_VERSION="$LATEST_MAJOR.$LATEST_MINOR.$LATEST_PATCH"
|
||||
|
||||
echo "latest=$LATEST_VERSION" >> $GITHUB_OUTPUT
|
||||
echo "latest_year=$LATEST_YEAR" >> $GITHUB_OUTPUT
|
||||
echo "latest_num=$LATEST_VERSION_NUM" >> $GITHUB_OUTPUT
|
||||
|
||||
# Debug output
|
||||
echo "Current version: $CURRENT_VERSION ($CURRENT_VERSION_NUM)"
|
||||
echo "Latest version: $LATEST_VERSION ($LATEST_VERSION_NUM)"
|
||||
|
||||
- name: Update SQLite if needed
|
||||
if: success() && steps.check-version.outputs.current_num < steps.check-version.outputs.latest_num
|
||||
run: |
|
||||
./scripts/update-sqlite-amalgamation.sh ${{ steps.check-version.outputs.latest_num }} ${{ steps.check-version.outputs.latest_year }}
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current_num < steps.check-version.outputs.latest_num
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
src/bun.js/bindings/sqlite/sqlite3.c
|
||||
src/bun.js/bindings/sqlite/sqlite3_local.h
|
||||
commit-message: "deps: update sqlite to ${{ steps.check-version.outputs.latest }}"
|
||||
title: "deps: update sqlite to ${{ steps.check-version.outputs.latest }}"
|
||||
delete-branch: true
|
||||
branch: deps/update-sqlite
|
||||
body: |
|
||||
## What does this PR do?
|
||||
|
||||
Updates SQLite to version ${{ steps.check-version.outputs.latest }}
|
||||
|
||||
Compare: https://sqlite.org/src/vdiff?from=${{ steps.check-version.outputs.current }}&to=${{ steps.check-version.outputs.latest }}
|
||||
|
||||
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-sqlite3.yml)
|
||||
.github/workflows/update-vendor.yml (vendored)
@@ -1,79 +0,0 @@
name: Update vendor

on:
  schedule:
    - cron: "0 4 * * 0"
  workflow_dispatch:

jobs:
  check-update:
    runs-on: ubuntu-latest
    permissions:
      contents: write
      pull-requests: write

    strategy:
      matrix:
        package:
          - elysia

    steps:
      - uses: actions/checkout@v4
      - uses: oven-sh/setup-bun@v2

      - name: Check version
        id: check-version
        run: |
          set -euo pipefail

          # Extract the commit hash from the line after COMMIT
          current=$(bun -p '(await Bun.file("test/vendor.json").json()).filter(v=>v.package===process.argv[1])[0].tag' ${{ matrix.package }})
          repository=$(bun -p '(await Bun.file("test/vendor.json").json()).filter(v=>v.package===process.argv[1])[0].repository' ${{ matrix.package }} | cut -d'/' -f4,5)

          if [ -z "$current" ]; then
            echo "Error: Could not find COMMIT line in test/vendor.json"
            exit 1
          fi

          echo "current=$current" >> $GITHUB_OUTPUT
          echo "repository=$repository" >> $GITHUB_OUTPUT

          LATEST_RELEASE=$(curl -sL https://api.github.com/repos/${repository}/releases/latest)
          if [ -z "$LATEST_RELEASE" ]; then
            echo "Error: Failed to fetch latest release from GitHub API"
            exit 1
          fi

          LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
          if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
            echo "Error: Could not extract tag name from GitHub API response"
            exit 1
          fi

          echo "latest=$LATEST_TAG" >> $GITHUB_OUTPUT

      - name: Update version if needed
        if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
        run: |
          set -euo pipefail
          bun -e 'await Bun.write("test/vendor.json", JSON.stringify((await Bun.file("test/vendor.json").json()).map(v=>{if(v.package===process.argv[1])v.tag=process.argv[2];return v;}), null, 2) + "\n")' ${{ matrix.package }} ${{ steps.check-version.outputs.latest }}

      - name: Create Pull Request
        if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
        uses: peter-evans/create-pull-request@v7
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          add-paths: |
            test/vendor.json
          commit-message: "deps: update ${{ matrix.package }} to ${{ steps.check-version.outputs.latest }} (${{ steps.check-version.outputs.latest }})"
          title: "deps: update ${{ matrix.package }} to ${{ steps.check-version.outputs.latest }}"
          delete-branch: true
          branch: deps/update-${{ matrix.package }}
          body: |
            ## What does this PR do?

            Updates ${{ matrix.package }} to version ${{ steps.check-version.outputs.latest }}

            Compare: https://github.com/${{ steps.check-version.outputs.repository }}/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}

            Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-vendor.yml)
.github/workflows/update-zstd.yml (vendored)
@@ -1,99 +0,0 @@
name: Update zstd

on:
  schedule:
    - cron: "0 1 * * 0"
  workflow_dispatch:

jobs:
  check-update:
    runs-on: ubuntu-latest
    permissions:
      contents: write
      pull-requests: write

    steps:
      - uses: actions/checkout@v4

      - name: Check zstd version
        id: check-version
        run: |
          set -euo pipefail

          # Extract the commit hash from the line after COMMIT
          CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/CloneZstd.cmake)

          if [ -z "$CURRENT_VERSION" ]; then
            echo "Error: Could not find COMMIT line in CloneZstd.cmake"
            exit 1
          fi

          # Validate that it looks like a git hash
          if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
            echo "Error: Invalid git hash format in CloneZstd.cmake"
            echo "Found: $CURRENT_VERSION"
            echo "Expected: 40 character hexadecimal string"
            exit 1
          fi

          echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT

          LATEST_RELEASE=$(curl -sL https://api.github.com/repos/facebook/zstd/releases/latest)
          if [ -z "$LATEST_RELEASE" ]; then
            echo "Error: Failed to fetch latest release from GitHub API"
            exit 1
          fi

          LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
          if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
            echo "Error: Could not extract tag name from GitHub API response"
            exit 1
          fi

          LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/facebook/zstd/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
          if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
            echo "Error: Could not fetch SHA for tag $LATEST_TAG"
            exit 1
          fi
          LATEST_SHA=$(curl -sL "https://api.github.com/repos/facebook/zstd/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
          if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
            echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
            exit 1
          fi

          if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
            echo "Error: Invalid SHA format received from GitHub"
            echo "Found: $LATEST_SHA"
            echo "Expected: 40 character hexadecimal string"
            exit 1
          fi

          echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
          echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT

      - name: Update version if needed
        if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
        run: |
          set -euo pipefail
          # Handle multi-line format where COMMIT and its value are on separate lines
          sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/CloneZstd.cmake

      - name: Create Pull Request
        if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
        uses: peter-evans/create-pull-request@v7
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          add-paths: |
            cmake/targets/CloneZstd.cmake
          commit-message: "deps: update zstd to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
          title: "deps: update zstd to ${{ steps.check-version.outputs.tag }}"
          delete-branch: true
          branch: deps/update-zstd
          body: |
            ## What does this PR do?

            Updates zstd to version ${{ steps.check-version.outputs.tag }}

            Compare: https://github.com/facebook/zstd/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}

            Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-zstd.yml)
.github/workflows/vscode-release.yml (vendored)
@@ -1,52 +0,0 @@
name: VSCode Extension Publish
on:
  workflow_dispatch:
    inputs:
      version:
        description: "Version to publish (e.g. 0.0.25) - Check the marketplace for the latest version"
        required: true
        type: string

jobs:
  publish:
    name: "Publish to VS Code Marketplace"
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup Bun
        uses: ./.github/actions/setup-bun
        with:
          bun-version: "1.2.18"

      - name: Install dependencies (root)
        run: bun install

      - name: Install dependencies
        run: bun install
        working-directory: packages/bun-vscode

      - name: Set Version
        run: bun pm version ${{ github.event.inputs.version }} --no-git-tag-version --allow-same-version
        working-directory: packages/bun-vscode

      - name: Build (inspector protocol)
        run: bun install && bun run build
        working-directory: packages/bun-inspector-protocol

      - name: Build (vscode extension)
        run: bun run build
        working-directory: packages/bun-vscode

      - name: Publish
        if: success()
        run: bunx vsce publish
        env:
          VSCE_PAT: ${{ secrets.VSCODE_EXTENSION }}
        working-directory: packages/bun-vscode/extension

      - uses: actions/upload-artifact@v4
        with:
          name: bun-vscode-${{ github.event.inputs.version }}.vsix
          path: packages/bun-vscode/extension/bun-vscode-${{ github.event.inputs.version }}.vsix
.github/workflows/zig-fmt.yml (vendored, new file)
@@ -0,0 +1,87 @@
name: zig-fmt

env:
  ZIG_VERSION: 0.12.0-dev.163+6780a6bbf

on:
  pull_request:
    branches:
      - main
      - jarred/test-actions
    paths:
      - "src/**/*.zig"
      - "src/*.zig"
  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:

jobs:
  zig-fmt:
    name: zig fmt
    runs-on: ubuntu-latest
    outputs:
      zig_fmt_errs: ${{ steps.fmt.outputs.zig_fmt_errs }}
    steps:
      - uses: actions/checkout@v3
        with:
          submodules: recursive
      - name: Install zig
        run: |
          curl https://ziglang.org/builds/zig-linux-x86_64-${{env.ZIG_VERSION}}.tar.xz -L -o zig.tar.xz
          tar -xf zig.tar.xz
          echo "$(pwd)/zig-linux-x86_64-${{env.ZIG_VERSION}}" >> $GITHUB_PATH
      - name: Run zig fmt
        id: fmt
        run: |
          zig fmt --check src/*.zig src/**/*.zig 2> zig-fmt.err > zig-fmt.err2 || echo "Failed"
          delimiter="$(openssl rand -hex 8)"
          echo "zig_fmt_errs<<${delimiter}" >> "${GITHUB_OUTPUT}"

          if [ -s zig-fmt.err ]; then
            echo "// The following errors occurred:" >> "${GITHUB_OUTPUT}"
            cat zig-fmt.err >> "${GITHUB_OUTPUT}"
          fi

          if [ -s zig-fmt.err2 ]; then
            echo "// The following files were not formatted:" >> "${GITHUB_OUTPUT}"
            cat zig-fmt.err2 >> "${GITHUB_OUTPUT}"
          fi

          echo "${delimiter}" >> "${GITHUB_OUTPUT}"
      - name: Comment on PR
        if: steps.fmt.outputs.zig_fmt_errs != ''
        uses: thollander/actions-comment-pull-request@v2
        with:
          comment_tag: zig-fmt
          message: |
            ❌ @${{ github.actor }} `zig fmt` reported errors. Consider configuring your text editor to [auto-format on save](https://github.com/ziglang/vscode-zig)

            ```zig
            // # zig fmt --check src/*.zig src/**/*.zig
            ${{ steps.fmt.outputs.zig_fmt_errs }}
            ```

            To one-off fix this manually, run:

            ```sh
            zig fmt src/*.zig src/**/*.zig
            ```

            <sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
            <sup>zig v${{env.ZIG_VERSION}}</sup>

      - name: Uncomment on PR
        if: steps.fmt.outputs.zig_fmt_errs == ''
        uses: thollander/actions-comment-pull-request@v2
        with:
          comment_tag: zig-fmt
          mode: upsert
          create_if_not_exists: false
          message: |
            ✅ `zig fmt` errors have been resolved. Thank you.

            <sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
            <sup>zig v${{env.ZIG_VERSION}}</sup>

      - name: Fail the job
        if: steps.fmt.outputs.zig_fmt_errs != ''
        run: exit 1
.gitignore (vendored)
@@ -1,196 +1,135 @@
.claude/settings.local.json
.direnv
.DS_Store
.env
.envrc
.eslintcache
.gdb_history
.idea
.next
.ninja_deps
.ninja_log
.npm
.npmrc
.npm.gz
.parcel-cache
.swcrc
.trace
.uuid
.vs
.vscode/clang*
.vscode/cpp*
.zig-cache
.bake-debug
*.a
*.bc
*.big
*.blob
*.bun
*.crash
*.database
*.db
*.dmg
*.dSYM
*.generated.ts
*.jsb
*.lib
*.log
zig-cache
packages/*/*.wasm
*.o
*.a
profile.json

node_modules
.swcrc
yarn.lock
dist
*.tmp
*.log
*.out.js
*.out.refresh.js
*.pdb
*.sqlite
*.swp
*.tmp
*.trace
*.wat
*.zip
**/.verdaccio-db.json
**/*.dir
**/*.pdb
**/*.sln*
**/*.vcxproj*
**/package-lock.json
/.cache
/.webkit-cache
/build-*/
/bun-webkit
/kcov-out
/test-report.json
/test-report.md
/test.js
/test.ts
/test.zig
/testdir
build
build.ninja
bun-binary
bun-mimalloc
bun-nomimalloc
bun-singlehtreaded
bun-test-scratch
bun-zigld
cmake_install.cmake
CMakeCache.txt
CMakeFiles
cold-jsc-start
cold-jsc-start.d
compile_commands.json
*.wat
zig-out
pnpm-lock.yaml
README.md.template
src/deps/zig-clap/example
src/deps/zig-clap/README.md
src/deps/zig-clap/.github
src/deps/zig-clap/.gitattributes
out
outdir

.trace
cover
coverage
coverv
dist
esbuilddir
examples/lotta-modules/bun-nofscache
examples/lotta-modules/bun-old
examples/lotta-modules/bun-yday
failing-tests.txt
*.trace
github
make-dev-stats.csv
misctools/fetch
misctools/machbench
misctools/sha
myscript.sh
node_modules
node_modules_*
out
out.*
outcss
outdir
out
.parcel-cache
esbuilddir
*.bun
parceldist
esbuilddir
outdir/
packages/*/*.wasm
packages/bun-*/*.o
outcss
.next
txt.js
.idea
.vscode/cpp*
.vscode/clang*

node_modules_*
*.jsb
*.zip
bun-zigld
bun-singlehtreaded
bun-nomimalloc
bun-mimalloc
examples/lotta-modules/bun-yday
examples/lotta-modules/bun-old
examples/lotta-modules/bun-nofscache

src/node-fallbacks/out/*
src/node-fallbacks/node_modules
sign.json
release/
*.dmg
sign.*.json
packages/debug-*
packages/bun-cli/postinstall.js
packages/bun-*/bun
packages/bun-*/bun-profile
packages/bun-*/debug-bun
packages/bun-cli/bin/*
packages/bun-*/*.o
packages/bun-cli/postinstall.js

packages/bun-cli/bin/*
bun-test-scratch
misctools/fetch

src/deps/libiconv
src/deps/openssl
src/tests.zig
*.blob
src/deps/s2n-tls
.npm
.npm.gz

bun-binary

src/deps/PLCrashReporter/

*.dSYM
*.crash
misctools/sha
packages/bun-wasm/*.mjs
packages/bun-wasm/*.cjs
packages/bun-wasm/*.map
packages/bun-wasm/*.js
packages/bun-wasm/*.d.ts
packages/bun-wasm/*.d.cts
packages/bun-wasm/*.d.mts
packages/bun-wasm/*.d.ts
packages/bun-wasm/*.js
packages/bun-wasm/*.map
packages/bun-wasm/*.mjs
packages/debug-*
parceldist
pnpm-lock.yaml
profile.json
README.md.template
release/
scripts/env.local
sign.*.json
sign.json
src/bake/generated.ts
src/generated_enum_extractor.zig
src/bun.js/bindings-obj
src/bun.js/bindings/GeneratedJS2Native.zig
src/bun.js/bindings/GeneratedBindings.zig
src/bun.js/debug-bindings-obj
src/deps/zig-clap/.gitattributes
src/deps/zig-clap/.github
src/deps/zig-clap/example
src/deps/zig-clap/README.md
*.bc

src/fallback.version
src/js/out/DebugPath.h
src/js/out/functions*
src/js/out/modules*
src/js/out/tmp
src/node-fallbacks/node_modules
src/node-fallbacks/out/*
src/runtime.version
src/tests.zig
*.sqlite
*.database
*.db
misctools/machbench
*.big
.eslintcache

bun-webkit

src/deps/c-ares/build
src/bun.js/bindings-obj
src/bun.js/debug-bindings-obj

failing-tests.txt
test.txt
test/js/bun/glob/fixtures
test/node.js/upstream
tsconfig.tsbuildinfo
txt.js
x64
yarn.lock
zig-cache
zig-out
test/node.js/upstream
.zig-cache
scripts/env.local
*.generated.ts
src/bake/generated.ts
test/cli/install/registry/packages/publish-pkg-*
test/cli/install/registry/packages/@secret/publish-pkg-8
test/js/third_party/prisma/prisma/sqlite/dev.db-journal
tmp
codegen-for-zig-team.tar.gz
myscript.sh

# Dependencies
/vendor
cold-jsc-start
cold-jsc-start.d

# Dependencies (before CMake)
# These can be removed in the far future
/src/bun.js/WebKit
/src/deps/boringssl
/src/deps/brotli
/src/deps/c*ares
/src/deps/libarchive
/src/deps/libdeflate
/src/deps/libuv
/src/deps/lol*html
/src/deps/ls*hpack
/src/deps/mimalloc
/src/deps/picohttpparser
/src/deps/tinycc
/src/deps/WebKit
/src/deps/zig
/src/deps/zlib
/src/deps/zstd
/test.ts

# Generated files
src/js/out/modules*
src/js/out/functions*
src/js/out/tmp
src/js/out/DebugPath.h

.buildkite/ci.yml
*.sock
scratch*.{js,ts,tsx,cjs,mjs}
make-dev-stats.csv

*.bun-build

scripts/lldb-inline

# We regenerate these in all the build scripts
cmake/sources/*.txt
.uuid
tsconfig.tsbuildinfo
.gitmodules (vendored, new file)
@@ -0,0 +1,77 @@
[submodule "src/deps/picohttpparser"]
  path = src/deps/picohttpparser
  url = https://github.com/h2o/picohttpparser.git
  ignore = dirty
  depth = 1
  shallow = true
  fetchRecurseSubmodules = false
[submodule "src/javascript/jsc/WebKit"]
  path = src/bun.js/WebKit
  url = https://github.com/oven-sh/WebKit.git
  ignore = dirty
  depth = 1
  update = none
  shallow = true
  fetchRecurseSubmodules = false
[submodule "src/deps/mimalloc"]
  path = src/deps/mimalloc
  url = https://github.com/Jarred-Sumner/mimalloc.git
  ignore = dirty
  depth = 1
  shallow = true
  fetchRecurseSubmodules = false
[submodule "src/deps/zlib"]
  path = src/deps/zlib
  url = https://github.com/cloudflare/zlib.git
  ignore = dirty
  depth = 1
  shallow = true
  fetchRecurseSubmodules = false
[submodule "src/deps/libarchive"]
  path = src/deps/libarchive
  url = https://github.com/libarchive/libarchive.git
  ignore = dirty
  depth = 1
  shallow = true
  fetchRecurseSubmodules = false
[submodule "src/deps/boringssl"]
  path = src/deps/boringssl
  url = https://github.com/oven-sh/boringssl.git
  ignore = dirty
  depth = 1
  shallow = true
  fetchRecurseSubmodules = false
[submodule "src/deps/lol-html"]
  path = src/deps/lol-html
  url = https://github.com/cloudflare/lol-html
  ignore = dirty
  depth = 1
  shallow = true
  fetchRecurseSubmodules = false
[submodule "src/deps/uws"]
  path = src/deps/uws
  url = https://github.com/Jarred-Sumner/uWebSockets
  ignore = dirty
  depth = 1
  shallow = true
  fetchRecurseSubmodules = true
[submodule "src/deps/tinycc"]
  path = src/deps/tinycc
  url = https://github.com/Jarred-Sumner/tinycc.git
  ignore = dirty
  depth = 1
  shallow = true
  fetchRecurseSubmodules = false
[submodule "src/deps/c-ares"]
  path = src/deps/c-ares
  url = https://github.com/c-ares/c-ares.git
[submodule "src/deps/zstd"]
  path = src/deps/zstd
  url = https://github.com/facebook/zstd.git
  ignore = dirty
[submodule "src/deps/base64"]
  path = src/deps/base64
  url = https://github.com/aklomp/base64.git
  ignore = dirty
  depth = 1
  shallow = true
.mailmap
@@ -1,2 +0,0 @@
# To learn more about git's mailmap: https://ntietz.com/blog/git-mailmap-for-name-changes
chloe caruso <git@paperclover.net> <me@paperdave.net>
@@ -1,14 +1,13 @@
src/fallback.html
src/bun.js/WebKit
vendor
test/snapshots
test/js/deno
test/node.js
src/js/out
src/*.out.js
src/*out.*.js
src/deps
src/test/fixtures
src/react-refresh.js
*.min.js
test/snippets
test/js/node/test
test/napi/node-napi-tests
bun.lock

# the output codeblocks need to stay minified
docs/bundler/minifier.mdx
test/snapshots
test/snapshots-no-hmr
test/js/deno/*.test.ts
test/js/deno/**/*.test.ts
bench/react-hello-world/react-hello-world.node.js
.prettierrc
@@ -1,30 +0,0 @@
{
  "arrowParens": "avoid",
  "printWidth": 120,
  "trailingComma": "all",
  "useTabs": false,
  "quoteProps": "preserve",
  "overrides": [
    {
      "files": [".vscode/*.json"],
      "options": {
        "parser": "jsonc",
        "quoteProps": "preserve",
        "singleQuote": false,
        "trailingComma": "all"
      }
    },
    {
      "files": ["*.md"],
      "options": {
        "printWidth": 80
      }
    },
    {
      "files": ["src/codegen/bindgenv2/**/*.ts", "*.bindv2.ts"],
      "options": {
        "printWidth": 100
      }
    }
  ]
}
.prettierrc.cjs (new file)
@@ -0,0 +1,15 @@
module.exports = {
  arrowParens: "avoid",
  printWidth: 120,
  trailingComma: "all",
  useTabs: false,
  quoteProps: "preserve",
  overrides: [
    {
      files: ["*.md"],
      options: {
        printWidth: 80,
      },
    },
  ],
};
.scripts/make-dev-timer.ts (new file)
@@ -0,0 +1,21 @@
// I would have made this a bash script but there isn't an easy way to track
// time in bash sub-second cross platform.
import fs from "fs";
const start = Date.now() + 5;
const result = Bun.spawnSync(process.argv.slice(2), {
  stdio: ["inherit", "inherit", "inherit"],
});
const end = Date.now();
const diff = (Math.max(Math.round(end - start), 0) / 1000).toFixed(3);
const success = result.exitCode === 0;
try {
  const line = `${new Date().toISOString()}, ${success ? "success" : "fail"}, ${diff}\n`;
  if (fs.existsSync(".scripts/make-dev-stats.csv")) {
    fs.appendFileSync(".scripts/make-dev-stats.csv", line);
  } else {
    fs.writeFileSync(".scripts/make-dev-stats.csv", line);
  }
} catch {
  // Ignore
}
process.exit(result.exitCode);
.scripts/postinstall.sh (new executable file)
@@ -0,0 +1,13 @@
#!/bin/bash
set -euxo pipefail

# if bun-webkit node_modules directory exists
if [ -d ./node_modules/bun-webkit ]; then
  rm -f bun-webkit
  # get the first matching bun-webkit-* directory name
  ln -s ./node_modules/$(ls ./node_modules | grep bun-webkit- | head -n 1) ./bun-webkit
fi

# sets up vscode C++ intellisense
rm -f .vscode/clang++
ln -s $(which clang++-15 || which clang++) .vscode/clang++ 2>/dev/null
.scripts/write-versions.sh (new file)
@@ -0,0 +1,32 @@
#!/bin/bash
set -euxo pipefail

WEBKIT_VERSION=$(git rev-parse HEAD:./src/bun.js/WebKit)
MIMALLOC_VERSION=$(git rev-parse HEAD:./src/deps/mimalloc)
LIBARCHIVE_VERSION=$(git rev-parse HEAD:./src/deps/libarchive)
PICOHTTPPARSER_VERSION=$(git rev-parse HEAD:./src/deps/picohttpparser)
BORINGSSL_VERSION=$(git rev-parse HEAD:./src/deps/boringssl)
ZLIB_VERSION=$(git rev-parse HEAD:./src/deps/zlib)
UWS_VERSION=$(git rev-parse HEAD:./src/deps/uws)
LOLHTML=$(git rev-parse HEAD:./src/deps/lol-html)
TINYCC=$(git rev-parse HEAD:./src/deps/tinycc)
C_ARES=$(git rev-parse HEAD:./src/deps/c-ares)
USOCKETS=$(cd src/deps/uws/uSockets && git rev-parse HEAD)

rm -rf src/generated_versions_list.zig
echo "// AUTO-GENERATED FILE. Created via .scripts/write-versions.sh" >src/generated_versions_list.zig
echo "" >>src/generated_versions_list.zig
echo "pub const boringssl = \"$BORINGSSL_VERSION\";" >>src/generated_versions_list.zig
echo "pub const libarchive = \"$LIBARCHIVE_VERSION\";" >>src/generated_versions_list.zig
echo "pub const mimalloc = \"$MIMALLOC_VERSION\";" >>src/generated_versions_list.zig
echo "pub const picohttpparser = \"$PICOHTTPPARSER_VERSION\";" >>src/generated_versions_list.zig
echo "pub const uws = \"$UWS_VERSION\";" >>src/generated_versions_list.zig
echo "pub const webkit = \"$WEBKIT_VERSION\";" >>src/generated_versions_list.zig
echo "pub const zig = @import(\"std\").fmt.comptimePrint(\"{}\", .{@import(\"builtin\").zig_version});" >>src/generated_versions_list.zig
echo "pub const zlib = \"$ZLIB_VERSION\";" >>src/generated_versions_list.zig
echo "pub const tinycc = \"$TINYCC\";" >>src/generated_versions_list.zig
echo "pub const lolhtml = \"$LOLHTML\";" >>src/generated_versions_list.zig
echo "pub const c_ares = \"$C_ARES\";" >>src/generated_versions_list.zig
echo "" >>src/generated_versions_list.zig

zig fmt src/generated_versions_list.zig
@@ -1,2 +0,0 @@
[type.md]
extend-ignore-words-re = ["^ba"]
Some files were not shown because too many files have changed in this diff.