Compare commits


14 Commits

Author SHA1 Message Date
Colin McDonnell 7c9352392d Update readme and regen 2023-06-09 19:21:49 -07:00
Jarred Sumner fef9853d5a wip 2023-06-05 02:02:13 -07:00
Jarred Sumner 7dae496847 wip 2023-06-04 19:59:22 -07:00
Jarred Sumner 92b060c6e2 fix some CLI things 2023-06-04 19:06:02 -07:00
Jarred Sumner 2a64e8b3bb fixup 2023-06-04 19:05:42 -07:00
Jarred Sumner fde3b7fbb6 Fix build 2023-06-04 18:54:26 -07:00
Ashcon Partovi c16e769383 Add types and sample heapsnapshot for JSC and V8 2023-06-04 18:54:26 -07:00
Ashcon Partovi 5badc728d0 Add a DevTools client 2023-06-04 18:54:26 -07:00
Jarred Sumner c4b3b321c2 slightly more progress 2023-06-04 18:54:26 -07:00
Jarred Sumner cf599e77d9 Fix C++ compile errors 2023-06-04 18:54:26 -07:00
Jarred Sumner b727689a9b pushing this but it doesn't fix anything 2023-06-04 18:54:26 -07:00
Ashcon Partovi 4a36470588 Add script to generate DevTools protocol types for JSC and V8 2023-06-04 18:54:26 -07:00
Jarred Sumner ca08cf6b0a Update BunInspector.cpp 2023-06-04 18:54:26 -07:00
Jarred Sumner 10bd0fac3a WIP support inspector 2023-06-04 18:54:26 -07:00
37561 changed files with 427842 additions and 1629371 deletions

View File

@@ -1,170 +0,0 @@
ARG LLVM_VERSION="18"
ARG REPORTED_LLVM_VERSION="18.1.8"
ARG OLD_BUN_VERSION="1.1.38"
ARG DEFAULT_CFLAGS="-mno-omit-leaf-frame-pointer -fno-omit-frame-pointer -ffunction-sections -fdata-sections -faddrsig -fno-unwind-tables -fno-asynchronous-unwind-tables"
ARG DEFAULT_CXXFLAGS="-flto=full -fwhole-program-vtables -fforce-emit-vtables"
ARG BUILDKITE_AGENT_TAGS="queue=linux,os=linux,arch=${TARGETARCH}"
FROM --platform=$BUILDPLATFORM ubuntu:20.04 as base-arm64
FROM --platform=$BUILDPLATFORM ubuntu:18.04 as base-amd64
FROM base-$TARGETARCH as base
ARG LLVM_VERSION
ARG OLD_BUN_VERSION
ARG TARGETARCH
ARG DEFAULT_CXXFLAGS
ARG DEFAULT_CFLAGS
ARG REPORTED_LLVM_VERSION
ENV DEBIAN_FRONTEND=noninteractive \
CI=true \
DOCKER=true
RUN echo "Acquire::Queue-Mode \"host\";" > /etc/apt/apt.conf.d/99-apt-queue-mode.conf \
&& echo "Acquire::Timeout \"120\";" >> /etc/apt/apt.conf.d/99-apt-timeout.conf \
&& echo "Acquire::Retries \"3\";" >> /etc/apt/apt.conf.d/99-apt-retries.conf \
&& echo "APT::Install-Recommends \"false\";" >> /etc/apt/apt.conf.d/99-apt-install-recommends.conf \
&& echo "APT::Install-Suggests \"false\";" >> /etc/apt/apt.conf.d/99-apt-install-suggests.conf
RUN apt-get update && apt-get install -y --no-install-recommends \
wget curl git python3 python3-pip ninja-build \
software-properties-common apt-transport-https \
ca-certificates gnupg lsb-release unzip \
libxml2-dev ruby ruby-dev bison gawk perl make golang \
&& add-apt-repository ppa:ubuntu-toolchain-r/test \
&& apt-get update \
&& apt-get install -y gcc-13 g++-13 libgcc-13-dev libstdc++-13-dev \
libasan6 libubsan1 libatomic1 libtsan0 liblsan0 \
libgfortran5 libc6-dev \
&& wget https://apt.llvm.org/llvm.sh \
&& chmod +x llvm.sh \
&& ./llvm.sh ${LLVM_VERSION} all \
&& rm llvm.sh
RUN --mount=type=tmpfs,target=/tmp \
cmake_version="3.30.5" && \
if [ "$TARGETARCH" = "arm64" ]; then \
cmake_url="https://github.com/Kitware/CMake/releases/download/v${cmake_version}/cmake-${cmake_version}-linux-aarch64.sh"; \
else \
cmake_url="https://github.com/Kitware/CMake/releases/download/v${cmake_version}/cmake-${cmake_version}-linux-x86_64.sh"; \
fi && \
wget -O /tmp/cmake.sh "$cmake_url" && \
sh /tmp/cmake.sh --skip-license --prefix=/usr
RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-13 130 \
--slave /usr/bin/g++ g++ /usr/bin/g++-13 \
--slave /usr/bin/gcc-ar gcc-ar /usr/bin/gcc-ar-13 \
--slave /usr/bin/gcc-nm gcc-nm /usr/bin/gcc-nm-13 \
--slave /usr/bin/gcc-ranlib gcc-ranlib /usr/bin/gcc-ranlib-13
RUN echo "ARCH_PATH=$([ "$TARGETARCH" = "arm64" ] && echo "aarch64-linux-gnu" || echo "x86_64-linux-gnu")" >> /etc/environment \
&& echo "BUN_ARCH=$([ "$TARGETARCH" = "arm64" ] && echo "aarch64" || echo "x64")" >> /etc/environment
ENV LD_LIBRARY_PATH=/usr/lib/gcc/${ARCH_PATH}/13:/usr/lib/${ARCH_PATH} \
LIBRARY_PATH=/usr/lib/gcc/${ARCH_PATH}/13:/usr/lib/${ARCH_PATH} \
CPLUS_INCLUDE_PATH=/usr/include/c++/13:/usr/include/${ARCH_PATH}/c++/13 \
C_INCLUDE_PATH=/usr/lib/gcc/${ARCH_PATH}/13/include \
CFLAGS=${DEFAULT_CFLAGS} \
CXXFLAGS="${DEFAULT_CFLAGS} ${DEFAULT_CXXFLAGS}"
RUN if [ "$TARGETARCH" = "arm64" ]; then \
export ARCH_PATH="aarch64-linux-gnu"; \
else \
export ARCH_PATH="x86_64-linux-gnu"; \
fi \
&& mkdir -p /usr/lib/gcc/${ARCH_PATH}/13 \
&& ln -sf /usr/lib/${ARCH_PATH}/libstdc++.so.6 /usr/lib/gcc/${ARCH_PATH}/13/ \
&& echo "/usr/lib/gcc/${ARCH_PATH}/13" > /etc/ld.so.conf.d/gcc-13.conf \
&& echo "/usr/lib/${ARCH_PATH}" >> /etc/ld.so.conf.d/gcc-13.conf \
&& ldconfig
RUN for f in /usr/lib/llvm-${LLVM_VERSION}/bin/*; do ln -sf "$f" /usr/bin; done \
&& ln -sf /usr/bin/clang-${LLVM_VERSION} /usr/bin/clang \
&& ln -sf /usr/bin/clang++-${LLVM_VERSION} /usr/bin/clang++ \
&& ln -sf /usr/bin/lld-${LLVM_VERSION} /usr/bin/lld \
&& ln -sf /usr/bin/lldb-${LLVM_VERSION} /usr/bin/lldb \
&& ln -sf /usr/bin/clangd-${LLVM_VERSION} /usr/bin/clangd \
&& ln -sf /usr/bin/llvm-ar-${LLVM_VERSION} /usr/bin/llvm-ar \
&& ln -sf /usr/bin/ld.lld /usr/bin/ld \
&& ln -sf /usr/bin/clang /usr/bin/cc \
&& ln -sf /usr/bin/clang++ /usr/bin/c++
ENV CC="clang" \
CXX="clang++" \
AR="llvm-ar-${LLVM_VERSION}" \
RANLIB="llvm-ranlib-${LLVM_VERSION}" \
LD="lld-${LLVM_VERSION}"
RUN --mount=type=tmpfs,target=/tmp \
bash -c '\
set -euxo pipefail && \
source /etc/environment && \
echo "Downloading bun-v${OLD_BUN_VERSION}/bun-linux-$BUN_ARCH.zip from https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/bun-v${OLD_BUN_VERSION}/bun-linux-$BUN_ARCH.zip" && \
curl -fsSL https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/bun-v${OLD_BUN_VERSION}/bun-linux-$BUN_ARCH.zip -o /tmp/bun.zip && \
unzip /tmp/bun.zip -d /tmp/bun && \
mv /tmp/bun/*/bun /usr/bin/bun && \
chmod +x /usr/bin/bun'
ENV LLVM_VERSION=${REPORTED_LLVM_VERSION}
WORKDIR /workspace
FROM --platform=$BUILDPLATFORM base as buildkite
ARG BUILDKITE_AGENT_TAGS
# Install Rust nightly
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y \
&& export PATH=$HOME/.cargo/bin:$PATH \
&& rustup install nightly \
&& rustup default nightly
RUN ARCH=$(if [ "$TARGETARCH" = "arm64" ]; then echo "arm64"; else echo "amd64"; fi) && \
echo "Downloading buildkite" && \
curl -fsSL "https://github.com/buildkite/agent/releases/download/v3.87.0/buildkite-agent-linux-${ARCH}-3.87.0.tar.gz" -o /tmp/buildkite-agent.tar.gz && \
mkdir -p /tmp/buildkite-agent && \
tar -xzf /tmp/buildkite-agent.tar.gz -C /tmp/buildkite-agent && \
mv /tmp/buildkite-agent/buildkite-agent /usr/bin/buildkite-agent
RUN mkdir -p /var/cache/buildkite-agent /var/log/buildkite-agent /var/run/buildkite-agent /etc/buildkite-agent /var/lib/buildkite-agent/cache/bun
COPY ../*/agent.mjs /var/bun/scripts/
ENV BUN_INSTALL_CACHE=/var/lib/buildkite-agent/cache/bun
ENV BUILDKITE_AGENT_TAGS=${BUILDKITE_AGENT_TAGS}
WORKDIR /var/bun/scripts
ENV PATH=/root/.cargo/bin:$PATH
CMD ["bun", "/var/bun/scripts/agent.mjs", "start"]
FROM --platform=$BUILDPLATFORM base as bun-build-linux-local
ARG LLVM_VERSION
WORKDIR /workspace/bun
COPY . /workspace/bun
# Install Rust nightly
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y \
&& export PATH=$HOME/.cargo/bin:$PATH \
&& rustup install nightly \
&& rustup default nightly
ENV PATH=/root/.cargo/bin:$PATH
ENV LLVM_VERSION=${REPORTED_LLVM_VERSION}
RUN --mount=type=tmpfs,target=/workspace/bun/build \
ls -la \
&& bun run build:release \
&& mkdir -p /target \
&& cp -r /workspace/bun/build/release/bun /target/bun

View File

@@ -1,122 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail
# Check if running as root
if [ "$EUID" -ne 0 ]; then
echo "error: must run as root"
exit 1
fi
# Check OS compatibility
if ! command -v dnf &> /dev/null; then
echo "error: this script requires dnf (RHEL/Fedora/CentOS)"
exit 1
fi
# Ensure /tmp/agent.mjs, /tmp/Dockerfile are present
if [ ! -f /tmp/agent.mjs ] || [ ! -f /tmp/Dockerfile ]; then
# Print each missing file
if [ ! -f /tmp/agent.mjs ]; then
echo "error: /tmp/agent.mjs is missing"
fi
if [ ! -f /tmp/Dockerfile ]; then
echo "error: /tmp/Dockerfile is missing"
fi
exit 1
fi
# Install Docker
dnf update -y
dnf install -y docker
systemctl enable docker
systemctl start docker || {
echo "error: failed to start Docker"
exit 1
}
# Create builder
docker buildx create --name builder --driver docker-container --bootstrap --use || {
echo "error: failed to create Docker buildx builder"
exit 1
}
# Set up the Buildkite agent container to start on boot
cat << 'EOF' > /etc/systemd/system/buildkite-agent.service
[Unit]
Description=Buildkite Docker Container
After=docker.service network-online.target
Requires=docker.service network-online.target
[Service]
TimeoutStartSec=0
Restart=always
RestartSec=5
ExecStartPre=-/usr/bin/docker stop buildkite
ExecStartPre=-/usr/bin/docker rm buildkite
ExecStart=/usr/bin/docker run \
--name buildkite \
--restart=unless-stopped \
--network host \
-v /var/run/docker.sock:/var/run/docker.sock \
-v /tmp:/tmp \
buildkite:latest
[Install]
WantedBy=multi-user.target
EOF
echo "Building Buildkite image"
# Clean up any previous build artifacts
rm -rf /tmp/fakebun
mkdir -p /tmp/fakebun/scripts /tmp/fakebun/.buildkite
# Copy required files
cp /tmp/agent.mjs /tmp/fakebun/scripts/ || {
echo "error: failed to copy agent.mjs"
exit 1
}
cp /tmp/Dockerfile /tmp/fakebun/.buildkite/Dockerfile || {
echo "error: failed to copy Dockerfile"
exit 1
}
cd /tmp/fakebun || {
echo "error: failed to change directory"
exit 1
}
# Build the Buildkite image
docker buildx build \
--platform $(uname -m | sed 's/aarch64/linux\/arm64/;s/x86_64/linux\/amd64/') \
--tag buildkite:latest \
--target buildkite \
-f .buildkite/Dockerfile \
--load \
. || {
echo "error: Docker build failed"
exit 1
}
# Create container to ensure image is cached in AMI
docker container create \
--name buildkite \
--restart=unless-stopped \
buildkite:latest || {
echo "error: failed to create buildkite container"
exit 1
}
# Reload systemd to pick up new service
systemctl daemon-reload
# Enable the service, but don't start it yet
systemctl enable buildkite-agent || {
echo "error: failed to enable buildkite-agent service"
exit 1
}
echo "Bootstrap complete"
echo "To start the Buildkite agent, run: "
echo " systemctl start buildkite-agent"

View File

@@ -1,16 +0,0 @@
# Uploads the latest CI workflow to Buildkite.
# https://buildkite.com/docs/pipelines/defining-steps
#
# Changes to this file must be manually edited here:
# https://buildkite.com/bun/bun/settings/steps
steps:
- if: "build.pull_request.repository.fork"
block: ":eyes:"
prompt: "Did you review the PR?"
blocked_state: "running"
- label: ":pipeline:"
agents:
queue: "build-darwin"
command:
- "node .buildkite/ci.mjs"

File diff suppressed because it is too large

View File

@@ -1,7 +0,0 @@
import { getCommit, getSecret } from "../../scripts/utils.mjs";
console.log("Submitting...");
const response = await fetch(getSecret("BENCHMARK_URL") + "?tag=_&commit=" + getCommit() + "&artifact_url=_", {
method: "POST",
});
console.log("Got status " + response.status);

View File

@@ -1,252 +0,0 @@
#!/bin/bash
set -eo pipefail
function assert_main() {
if [ -z "$BUILDKITE_REPO" ]; then
echo "error: Cannot find repository for this build"
exit 1
fi
if [ -z "$BUILDKITE_COMMIT" ]; then
echo "error: Cannot find commit for this build"
exit 1
fi
if [ -n "$BUILDKITE_PULL_REQUEST_REPO" ] && [ "$BUILDKITE_REPO" != "$BUILDKITE_PULL_REQUEST_REPO" ]; then
echo "error: Cannot upload release from a fork"
exit 1
fi
if [ "$BUILDKITE_PULL_REQUEST" != "false" ]; then
echo "error: Cannot upload release from a pull request"
exit 1
fi
if [ "$BUILDKITE_BRANCH" != "main" ]; then
echo "error: Cannot upload release from a branch other than main"
exit 1
fi
}
function assert_buildkite_agent() {
if ! command -v "buildkite-agent" &> /dev/null; then
echo "error: Cannot find buildkite-agent, please install it:"
echo "https://buildkite.com/docs/agent/v3/install"
exit 1
fi
}
function assert_github() {
assert_command "gh" "gh" "https://github.com/cli/cli#installation"
assert_buildkite_secret "GITHUB_TOKEN"
# gh expects the token in $GH_TOKEN
export GH_TOKEN="$GITHUB_TOKEN"
}
function assert_aws() {
assert_command "aws" "awscli" "https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html"
for secret in "AWS_ACCESS_KEY_ID" "AWS_SECRET_ACCESS_KEY" "AWS_ENDPOINT"; do
assert_buildkite_secret "$secret"
done
assert_buildkite_secret "AWS_BUCKET" --skip-redaction
}
function assert_sentry() {
assert_command "sentry-cli" "getsentry/tools/sentry-cli" "https://docs.sentry.io/cli/installation/"
for secret in "SENTRY_AUTH_TOKEN" "SENTRY_ORG" "SENTRY_PROJECT"; do
assert_buildkite_secret "$secret"
done
}
function run_command() {
set -x
"$@"
{ set +x; } 2>/dev/null
}
function assert_command() {
local command="$1"
local package="$2"
local help_url="$3"
if ! command -v "$command" &> /dev/null; then
echo "warning: $command is not installed, installing..."
if command -v brew &> /dev/null; then
HOMEBREW_NO_AUTO_UPDATE=1 run_command brew install "$package"
else
echo "error: Cannot install $command, please install it"
if [ -n "$help_url" ]; then
echo ""
echo "hint: See $help_url for help"
fi
exit 1
fi
fi
}
function assert_buildkite_secret() {
local key="$1"
local value=$(buildkite-agent secret get "$key" ${@:2})
if [ -z "$value" ]; then
echo "error: Cannot find $key secret"
echo ""
echo "hint: Create a secret named $key with a value:"
echo "https://buildkite.com/docs/pipelines/buildkite-secrets"
exit 1
fi
export "$key"="$value"
}
function release_tag() {
local version="$1"
if [ "$version" == "canary" ]; then
echo "canary"
else
echo "bun-v$version"
fi
}
function create_sentry_release() {
local version="$1"
local release="$version"
if [ "$version" == "canary" ]; then
release="$BUILDKITE_COMMIT-canary"
fi
run_command sentry-cli releases new "$release" --finalize
run_command sentry-cli releases set-commits "$release" --auto --ignore-missing
if [ "$version" == "canary" ]; then
run_command sentry-cli deploys new --env="canary" --release="$release"
fi
}
function download_buildkite_artifact() {
local name="$1"
local dir="$2"
if [ -z "$dir" ]; then
dir="."
fi
run_command buildkite-agent artifact download "$name" "$dir"
if [ ! -f "$dir/$name" ]; then
echo "error: Cannot find Buildkite artifact: $name"
exit 1
fi
}
function upload_github_asset() {
local version="$1"
local tag="$(release_tag "$version")"
local file="$2"
run_command gh release upload "$tag" "$file" --clobber --repo "$BUILDKITE_REPO"
# Sometimes the upload fails, maybe this is a race condition in the gh CLI?
while [ "$(gh release view "$tag" --repo "$BUILDKITE_REPO" | grep -c "$file")" -eq 0 ]; do
echo "warn: Uploading $file to $tag failed, retrying..."
sleep "$((RANDOM % 5 + 1))"
run_command gh release upload "$tag" "$file" --clobber --repo "$BUILDKITE_REPO"
done
}
function update_github_release() {
local version="$1"
local tag="$(release_tag "$version")"
if [ "$tag" == "canary" ]; then
sleep 5 # There is possibly a race condition where this overwrites artifacts?
run_command gh release edit "$tag" --repo "$BUILDKITE_REPO" \
--notes "This release of Bun corresponds to the commit: $BUILDKITE_COMMIT"
fi
}
function upload_s3_file() {
local folder="$1"
local file="$2"
run_command aws --endpoint-url="$AWS_ENDPOINT" s3 cp "$file" "s3://$AWS_BUCKET/$folder/$file"
}
function send_discord_announcement() {
local value=$(buildkite-agent secret get "BUN_ANNOUNCE_CANARY_WEBHOOK_URL")
if [ -z "$value" ]; then
echo "warn: BUN_ANNOUNCE_CANARY_WEBHOOK_URL not set, skipping Discord announcement"
return
fi
local version="$1"
local commit="$BUILDKITE_COMMIT"
local short_sha="${commit:0:7}"
local commit_url="https://github.com/oven-sh/bun/commit/$commit"
if [ "$version" == "canary" ]; then
local json_payload=$(cat <<EOF
{
"embeds": [{
"title": "New Bun Canary now available",
"description": "A new canary build of Bun has been automatically uploaded ([${short_sha}](${commit_url})). To upgrade, run:\n\n\`\`\`shell\nbun upgrade --canary\n\`\`\`\nCommit: \`${commit}\`",
"color": 16023551,
"timestamp": "$(date -u +%Y-%m-%dT%H:%M:%SZ)"
}]
}
EOF
)
curl -H "Content-Type: application/json" \
-d "$json_payload" \
-sf \
"$value" >/dev/null
fi
}
function create_release() {
assert_main
assert_buildkite_agent
assert_github
assert_aws
assert_sentry
local tag="$1" # 'canary' or 'x.y.z'
local artifacts=(
bun-darwin-aarch64.zip
bun-darwin-aarch64-profile.zip
bun-linux-aarch64.zip
bun-linux-aarch64-profile.zip
bun-linux-x64.zip
bun-linux-x64-profile.zip
bun-linux-x64-baseline.zip
bun-linux-x64-baseline-profile.zip
bun-linux-aarch64-musl.zip
bun-linux-aarch64-musl-profile.zip
bun-linux-x64-musl.zip
bun-linux-x64-musl-profile.zip
bun-linux-x64-musl-baseline.zip
bun-linux-x64-musl-baseline-profile.zip
bun-windows-x64.zip
bun-windows-x64-profile.zip
bun-windows-x64-baseline.zip
bun-windows-x64-baseline-profile.zip
)
function upload_artifact() {
local artifact="$1"
download_buildkite_artifact "$artifact"
if [ "$tag" == "canary" ]; then
upload_s3_file "releases/$BUILDKITE_COMMIT-canary" "$artifact" &
else
upload_s3_file "releases/$BUILDKITE_COMMIT" "$artifact" &
fi
upload_s3_file "releases/$tag" "$artifact" &
upload_github_asset "$tag" "$artifact" &
wait
}
for artifact in "${artifacts[@]}"; do
upload_artifact "$artifact"
done
update_github_release "$tag"
create_sentry_release "$tag"
send_discord_announcement "$tag"
}
function assert_canary() {
if [ -z "$CANARY" ] || [ "$CANARY" == "0" ]; then
echo "warn: Skipping release because this is not a canary build"
exit 0
fi
}
assert_canary
create_release "canary"

View File

@@ -1,9 +0,0 @@
WarningsAsErrors: "*"
FormatStyle: webkit
Checks: >
-*,
clang-analyzer-*,
-clang-analyzer-optin.core.EnumCastOutOfRange,
-clang-analyzer-webkit.UncountedLambdaCapturesChecker,
-clang-analyzer-webkit.RefCntblBaseVirtualDtor

View File

@@ -1,8 +0,0 @@
Index:
Background: Skip # Disable slow background indexing of these files.
CompileFlags:
CompilationDatabase: build/debug
Diagnostics:
UnusedIncludes: None

View File

@@ -1,27 +0,0 @@
---
description: How to build Bun
globs:
---
# How to build Bun
## CMake
Bun is built using CMake, which you can find in `CMakeLists.txt` and in the `cmake/` directory.
* `CMakeLists.txt`
* `cmake/`
* `Globals.cmake` - macros and functions used by all the other files
* `Options.cmake` - build options for configuring the build (e.g. debug/release mode)
* `CompilerFlags.cmake` - compiler and linker flags used by all the targets
* `tools/` - setup scripts for various build tools (e.g. llvm, zig, webkit, rust, etc.)
* `targets/` - targets for bun and its dependencies (e.g. brotli, boringssl, libuv, etc.)
## How to
There are `package.json` scripts that make it easy to build Bun without calling CMake directly, for example:
```sh
bun run build # builds a debug build: `build/debug/bun-debug`
bun run build:release # builds a release build: `build/release/bun`
bun run build:assert # builds a release build with debug assertions: `build/assert/bun`
```

View File

@@ -1,139 +0,0 @@
---
description: Writing HMR/Dev Server tests
globs: test/bake/*
---
# Writing HMR/Dev Server tests
Dev server tests validate that hot-reloading is robust, correct, and reliable. Remember to write thorough, yet concise tests.
## File Structure
- `test/bake/bake-harness.ts` - shared utilities and test harness
- primary test functions `devTest` / `prodTest` / `devAndProductionTest`
- class `Dev` (controls subprocess for dev server)
- class `Client` (controls a happy-dom subprocess for having the page open)
- more helpers
- `test/bake/client-fixture.mjs` - subprocess for what `Client` controls. It loads a page and uses IPC to query parts of the page, run JavaScript, and much more.
- `test/bake/dev/*.test.ts` - these call `devTest` to test dev server and hot reloading
- `test/bake/dev-and-prod.ts` - these use `devAndProductionTest` to run the same test in dev and production mode. These tests cannot really test hot reloading for obvious reasons.
## Categories
bundle.test.ts - Bundle tests are tests concerning bundling bugs that only occur in DevServer.
css.test.ts - CSS tests concern bundling bugs with CSS files
plugins.test.ts - Plugin tests concern plugins in development mode.
ecosystem.test.ts - These tests involve ensuring certain libraries are correct. It is preferred to test more concrete bugs than testing entire packages.
esm.test.ts - ESM tests are about various esm features in development mode.
html.test.ts - HTML tests are tests relating to HTML files themselves.
react-spa.test.ts - Tests relating to React, our react-refresh transform, and basic server component transforms.
sourcemap.test.ts - Tests verifying source-maps are correct.
## `devTest` Basics
A test takes in two primary inputs: `files` and `async test(dev) {`
```ts
import { devTest, emptyHtmlFile } from "../bake-harness";
devTest("html file is watched", {
files: {
"index.html": emptyHtmlFile({
scripts: ["/script.ts"],
body: "<h1>Hello</h1>",
}),
"script.ts": `
console.log("hello");
`,
},
async test(dev) {
await dev.fetch("/").expect.toInclude("<h1>Hello</h1>");
await dev.fetch("/").expect.toInclude("<h1>Hello</h1>");
await dev.patch("index.html", {
find: "Hello",
replace: "World",
});
await dev.fetch("/").expect.toInclude("<h1>World</h1>");
// Works
await using c = await dev.client("/");
await c.expectMessage("hello");
// Editing HTML reloads
await c.expectReload(async () => {
await dev.patch("index.html", {
find: "World",
replace: "Hello",
});
await dev.fetch("/").expect.toInclude("<h1>Hello</h1>");
});
await c.expectMessage("hello");
await c.expectReload(async () => {
await dev.patch("index.html", {
find: "Hello",
replace: "Bar",
});
await dev.fetch("/").expect.toInclude("<h1>Bar</h1>");
});
await c.expectMessage("hello");
await c.expectReload(async () => {
await dev.patch("script.ts", {
find: "hello",
replace: "world",
});
});
await c.expectMessage("world");
},
});
```
`files` holds the initial state, and the callback runs with the server running. `dev.fetch()` runs HTTP requests, while `dev.client()` opens a browser instance to the code.
Functions `dev.write`, `dev.patch`, and `dev.delete` mutate the filesystem. Do not use `node:fs` APIs, as the dev server versions are hooked to wait for the hot reload to complete and for all connected clients to receive the changes.
When a change performs a hard-reload, that must be explicitly annotated with `expectReload`. This tells `client-fixture.mjs` that the test is meant to reload the page once; all other hard reloads automatically fail the test.
Clients have `console.log` instrumented so that any unasserted logs fail the test. This makes it more obvious when an extra reload or re-evaluation happens. Messages are awaited via `c.expectMessage("log")`, or with multiple arguments if there are multiple logs.
## Testing for bundling errors
By default, a client opening a page to an error will fail the test. This makes testing errors explicit.
```ts
devTest("import then create", {
files: {
"index.html": `
<!DOCTYPE html>
<html>
<head></head>
<body>
<script type="module" src="/script.ts"></script>
</body>
</html>
`,
"script.ts": `
import data from "./data";
console.log(data);
`,
},
async test(dev) {
const c = await dev.client("/", {
errors: ['script.ts:1:18: error: Could not resolve: "./data"'],
});
await c.expectReload(async () => {
await dev.write("data.ts", "export default 'data';");
});
await c.expectMessage("data");
},
});
```
Many functions take an options value that lets you declare the errors the operation will produce. For example, this delete causes a resolution failure.
```ts
await dev.delete("other.ts", {
errors: ['index.ts:1:16: error: Could not resolve: "./other"'],
});
```

View File

@@ -1,408 +0,0 @@
---
description: JavaScript class implemented in C++
globs: *.cpp
---
# Implementing JavaScript classes in C++
If there is a publicly accessible Constructor and Prototype, then there are 3 classes:
- If there are C++ class members, we need a destructor, so use `class Foo : public JSC::DestructibleObject`. If there are no C++ class fields (only JS properties), we usually don't need a class at all; we can instead use `JSC::constructEmptyObject(vm, structure)` and `putDirectOffset` like in [NodeFSBinding.cpp](mdc:src/bun.js/bindings/NodeFSBinding.cpp) (see the sketch after this list).
- class FooPrototype : public JSC::JSNonFinalObject
- class FooConstructor : public JSC::InternalFunction
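For the no-C++-fields case, a minimal sketch of the plain-object path is below. The helper name and property names are placeholders; the real NodeFSBinding.cpp additionally caches a Structure so it can use `putDirectOffset` instead of `putDirect`:
```c++
#include "root.h"
#include <JavaScriptCore/ObjectConstructor.h>

// Hypothetical helper: returns a plain JS object with JS-only properties,
// so no C++ class, destructor, or iso subspace is needed.
static JSC::JSObject* createFooObject(JSC::JSGlobalObject* globalObject)
{
    auto& vm = globalObject->vm();
    // 2 = inline property capacity hint.
    JSC::JSObject* object = JSC::constructEmptyObject(globalObject, globalObject->objectPrototype(), 2);
    object->putDirect(vm, JSC::Identifier::fromString(vm, "size"_s), JSC::jsNumber(0), 0);
    object->putDirect(vm, JSC::Identifier::fromString(vm, "mode"_s), JSC::jsNumber(0), 0);
    return object;
}
```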
If there is no publicly accessible Constructor, only the Prototype and the class are necessary. In some cases, we can avoid the prototype entirely (but that's rare).
If there are C++ fields on the Foo class, the Foo class will need an iso subspace added to [DOMClientIsoSubspaces.h](mdc:src/bun.js/bindings/webcore/DOMClientIsoSubspaces.h) and [DOMIsoSubspaces.h](mdc:src/bun.js/bindings/webcore/DOMIsoSubspaces.h). Prototype and Constructor do not need subspaces.
Usually you'll need to #include "root.h" at the top of C++ files or you'll get lint errors.
Generally, defining the subspace looks like this:
```c++
class Foo : public JSC::DestructibleObject {
// ...
template<typename MyClassT, JSC::SubspaceAccess mode>
static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm)
{
if constexpr (mode == JSC::SubspaceAccess::Concurrently)
return nullptr;
return WebCore::subspaceForImpl<MyClassT, WebCore::UseCustomHeapCellType::No>(
vm,
[](auto& spaces) { return spaces.m_clientSubspaceFor${MyClassT}.get(); },
[](auto& spaces, auto&& space) { spaces.m_clientSubspaceFor${MyClassT} = std::forward<decltype(space)>(space); },
[](auto& spaces) { return spaces.m_subspaceFor${MyClassT}.get(); },
[](auto& spaces, auto&& space) { spaces.m_subspaceFor${MyClassT} = std::forward<decltype(space)>(space); });
}
```
It's better to put it in the .cpp file instead of the .h file, when possible.
## Defining properties
Define properties on the prototype. Use a const HashTableValues like this:
```C++
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckEmail);
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckHost);
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckIP);
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckIssued);
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckPrivateKey);
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncToJSON);
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncToLegacyObject);
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncToString);
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncVerify);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_ca);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_fingerprint);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_fingerprint256);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_fingerprint512);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_subject);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_subjectAltName);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_infoAccess);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_keyUsage);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_issuer);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_issuerCertificate);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_publicKey);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_raw);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_serialNumber);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_validFrom);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_validTo);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_validFromDate);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_validToDate);
static const HashTableValue JSX509CertificatePrototypeTableValues[] = {
{ "ca"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_ca, 0 } },
{ "checkEmail"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncCheckEmail, 2 } },
{ "checkHost"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncCheckHost, 2 } },
{ "checkIP"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncCheckIP, 1 } },
{ "checkIssued"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncCheckIssued, 1 } },
{ "checkPrivateKey"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncCheckPrivateKey, 1 } },
{ "fingerprint"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_fingerprint, 0 } },
{ "fingerprint256"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_fingerprint256, 0 } },
{ "fingerprint512"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_fingerprint512, 0 } },
{ "infoAccess"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_infoAccess, 0 } },
{ "issuer"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_issuer, 0 } },
{ "issuerCertificate"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_issuerCertificate, 0 } },
{ "keyUsage"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_keyUsage, 0 } },
{ "publicKey"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_publicKey, 0 } },
{ "raw"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_raw, 0 } },
{ "serialNumber"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_serialNumber, 0 } },
{ "subject"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_subject, 0 } },
{ "subjectAltName"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_subjectAltName, 0 } },
{ "toJSON"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncToJSON, 0 } },
{ "toLegacyObject"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncToLegacyObject, 0 } },
{ "toString"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncToString, 0 } },
{ "validFrom"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_validFrom, 0 } },
{ "validFromDate"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessorOrValue), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_validFromDate, 0 } },
{ "validTo"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_validTo, 0 } },
{ "validToDate"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessorOrValue), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_validToDate, 0 } },
{ "verify"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncVerify, 1 } },
};
```
### Creating a prototype class
Follow a pattern like this:
```c++
class JSX509CertificatePrototype final : public JSC::JSNonFinalObject {
public:
using Base = JSC::JSNonFinalObject;
static constexpr unsigned StructureFlags = Base::StructureFlags;
static JSX509CertificatePrototype* create(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::Structure* structure)
{
JSX509CertificatePrototype* prototype = new (NotNull, allocateCell<JSX509CertificatePrototype>(vm)) JSX509CertificatePrototype(vm, structure);
prototype->finishCreation(vm);
return prototype;
}
template<typename, JSC::SubspaceAccess>
static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm)
{
return &vm.plainObjectSpace();
}
DECLARE_INFO;
static JSC::Structure* createStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::JSValue prototype)
{
auto* structure = JSC::Structure::create(vm, globalObject, prototype, JSC::TypeInfo(JSC::ObjectType, StructureFlags), info());
structure->setMayBePrototype(true);
return structure;
}
private:
JSX509CertificatePrototype(JSC::VM& vm, JSC::Structure* structure)
: Base(vm, structure)
{
}
void finishCreation(JSC::VM& vm);
};
const ClassInfo JSX509CertificatePrototype::s_info = { "X509Certificate"_s, &Base::s_info, nullptr, nullptr, CREATE_METHOD_TABLE(JSX509CertificatePrototype) };
void JSX509CertificatePrototype::finishCreation(VM& vm)
{
Base::finishCreation(vm);
reifyStaticProperties(vm, JSX509Certificate::info(), JSX509CertificatePrototypeTableValues, *this);
JSC_TO_STRING_TAG_WITHOUT_TRANSITION();
}
} // namespace Bun
```
### Getter definition:
```C++
JSC_DEFINE_CUSTOM_GETTER(jsX509CertificateGetter_ca, (JSGlobalObject * globalObject, EncodedJSValue thisValue, PropertyName))
{
VM& vm = globalObject->vm();
auto scope = DECLARE_THROW_SCOPE(vm);
JSX509Certificate* thisObject = jsDynamicCast<JSX509Certificate*>(JSValue::decode(thisValue));
if (UNLIKELY(!thisObject)) {
Bun::throwThisTypeError(*globalObject, scope, "JSX509Certificate"_s, "ca"_s);
return {};
}
return JSValue::encode(jsBoolean(thisObject->view().isCA()));
}
```
### Setter definition
```C++
JSC_DEFINE_CUSTOM_SETTER(jsImportMetaObjectSetter_require, (JSGlobalObject * jsGlobalObject, JSC::EncodedJSValue thisValue, JSC::EncodedJSValue encodedValue, PropertyName propertyName))
{
ImportMetaObject* thisObject = jsDynamicCast<ImportMetaObject*>(JSValue::decode(thisValue));
if (UNLIKELY(!thisObject))
return false;
JSValue value = JSValue::decode(encodedValue);
if (!value.isCell()) {
// TODO:
return true;
}
thisObject->requireProperty.set(thisObject->vm(), thisObject, value.asCell());
return true;
}
```
### Function definition
```C++
JSC_DEFINE_HOST_FUNCTION(jsX509CertificateProtoFuncToJSON, (JSGlobalObject * globalObject, CallFrame* callFrame))
{
VM& vm = globalObject->vm();
auto scope = DECLARE_THROW_SCOPE(vm);
auto *thisObject = jsDynamicCast<MyClassT*>(callFrame->thisValue());
if (UNLIKELY(!thisObject)) {
Bun::throwThisTypeError(*globalObject, scope, "MyClass"_s, "myFunctionName"_s);
return {};
}
return JSValue::encode(functionThatReturnsJSValue(vm, globalObject, thisObject));
}
```
### Constructor definition
```C++
JSC_DECLARE_HOST_FUNCTION(callStats);
JSC_DECLARE_HOST_FUNCTION(constructStats);
class JSStatsConstructor final : public JSC::InternalFunction {
public:
using Base = JSC::InternalFunction;
static constexpr unsigned StructureFlags = Base::StructureFlags;
static JSStatsConstructor* create(JSC::VM& vm, JSC::Structure* structure, JSC::JSObject* prototype)
{
JSStatsConstructor* constructor = new (NotNull, JSC::allocateCell<JSStatsConstructor>(vm)) JSStatsConstructor(vm, structure);
constructor->finishCreation(vm, prototype);
return constructor;
}
DECLARE_INFO;
template<typename CellType, JSC::SubspaceAccess>
static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm)
{
return &vm.internalFunctionSpace();
}
static JSC::Structure* createStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::JSValue prototype)
{
return JSC::Structure::create(vm, globalObject, prototype, JSC::TypeInfo(JSC::InternalFunctionType, StructureFlags), info());
}
private:
JSStatsConstructor(JSC::VM& vm, JSC::Structure* structure)
: Base(vm, structure, callStats, constructStats)
{
}
void finishCreation(JSC::VM& vm, JSC::JSObject* prototype)
{
Base::finishCreation(vm, 0, "Stats"_s);
putDirectWithoutTransition(vm, vm.propertyNames->prototype, prototype, JSC::PropertyAttribute::DontEnum | JSC::PropertyAttribute::DontDelete | JSC::PropertyAttribute::ReadOnly);
}
};
```
### Structure caching
If there's a class, prototype, and constructor:
1. Add the `JSC::LazyClassStructure` to [ZigGlobalObject.h](mdc:src/bun.js/bindings/ZigGlobalObject.h)
2. Initialize the class structure in [ZigGlobalObject.cpp](mdc:src/bun.js/bindings/ZigGlobalObject.cpp) in `void GlobalObject::finishCreation(VM& vm)`
3. Visit the class structure in visitChildren in [ZigGlobalObject.cpp](mdc:src/bun.js/bindings/ZigGlobalObject.cpp) in `void GlobalObject::visitChildrenImpl`
```c++#ZigGlobalObject.cpp
void GlobalObject::finishCreation(VM& vm) {
// ...
m_JSStatsBigIntClassStructure.initLater(
[](LazyClassStructure::Initializer& init) {
// Call the function to initialize our class structure.
Bun::initJSBigIntStatsClassStructure(init);
});
```
Then, implement the function that creates the structure:
```c++
void setupX509CertificateClassStructure(LazyClassStructure::Initializer& init)
{
auto* prototypeStructure = JSX509CertificatePrototype::createStructure(init.vm, init.global, init.global->objectPrototype());
auto* prototype = JSX509CertificatePrototype::create(init.vm, init.global, prototypeStructure);
auto* constructorStructure = JSX509CertificateConstructor::createStructure(init.vm, init.global, init.global->functionPrototype());
auto* constructor = JSX509CertificateConstructor::create(init.vm, init.global, constructorStructure, prototype);
auto* structure = JSX509Certificate::createStructure(init.vm, init.global, prototype);
init.setPrototype(prototype);
init.setStructure(structure);
init.setConstructor(constructor);
}
```
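For step 3 above, visiting the cached structure in `GlobalObject::visitChildrenImpl` is one line per member. A minimal sketch, with the member name taken from the example above and the rest of the method body elided:
```c++
template<typename Visitor>
void GlobalObject::visitChildrenImpl(JSCell* cell, Visitor& visitor)
{
    GlobalObject* thisObject = jsCast<GlobalObject*>(cell);
    Base::visitChildren(thisObject, visitor);
    // ... existing visits ...
    // Keep the lazily-created structure, prototype, and constructor alive across GC.
    thisObject->m_JSStatsBigIntClassStructure.visit(visitor);
}
```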
If there's only a class, use `JSC::LazyProperty<JSGlobalObject, Structure>` instead of `JSC::LazyClassStructure`:
1. Add the `JSC::LazyProperty<JSGlobalObject, Structure>` to @ZigGlobalObject.h
2. Initialize the class structure in @ZigGlobalObject.cpp in `void GlobalObject::finishCreation(VM& vm)`
3. Visit the lazy property in visitChildren in @ZigGlobalObject.cpp in `void GlobalObject::visitChildrenImpl`
```c++#ZigGlobalObject.cpp
void GlobalObject::finishCreation(VM& vm) {
// ...
m_myLazyProperty.initLater([](const JSC::LazyProperty<JSC::JSGlobalObject, JSC::Structure>::Initializer& init) {
init.set(Bun::initMyStructure(init.vm, reinterpret_cast<Zig::GlobalObject*>(init.owner)));
});
```
Then, implement the function that creates the structure:
```c++
Structure* setupX509CertificateStructure(JSC::VM& vm, Zig::GlobalObject* globalObject)
{
// If there is a prototype:
auto* prototypeStructure = JSX509CertificatePrototype::createStructure(vm, globalObject, globalObject->objectPrototype());
auto* prototype = JSX509CertificatePrototype::create(vm, globalObject, prototypeStructure);
// If there is no prototype, pass globalObject->objectPrototype() directly as the prototype instead.
return JSX509Certificate::createStructure(vm, globalObject, prototype);
}
```
Then, use the structure by calling `globalObject->m_myStructureName.get(globalObject)`
```C++
JSC_DEFINE_HOST_FUNCTION(x509CertificateConstructorConstruct, (JSGlobalObject * globalObject, CallFrame* callFrame))
{
VM& vm = globalObject->vm();
auto scope = DECLARE_THROW_SCOPE(vm);
if (!callFrame->argumentCount()) {
Bun::throwError(globalObject, scope, ErrorCode::ERR_MISSING_ARGS, "X509Certificate constructor requires at least one argument"_s);
return {};
}
JSValue arg = callFrame->uncheckedArgument(0);
if (!arg.isCell()) {
Bun::throwError(globalObject, scope, ErrorCode::ERR_INVALID_ARG_TYPE, "X509Certificate constructor argument must be a Buffer, TypedArray, or string"_s);
return {};
}
auto* zigGlobalObject = defaultGlobalObject(globalObject);
Structure* structure = zigGlobalObject->m_JSX509CertificateClassStructure.get(zigGlobalObject);
JSValue newTarget = callFrame->newTarget();
if (UNLIKELY(zigGlobalObject->m_JSX509CertificateClassStructure.constructor(zigGlobalObject) != newTarget)) {
auto scope = DECLARE_THROW_SCOPE(vm);
if (!newTarget) {
throwTypeError(globalObject, scope, "Class constructor X509Certificate cannot be invoked without 'new'"_s);
return {};
}
auto* functionGlobalObject = defaultGlobalObject(getFunctionRealm(globalObject, newTarget.getObject()));
RETURN_IF_EXCEPTION(scope, {});
structure = InternalFunction::createSubclassStructure(
globalObject, newTarget.getObject(), functionGlobalObject->m_JSX509CertificateClassStructure.get(functionGlobalObject));
scope.release();
}
return JSValue::encode(createX509Certificate(vm, globalObject, structure, arg));
}
```
### Expose to Zig
To expose the constructor to zig:
```c++
extern "C" JSC::EncodedJSValue Bun__JSBigIntStatsObjectConstructor(Zig::GlobalObject* globalobject)
{
return JSValue::encode(globalobject->m_JSStatsBigIntClassStructure.constructor(globalobject));
}
```
Zig:
```zig
extern "c" fn Bun__JSBigIntStatsObjectConstructor(*JSC.JSGlobalObject) JSC.JSValue;
pub const getBigIntStatsConstructor = Bun__JSBigIntStatsObjectConstructor;
```
To create an object (instance) of a JS class defined in C++ from Zig, follow the __toJS convention like this:
```c++
// X509* is whatever we need to create the object
extern "C" EncodedJSValue Bun__X509__toJS(Zig::GlobalObject* globalObject, X509* cert)
{
// ... implementation details
auto* structure = globalObject->m_JSX509CertificateClassStructure.get(globalObject);
return JSValue::encode(JSX509Certificate::create(globalObject->vm(), structure, globalObject, WTFMove(cert)));
}
```
And from Zig:
```zig
const X509 = opaque {
// ... class
extern fn Bun__X509__toJS(*JSC.JSGlobalObject, *X509) JSC.JSValue;
pub fn toJS(this: *X509, globalObject: *JSC.JSGlobalObject) JSC.JSValue {
return Bun__X509__toJS(globalObject, this);
}
};
```

View File

@@ -1,91 +0,0 @@
---
description: Writing tests for Bun
globs:
---
# Writing tests for Bun
## Where tests are found
You'll find all of Bun's tests in the `test/` directory.
* `test/`
* `cli/` - CLI command tests, like `bun install` or `bun init`
* `js/` - JavaScript & TypeScript tests
* `bun/` - `Bun` APIs tests, separated by category, for example: `glob/` for `Bun.Glob` tests
* `node/` - Node.js module tests, separated by module, for example: `assert/` for `node:assert` tests
* `test/` - Vendored Node.js tests, taken from the Node.js repository (does not conform to Bun's test style)
* `web/` - Web API tests, separated by category, for example: `fetch/` for `Request` and `Response` tests
* `third_party/` - npm package tests, to validate that basic usage works in Bun
* `napi/` - N-API tests
* `v8/` - V8 C++ API tests
* `bundler/` - Bundler, transpiler, CSS, and `bun build` tests
* `regression/issue/[number]` - Regression tests, always make one when fixing a particular issue
## How tests are written
Bun's tests are written as JavaScript and TypeScript files with the Jest-style APIs, like `test`, `describe`, and `expect`. They are tested using Bun's own test runner, `bun test`.
```js
import { describe, test, expect } from "bun:test";
import assert, { AssertionError } from "assert";
describe("assert(expr)", () => {
test.each([true, 1, "foo"])(`assert(%p) does not throw`, expr => {
expect(() => assert(expr)).not.toThrow();
});
test.each([false, 0, "", null, undefined])(`assert(%p) throws`, expr => {
expect(() => assert(expr)).toThrow(AssertionError);
});
});
```
## Testing conventions
* See `test/harness.ts` for common test utilities and helpers
* Be rigorous and test for edge-cases and unexpected inputs
* Use data-driven tests, e.g. `test.each`, to reduce boilerplate when possible
* When you need to test Bun as a CLI, use the following pattern:
```js
import { test, expect } from "bun:test";
import { spawn } from "bun";
import { bunExe, bunEnv } from "harness";
test("bun --version", async () => {
const { exited, stdout: stdoutStream, stderr: stderrStream } = spawn({
cmd: [bunExe(), "--version"],
env: bunEnv,
stdout: "pipe",
stderr: "pipe",
});
const [ exitCode, stdout, stderr ] = await Promise.all([
exited,
new Response(stdoutStream).text(),
new Response(stderrStream).text(),
]);
expect({ exitCode, stdout, stderr }).toMatchObject({
exitCode: 0,
stdout: expect.stringContaining(Bun.version),
stderr: "",
});
});
```
## Before writing a test
* If you are fixing a bug, write the test first and make sure it fails (as expected) with the canary version of Bun
* If you are fixing a Node.js compatibility bug, create a throw-away snippet of code and test that it works as you expect in Node.js, then that it fails (as expected) with the canary version of Bun
* When the expected behaviour is ambiguous, defer to matching what happens in Node.js
* Always attempt to find related tests in an existing test file before creating a new test file

View File

@@ -1,7 +0,0 @@
# Add directories or file patterns to ignore during indexing (e.g. foo/ or *.csv)
bench
vendor
*-fixture.{js,ts}
zig-cache
packages/bun-uws/fuzzing
build

1539
.docker/chrome.json Normal file

File diff suppressed because it is too large

14
.docker/chromium.pref Normal file
View File

@@ -0,0 +1,14 @@
# Note: 2 blank lines are required between entries
Package: *
Pin: release a=eoan
Pin-Priority: 500
Package: *
Pin: origin "ftp.debian.org"
Pin-Priority: 300
# Pattern includes 'chromium', 'chromium-browser' and similarly
# named dependencies:
Package: chromium*
Pin: origin "ftp.debian.org"
Pin-Priority: 700

View File

@@ -0,0 +1,8 @@
#!/bin/bash
set -euxo pipefail
name=$(openssl rand -hex 12)
id=$(docker create --name=bun-binary-$name $CONTAINER_TAG)
docker container cp bun-binary-$name:$BUN_RELEASE_DIR bun-binary
echo -e "bun-binary-$name"

3
.docker/debian.list Normal file
View File

@@ -0,0 +1,3 @@
deb http://deb.debian.org/debian buster main
deb http://deb.debian.org/debian buster-updates main
deb http://deb.debian.org/debian-security buster/updates main

View File

@@ -0,0 +1,34 @@
export DOCKER_BUILDKIT=1
export BUILDKIT_ARCH=$(uname -m)
export ARCH=${BUILDKIT_ARCH}
if [ "$BUILDKIT_ARCH" == "amd64" ]; then
export BUILDKIT_ARCH="amd64"
export ARCH=x64
fi
if [ "$BUILDKIT_ARCH" == "x86_64" ]; then
export BUILDKIT_ARCH="amd64"
export ARCH=x64
fi
if [ "$BUILDKIT_ARCH" == "arm64" ]; then
export BUILDKIT_ARCH="arm64"
export ARCH=aarch64
fi
if [ "$BUILDKIT_ARCH" == "aarch64" ]; then
export BUILDKIT_ARCH="arm64"
export ARCH=aarch64
fi
if [ "$BUILDKIT_ARCH" == "armv7l" ]; then
echo "Unsupported platform: $BUILDKIT_ARCH"
exit 1
fi
export BUILD_ID=$(cat build-id)
export CONTAINER_NAME=bun-linux-$ARCH
export DEBUG_CONTAINER_NAME=debug-bun-linux-$ARCH
export TEMP=/tmp/bun-0.0.$BUILD_ID

11
.docker/pull.sh Normal file
View File

@@ -0,0 +1,11 @@
#!/bin/bash
set -euxo pipefail
docker pull bunbunbunbun/bun-test-base:latest --platform=linux/amd64
docker pull bunbunbunbun/bun-base:latest --platform=linux/amd64
docker pull bunbunbunbun/bun-base-with-zig-and-webkit:latest --platform=linux/amd64
docker tag bunbunbunbun/bun-test-base:latest bun-test-base:latest
docker tag bunbunbunbun/bun-base:latest bun-base:latest
docker tag bunbunbunbun/bun-base-with-zig-and-webkit:latest bun-base-with-zig-and-webkit:latest

47
.docker/run-dockerfile.sh Normal file
View File

@@ -0,0 +1,47 @@
#!/bin/bash
source "dockerfile-common.sh"
export CONTAINER_NAME=$CONTAINER_NAME-local
rm -rf $TEMP
mkdir -p $TEMP
docker build . --target release --progress=plain -t $CONTAINER_NAME:latest --build-arg BUILDKIT_INLINE_CACHE=1 --platform=linux/$BUILDKIT_ARCH --cache-from $CONTAINER_NAME:latest
if (($?)); then
echo "Failed to build container"
exit 1
fi
id=$(docker create $CONTAINER_NAME:latest)
docker cp $id:/home/ubuntu/bun-release $TEMP/$CONTAINER_NAME
if (($?)); then
echo "Failed to cp container"
exit 1
fi
cd $TEMP
mkdir -p $TEMP/$CONTAINER_NAME $TEMP/$DEBUG_CONTAINER_NAME
mv $CONTAINER_NAME/bun-profile $DEBUG_CONTAINER_NAME/bun
zip -r $CONTAINER_NAME.zip $CONTAINER_NAME
zip -r $DEBUG_CONTAINER_NAME.zip $DEBUG_CONTAINER_NAME
docker rm -v $id
abs=$(realpath $TEMP/$CONTAINER_NAME.zip)
debug_abs=$(realpath $TEMP/$DEBUG_CONTAINER_NAME.zip)
case $(uname -s) in
"Linux") target="linux" ;;
*) target="other" ;;
esac
if [ "$target" = "linux" ]; then
if command -v bun >/dev/null; then
cp $TEMP/$CONTAINER_NAME/bun $(which bun)
cp $TEMP/$DEBUG_CONTAINER_NAME/bun $(which bun-profile)
fi
fi
echo "Saved to:"
echo $debug_abs
echo $abs

9
.docker/run-test.sh Executable file
View File

@@ -0,0 +1,9 @@
#!/bin/bash
set -euxo pipefail
bun install
bun install --cwd ./test/snippets
bun install --cwd ./test/scripts
make $BUN_TEST_NAME

5
.docker/runner.sh Normal file
View File

@@ -0,0 +1,5 @@
#!/bin/bash
set -euxo pipefail
docker container run --security-opt seccomp=.docker/chrome.json --env GITHUB_WORKSPACE=$GITHUB_WORKSPACE --env BUN_TEST_NAME=$BUN_TEST_NAME --ulimit memlock=-1:-1 --init --rm bun-test:latest

5
.docker/unit-tests.sh Normal file
View File

@@ -0,0 +1,5 @@
#!/bin/bash
set -euxo pipefail
docker container run --security-opt seccomp=.docker/chrome.json --env GITHUB_WORKSPACE=$GITHUB_WORKSPACE --ulimit memlock=-1:-1 --init --rm bun-unit-tests:latest

View File

@@ -1,21 +1,17 @@
**/*.a
**/*.o
**/.next
**/CMakeCache.txt
**/node_modules
.git
examples
node_modules
**/node_modules
src/bun.js/WebKit/LayoutTests
zig-out
zig-build
**/*.o
**/*.a
examples
**/.next
.git
src/bun.js/WebKit
**/CMakeCache.txt
packages/**/bun
packages/**/bun-profile
src/bun.js/WebKit
src/bun.js/WebKit/LayoutTests
zig-build
.zig-cache
zig-out
build
vendor
node_modules
*.trace
packages/bun-uws/fuzzing
zig-cache

View File

@@ -1,8 +0,0 @@
# https://EditorConfig.org
root = true
[*]
charset = utf-8
insert_final_newline = true
trim_trailing_whitespace = true
end_of_line = lf

67
.gitattributes vendored
View File

@@ -1,54 +1,33 @@
*.css text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.js text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.jsx text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.tsx text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.ts text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.c text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.cpp text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.cc text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.yml text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.toml text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.zig text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.rs text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.h text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.json text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.lock text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.map text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.md text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.mdc text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.mjs text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.mts text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.lockb binary diff=lockb
.vscode/launch.json linguist-generated
src/api/schema.d.ts linguist-generated
fixture.*.c linguist-generated
src/api/schema.js linguist-generated
*-fixture* linguist-generated
src/bun.js/bindings/ZigGeneratedCode.h linguist-generated
src/bun.js/bindings/ZigGeneratedCode.cpp linguist-generated
src/bun.js/bindings/headers.h linguist-generated
src/bun.js/bindings/headers.zig linguist-generated
packages/bun-uws/fuzzing/seed-corpus/**/* linguist-generated
src/bun.js/bindings/sqlite/sqlite3.c linguist-vendored
src/bun.js/bindings/sqlite/sqlite3_local.h linguist-vendored
*.lockb binary diff=lockb
*.zig text eol=lf
src/bun.js/bindings/simdutf.cpp linguist-vendored
src/bun.js/bindings/simdutf.h linguist-vendored
src/js/out/WebCoreJSBuiltins.cpp linguist-generated
src/js/out/WebCoreJSBuiltins.h linguist-generated
src/js/out/WebCoreJSBuiltins.d.ts linguist-generated
src/bun.js/bindings/ZigGeneratedClasses.h linguist-generated
src/bun.js/bindings/ZigGeneratedClasses.cpp linguist-generated
src/bun.js/bindings/ZigGeneratedCode.h linguist-generated
src/bun.js/bindings/ZigGeneratedCode.cpp linguist-generated
src/bun.js/bindings/headers.h linguist-generated
src/bun.js/bindings/headers.zig linguist-generated
src/bun.js/bindings/JSSink.h linguist-generated
src/bun.js/bindings/JSSink.zig linguist-generated
src/bun.js/bindings/ZigGeneratedClasses+DOMClientIsoSubspaces.h linguist-generated
src/bun.js/bindings/ZigGeneratedClasses+DOMIsoSubspaces.h linguist-generated
src/bun.js/bindings/ZigGeneratedClasses+lazyStructureHeader.h linguist-generated
src/bun.js/bindings/ZigGeneratedClasses+lazyStructureImpl.h linguist-generated
docs/**/* linguist-documentation
# Don't count tests in the language stats - https://github.com/github-linguist/linguist/blob/master/docs/overrides.md
test/**/* linguist-documentation
bench/**/* linguist-documentation
examples/**/* linguist-documentation
vendor/*.c linguist-vendored
vendor/brotli/** linguist-vendored
test/js/node/test/fixtures linguist-vendored
test/js/node/test/common linguist-vendored
test/js/bun/css/files linguist-vendored

View File

@@ -0,0 +1,35 @@
name: 📥 Install Problem
description: Report an issue during install or upgrade
labels: [bug, install]
body:
- type: markdown
attributes:
value: |
Thank you for submitting a bug report. It helps make Bun better.
If you need help or support using Bun, and are not reporting an issue, please
join our [Discord](https://discord.gg/CXdq2DP29u) server, where you can ask questions in the [`#help`](https://discord.gg/32EtH6p7HN) forum.
Please try to include as much information as possible.
- type: input
attributes:
label: What platform is your computer?
description: |
For MacOS and Linux: copy the output of `uname -mprs`
For Windows: copy the output of `"$([Environment]::OSVersion | ForEach-Object VersionString) $(if ([Environment]::Is64BitOperatingSystem) { "x64" } else { "x86" })"` in the PowerShell console
- type: textarea
attributes:
label: How did you attempt to install or upgrade?
description: Please provide the commands you ran to install or upgrade.
validations:
required: true
- type: textarea
attributes:
label: What do you see instead?
description: If possible, please provide text instead of a screenshot.
validations:
required: true
- type: textarea
attributes:
label: Additional information
description: Is there anything else you think we should know?

View File

@@ -1,8 +1,6 @@
name: 🐛 Bug Report
description: Report an issue that should be fixed
labels:
- bug
- needs triage
labels: [bug]
body:
- type: markdown
attributes:
@@ -12,15 +10,11 @@ body:
If you need help or support using Bun, and are not reporting a bug, please
join our [Discord](https://discord.gg/CXdq2DP29u) server, where you can ask questions in the [`#help`](https://discord.gg/32EtH6p7HN) forum.
Make sure you are running the [latest](https://bun.sh/docs/installation#upgrading) version of Bun.
The bug you are experiencing may already have been fixed.
Please try to include as much information as possible.
- type: input
attributes:
label: What version of Bun is running?
description: Copy the output of `bun --revision`
description: Copy the output of `bun -v`
- type: input
attributes:
label: What platform is your computer?


@@ -0,0 +1,24 @@
name: 🚀 Feature Request
description: Suggest an idea, feature, or enhancement
labels: [enhancement]
body:
- type: markdown
attributes:
value: |
Thank you for submitting an idea. It helps make Bun better.
If you want to discuss Bun, or learn how others are using Bun, please
join our [Discord](https://discord.gg/CXdq2DP29u) server, where you can share in the [`#feedback-ideas`](https://discord.gg/unwUnHBNqy) channel.
- type: textarea
attributes:
label: What is the problem this feature would solve?
validations:
required: true
- type: textarea
attributes:
label: What is the feature you are proposing to solve the problem?
validations:
required: true
- type: textarea
attributes:
label: What alternatives have you considered?


@@ -1,45 +0,0 @@
name: 🇹 TypeScript Type Bug Report
description: Report an issue with TypeScript types
labels: [bug, types]
body:
- type: markdown
attributes:
value: |
Thank you for submitting a bug report. It helps make Bun better.
If you need help or support using Bun, and are not reporting a bug, please
join our [Discord](https://discord.gg/CXdq2DP29u) server, where you can ask questions in the [`#help`](https://discord.gg/32EtH6p7HN) forum.
Make sure you are running the [latest](https://bun.sh/docs/installation#upgrading) version of Bun.
The bug you are experiencing may already have been fixed.
Please try to include as much information as possible.
- type: input
attributes:
label: What version of Bun is running?
description: Copy the output of `bun --revision`
- type: input
attributes:
label: What platform is your computer?
description: |
For macOS and Linux: copy the output of `uname -mprs`
For Windows: copy the output of `"$([Environment]::OSVersion | ForEach-Object VersionString) $(if ([Environment]::Is64BitOperatingSystem) { "x64" } else { "x86" })"` in the PowerShell console
- type: textarea
attributes:
label: What steps can reproduce the bug?
description: Explain the bug and provide a code snippet that can reproduce it.
validations:
required: true
- type: textarea
attributes:
label: What is the expected behavior?
description: If possible, please provide text instead of a screenshot.
- type: textarea
attributes:
label: What do you see instead?
description: If possible, please provide text instead of a screenshot.
- type: textarea
attributes:
label: Additional information
description: Is there anything else you think we should know?


@@ -1,24 +0,0 @@
name: 🚀 Feature Request
description: Suggest an idea, feature, or enhancement
labels: [enhancement]
body:
- type: markdown
attributes:
value: |
Thank you for submitting an idea. It helps make Bun better.
If you want to discuss Bun, or learn how others are using Bun, please
join our [Discord](https://discord.gg/CXdq2DP29u) server, where you can share in the [`#feedback`](https://discord.gg/unwUnHBNqy) channel.
- type: textarea
attributes:
label: What is the problem this feature would solve?
validations:
required: true
- type: textarea
attributes:
label: What is the feature you are proposing to solve the problem?
validations:
required: true
- type: textarea
attributes:
label: What alternatives have you considered?


@@ -1,27 +0,0 @@
name: Prefilled crash report
description: Report a crash in Bun
labels:
- crash
body:
- type: markdown
attributes:
value: |
**Thank you so much** for submitting a crash report. You're helping us make Bun more reliable for everyone!
- type: textarea
id: code
attributes:
label: How can we reproduce the crash?
description: Please provide a [minimal reproduction](https://stackoverflow.com/help/minimal-reproducible-example) using a GitHub repository, [Replit](https://replit.com/@replit/Bun) or [CodeSandbox](https://codesandbox.io/templates/bun)
- type: textarea
id: logs
attributes:
label: Relevant log output
description: Please copy and paste any relevant log output. This will be
automatically formatted into code, so no need for backticks.
render: shell
- type: textarea
id: remapped_trace
attributes:
label: Stack Trace (bun.report)
validations:
required: true


@@ -1,34 +0,0 @@
name: bun install crash report
description: Report a crash in bun install
labels:
- npm
- crash
body:
- type: markdown
attributes:
value: |
**Thank you so much** for submitting a crash report. You're helping us make Bun more reliable for everyone!
- type: textarea
id: package_json
attributes:
label: "`package.json` file"
description: "Can you upload your `package.json` file? This helps us reproduce the crash."
render: json
- type: textarea
id: repro
attributes:
label: How can we reproduce the crash?
description: Please provide a [minimal reproduction](https://stackoverflow.com/help/minimal-reproducible-example) using a GitHub repository, [Replit](https://replit.com/@replit/Bun) or [CodeSandbox](https://codesandbox.io/templates/bun)
- type: textarea
id: logs
attributes:
label: Relevant log output
description: Please copy and paste any relevant log output. This will be
automatically formatted into code, so no need for backticks.
render: shell
- type: textarea
id: remapped_trace
attributes:
label: Stack Trace (bun.report)
validations:
required: true


@@ -1,43 +0,0 @@
name: Bump version
description: Bump the version of Bun
inputs:
version:
description: The most recent version of Bun.
required: true
type: string
token:
description: The GitHub token to use for creating a pull request.
required: true
type: string
default: ${{ github.token }}
runs:
using: composite
steps:
- name: Run Bump
shell: bash
id: bump
run: |
set -euo pipefail
MESSAGE=$(bun ./scripts/bump.ts patch --last-version=${{ inputs.version }})
LATEST=$(cat LATEST)
echo "version=$LATEST" >> $GITHUB_OUTPUT
echo "message=$MESSAGE" >> $GITHUB_OUTPUT
- name: Create Pull Request
uses: peter-evans/create-pull-request@v4
with:
add-paths: |
CMakeLists.txt
LATEST
token: ${{ inputs.token }}
commit-message: Bump version to ${{ steps.bump.outputs.version }}
title: Bump to ${{ steps.bump.outputs.version }}
delete-branch: true
branch: github-actions/bump-version-${{ steps.bump.outputs.version }}--${{ github.run_id }}
body: |
## What does this PR do?
${{ steps.bump.outputs.message }}
Auto-bumped by [this workflow](https://github.com/oven-sh/bun/actions/workflows/release.yml)


@@ -1,51 +0,0 @@
name: Setup Bun
description: An internal version of the 'oven-sh/setup-bun' action.
inputs:
bun-version:
type: string
description: "The version of bun to install: 'latest', 'canary', 'bun-v1.2.0', etc."
default: latest
required: false
baseline:
type: boolean
description: "Whether to use the baseline version of bun."
default: false
required: false
download-url:
type: string
description: "The base URL to download bun from."
default: "https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases"
required: false
runs:
using: composite
steps:
- name: Setup Bun
shell: bash
run: |
case "$(uname -s)" in
Linux*) os=linux;;
Darwin*) os=darwin;;
*) os=windows;;
esac
case "$(uname -m)" in
arm64 | aarch64) arch=aarch64;;
*) arch=x64;;
esac
case "${{ inputs.baseline }}" in
true | 1) target="bun-${os}-${arch}-baseline";;
*) target="bun-${os}-${arch}";;
esac
case "${{ inputs.bun-version }}" in
latest) release="latest";;
canary) release="canary";;
*) release="bun-v${{ inputs.bun-version }}";;
esac
curl -LO "${{ inputs.download-url }}/${release}/${target}.zip" --retry 5
unzip ${target}.zip
mkdir -p ${{ runner.temp }}/.bun/bin
mv ${target}/bun* ${{ runner.temp }}/.bun/bin/
chmod +x ${{ runner.temp }}/.bun/bin/*
ln -fs ${{ runner.temp }}/.bun/bin/bun ${{ runner.temp }}/.bun/bin/bunx
echo "${{ runner.temp }}/.bun/bin" >> ${GITHUB_PATH}


@@ -1,50 +0,0 @@
### What does this PR do?
<!-- **Please explain what your changes do**, example: -->
<!--
This adds a new flag --bail to bun test. When set, it will stop running tests after the first failure. This is useful for CI environments where you want to fail fast.
-->
- [ ] Documentation or TypeScript types (it's okay to leave the rest blank in this case)
- [ ] Code changes
### How did you verify your code works?
<!-- **For code changes, please include automated tests**. Feel free to uncomment the line below -->
<!-- I wrote automated tests -->
<!-- If JavaScript/TypeScript modules or builtins changed:
- [ ] I included a test for the new code, or existing tests cover it
- [ ] I ran my tests locally and they pass (`bun-debug test test-file-name.test`)
-->
<!-- If Zig files changed:
- [ ] I checked the lifetime of memory allocated to verify it's (1) freed and (2) only freed when it should be
- [ ] I included a test for the new code, or an existing test covers it
- [ ] JSValue used outside of the stack is either wrapped in a JSC.Strong or is JSValueProtect'ed
- [ ] I wrote TypeScript/JavaScript tests and they pass locally (`bun-debug test test-file-name.test`)
-->
<!-- If new methods, getters, or setters were added to a publicly exposed class:
- [ ] I added TypeScript types for the new methods, getters, or setters
-->
<!-- If dependencies in tests changed:
- [ ] I made sure that specific versions of dependencies are used instead of ranged or tagged versions
-->
<!-- If a new builtin ESM/CJS module was added:
- [ ] I updated Aliases in `module_loader.zig` to include the new module
- [ ] I added a test that imports the module
- [ ] I added a test that require() the module
-->

.github/workflows/bun-deploy-site.yml

@@ -0,0 +1,18 @@
# redeploy Vercel site when a file in `docs` changes
# using VERCEL_DEPLOY_HOOK environment variable
name: Deploy site
on:
push:
paths:
- "docs/**"
branches: [main]
jobs:
deploy:
name: Deploy site
runs-on: ubuntu-latest
if: github.repository_owner == 'oven-sh'
steps:
- name: Trigger Vercel build
run: curl ${{ secrets.VERCEL_DEPLOY_HOOK }}


@@ -0,0 +1,50 @@
name: bun-ecosystem-test
on:
schedule:
- cron: "0 15 * * *" # every day at 7am PST
workflow_dispatch:
inputs:
version:
description: "The version of Bun to run"
required: true
default: "canary"
type: string
jobs:
test:
name: ${{ matrix.tag }}
runs-on: ${{ matrix.os }}
if: github.repository_owner == 'oven-sh'
timeout-minutes: 10
strategy:
fail-fast: false
matrix:
include:
- os: ubuntu-latest
tag: linux-x64
url: linux/x64?avx2=true
- os: ubuntu-latest
tag: linux-x64-baseline
url: linux/x64?baseline=true
# FIXME: runner fails with "No tests found"?
#- os: macos-latest
# tag: darwin-x64
# url: darwin/x64?avx2=true
- os: macos-latest
tag: darwin-x64-baseline
url: darwin/x64?baseline=true
steps:
- id: checkout
name: Checkout
uses: Bhacaz/checkout-files@v2
with:
files: packages/bun-internal-test
- id: setup
name: Setup
uses: oven-sh/setup-bun@v1
with:
bun-download-url: https://bun.sh/download/${{ github.event.inputs.version }}/${{ matrix.url }}
- id: test
name: Test
working-directory: packages/bun-internal-test
run: bun run test:ecosystem


@@ -0,0 +1,41 @@
name: bun-framework-next
on:
push:
paths:
- packages/bun-framework-next/**/*
branches: [main, bun-framework-next-actions]
pull_request:
paths:
- packages/bun-framework-next/**/*
branches: [main]
jobs:
build:
name: lint, test and build on Node ${{ matrix.node }} and ${{ matrix.os }}
runs-on: ${{ matrix.os }}
strategy:
matrix:
node: ["14.x"]
os: [macOS-latest]
steps:
- name: Checkout repo
uses: actions/checkout@v2
- name: Use Node ${{ matrix.node }}
uses: actions/setup-node@v2
with:
node-version: ${{ matrix.node }}
- name: Install PNPM
uses: pnpm/action-setup@v2.0.1
with:
version: 6.21.0
- name: Install dependencies
run: cd packages/bun-framework-next && pnpm install
- name: Type check bun-framework-next
run: cd packages/bun-framework-next && pnpm check

.github/workflows/bun-linux-aarch64.yml

@@ -0,0 +1,136 @@
name: bun-linux
concurrency:
group: bun-linux-aarch64-${{ github.ref }}
cancel-in-progress: true
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }}
TEST_TAG: bun-test
on:
push:
branches:
- main
paths:
- "src/**/*"
- "test/**/*"
- "build.zig"
- "Makefile"
- "Dockerfile"
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
jobs:
linux:
name: ${{matrix.tag}}
runs-on: ${{matrix.runner}}
if: github.repository_owner == 'oven-sh'
timeout-minutes: 90
strategy:
matrix:
include:
- cpu: native
tag: linux-aarch64
arch: aarch64
build_arch: arm64
runner: linux-arm64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-linux-arm64-lto.tar.gz"
webkit_basename: "bun-webkit-linux-arm64-lto"
build_machine_arch: aarch64
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
- uses: docker/setup-buildx-action@v2
id: buildx
with:
install: true
- name: Run
run: |
rm -rf ${{runner.temp}}/release
- name: Login to GitHub Container Registry
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- run: |
mkdir -p /tmp/.buildx-cache-${{matrix.tag}}
- name: Build and push
uses: docker/build-push-action@v3
with:
context: .
push: false
cache-from: type=local,src=/tmp/.buildx-cache-${{matrix.tag}}
cache-to: type=local,dest=/tmp/.buildx-cache-${{matrix.tag}}
build-args: |
ARCH=${{matrix.arch}}
BUILDARCH=${{matrix.build_arch}}
BUILD_MACHINE_ARCH=${{matrix.build_machine_arch}}
CPU_TARGET=${{matrix.cpu}}
WEBKIT_URL=${{matrix.webkit_url}}
GIT_SHA=${{github.sha}}
WEBKIT_BASENAME=${{matrix.webkit_basename}}
platforms: linux/${{matrix.build_arch}}
target: artifact
outputs: type=local,dest=${{runner.temp}}/release
- name: Zip
run: |
# if zip is not found
if [ ! -x "$(command -v zip)" ]; then
sudo apt-get update && sudo apt-get install -y zip --no-install-recommends
fi
if [ ! -x "$(command -v strip)" ]; then
sudo apt-get update && sudo apt-get install -y binutils --no-install-recommends
fi
cd ${{runner.temp}}/release
chmod +x bun-profile bun
mkdir bun-${{matrix.tag}}-profile
mkdir bun-${{matrix.tag}}
strip bun
mv bun-profile bun-${{matrix.tag}}-profile/bun-profile
mv bun bun-${{matrix.tag}}/bun
zip -r bun-${{matrix.tag}}-profile.zip bun-${{matrix.tag}}-profile
zip -r bun-${{matrix.tag}}.zip bun-${{matrix.tag}}
- uses: actions/upload-artifact@v3
with:
name: bun-${{matrix.tag}}-profile
path: ${{runner.temp}}/release/bun-${{matrix.tag}}-profile.zip
- uses: actions/upload-artifact@v3
with:
name: bun-${{matrix.tag}}
path: ${{runner.temp}}/release/bun-${{matrix.tag}}.zip
- uses: actions/upload-artifact@v3
with:
name: bun-obj-${{matrix.tag}}
path: ${{runner.temp}}/release/bun-obj
- uses: actions/upload-artifact@v3
with:
name: ${{matrix.tag}}-dependencies
path: ${{runner.temp}}/release/bun-dependencies
- name: Release
id: release
uses: ncipollo/release-action@v1
if: |
github.repository_owner == 'oven-sh'
&& github.ref == 'refs/heads/main'
with:
prerelease: true
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
allowUpdates: true
replacesArtifacts: true
generateReleaseNotes: true
artifactErrorsFailBuild: true
token: ${{ secrets.GITHUB_TOKEN }}
name: "Canary (${{github.sha}})"
tag: "canary"
artifacts: "${{runner.temp}}/release/bun-${{matrix.tag}}.zip,${{runner.temp}}/release/bun-${{matrix.tag}}-profile.zip"

.github/workflows/bun-linux-build.yml

@@ -0,0 +1,227 @@
name: bun-linux
concurrency:
group: bun-linux-build-${{ github.ref }}
cancel-in-progress: true
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }}
TEST_TAG: bun-test
on:
push:
branches:
- main
paths:
- "src/**/*"
- "test/**/*"
- "build.zig"
- "Makefile"
- "Dockerfile"
pull_request:
branches:
- main
paths:
- "src/**/*"
- "test/**/*"
- "build.zig"
- "Makefile"
- "Dockerfile"
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
jobs:
linux:
name: ${{matrix.tag}}
runs-on: ${{matrix.runner}}
if: github.repository_owner == 'oven-sh'
timeout-minutes: 90
strategy:
fail-fast: false
matrix:
include:
- cpu: haswell
tag: linux-x64
arch: x86_64
build_arch: amd64
runner: big-ubuntu
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-linux-amd64-lto.tar.gz"
webkit_basename: "bun-webkit-linux-amd64-lto"
build_machine_arch: x86_64
- cpu: nehalem
tag: linux-x64-baseline
arch: x86_64
build_arch: amd64
runner: big-ubuntu
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-linux-amd64-lto.tar.gz"
webkit_basename: "bun-webkit-linux-amd64-lto"
build_machine_arch: x86_64
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
- uses: docker/setup-buildx-action@v2
id: buildx
with:
install: true
- name: Run
run: |
rm -rf ${{runner.temp}}/release
- name: Login to GitHub Container Registry
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- run: |
mkdir -p /tmp/.buildx-cache-${{matrix.tag}}
- name: Build and push
uses: docker/build-push-action@v3
with:
context: .
push: false
cache-from: type=local,src=/tmp/.buildx-cache-${{matrix.tag}}
cache-to: type=local,dest=/tmp/.buildx-cache-${{matrix.tag}}
build-args: |
ARCH=${{matrix.arch}}
BUILDARCH=${{matrix.build_arch}}
BUILD_MACHINE_ARCH=${{matrix.build_machine_arch}}
CPU_TARGET=${{matrix.cpu}}
WEBKIT_URL=${{matrix.webkit_url}}
GIT_SHA=${{github.sha}}
WEBKIT_BASENAME=${{matrix.webkit_basename}}
platforms: linux/${{matrix.build_arch}}
target: artifact
outputs: type=local,dest=${{runner.temp}}/release
- name: Zip
run: |
# if zip is not found
if [ ! -x "$(command -v zip)" ]; then
sudo apt-get update && sudo apt-get install -y zip --no-install-recommends
fi
if [ ! -x "$(command -v strip)" ]; then
sudo apt-get update && sudo apt-get install -y binutils --no-install-recommends
fi
cd ${{runner.temp}}/release
chmod +x bun-profile bun
mkdir bun-${{matrix.tag}}-profile
mkdir bun-${{matrix.tag}}
strip bun
mv bun-profile bun-${{matrix.tag}}-profile/bun-profile
mv bun bun-${{matrix.tag}}/bun
zip -r bun-${{matrix.tag}}-profile.zip bun-${{matrix.tag}}-profile
zip -r bun-${{matrix.tag}}.zip bun-${{matrix.tag}}
- uses: actions/upload-artifact@v3
with:
name: bun-${{matrix.tag}}-profile
path: ${{runner.temp}}/release/bun-${{matrix.tag}}-profile.zip
- uses: actions/upload-artifact@v3
with:
name: bun-${{matrix.tag}}
path: ${{runner.temp}}/release/bun-${{matrix.tag}}.zip
- uses: actions/upload-artifact@v3
with:
name: bun-obj-${{matrix.tag}}
path: ${{runner.temp}}/release/bun-obj
- uses: actions/upload-artifact@v3
with:
name: ${{matrix.tag}}-dependencies
path: ${{runner.temp}}/release/bun-dependencies
- name: Release
id: release
uses: ncipollo/release-action@v1
if: |
github.repository_owner == 'oven-sh'
&& github.ref == 'refs/heads/main'
with:
prerelease: true
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
allowUpdates: true
replacesArtifacts: true
generateReleaseNotes: true
artifactErrorsFailBuild: true
token: ${{ secrets.GITHUB_TOKEN }}
name: "Canary (${{github.sha}})"
tag: "canary"
artifacts: "${{runner.temp}}/release/bun-${{matrix.tag}}.zip,${{runner.temp}}/release/bun-${{matrix.tag}}-profile.zip"
linux-test:
name: Tests ${{matrix.tag}}
runs-on: ubuntu-latest
needs: [linux]
if: github.event_name == 'pull_request'
timeout-minutes: 20
outputs:
failing_tests: ${{ steps.test.outputs.failing_tests }}
failing_tests_count: ${{ steps.test.outputs.failing_tests_count }}
strategy:
fail-fast: false
matrix:
include:
- tag: linux-x64
- tag: linux-x64-baseline
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
with:
submodules: false
- id: download
name: Download
uses: actions/download-artifact@v3
with:
name: bun-${{matrix.tag}}
path: ${{runner.temp}}/release
- id: install
name: Install
run: |
cd ${{runner.temp}}/release
unzip bun-${{matrix.tag}}.zip
cd bun-${{matrix.tag}}
chmod +x bun
sudo mv bun /usr/local/bin/bun
bun --version
- id: test
name: Test (node runner)
# if: ${{github.event.inputs.use_bun == 'false'}}
run: |
bun install
bun install --cwd test
bun install --cwd packages/bun-internal-test
node packages/bun-internal-test/src/runner.node.mjs || true
- name: Comment on PR
if: steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: test-failures-${{matrix.tag}}
message: |
❌ @${{ github.actor }} ${{ steps.test.outputs.failing_tests_count }} files with test failures on ${{ matrix.tag }}:
${{ steps.test.outputs.failing_tests }}
**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- name: Uncomment on PR
if: steps.test.outputs.failing_tests == '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: test-failures-${{matrix.tag}}
mode: upsert
create_if_not_exists: false
message: |
✅ test failures on ${{ matrix.tag }} have been resolved.
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- id: fail
name: Fail the build
if: steps.test.outputs.failing_tests != ''
run: exit 1

.github/workflows/bun-mac-aarch64.yml

@@ -0,0 +1,466 @@
name: bun-macOS-aarch64
concurrency:
group: bun-macOS-aarch64-${{ github.ref }}
cancel-in-progress: true
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }}
TEST_TAG: bun-test
on:
push:
branches: [main]
paths:
- "src/**/*"
- "test/**/*"
- "build.zig"
- "Makefile"
- "Dockerfile"
pull_request:
branches: [main]
paths:
- "src/**/*"
- "test/**/*"
- "build.zig"
- "Makefile"
- "Dockerfile"
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
jobs:
macos-object-files:
name: macOS Object
runs-on: med-ubuntu
if: github.repository_owner == 'oven-sh'
strategy:
matrix:
include:
# - cpu: nehalem
# arch: x86_64
# tag: bun-obj-darwin-x64-baseline
# - cpu: haswell
# arch: x86_64
# tag: bun-obj-darwin-x64
- cpu: native
arch: aarch64
tag: bun-obj-darwin-aarch64
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
- uses: docker/setup-buildx-action@v2
id: buildx
with:
install: true
- name: Run
run: |
rm -rf ${{runner.temp}}/release
- name: Login to GitHub Container Registry
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v3
if: runner.arch == 'X64'
with:
context: .
push: false
cache-from: type=gha
cache-to: type=gha,mode=min
build-args: |
ARCH=${{ matrix.arch }}
BUILDARCH=amd64
BUILD_MACHINE_ARCH=x86_64
CPU_TARGET=${{ matrix.cpu }}
TRIPLET=${{matrix.arch}}-macos-none
GIT_SHA=${{github.sha}}
platforms: linux/amd64
target: build_release_obj
outputs: type=local,dest=${{runner.temp}}/release
- name: Build and push
uses: docker/build-push-action@v3
if: runner.arch == 'ARM64'
with:
context: .
push: false
cache-from: type=gha
cache-to: type=gha,mode=min
build-args: |
ARCH=${{ matrix.arch }}
BUILDARCH=arm64
BUILD_MACHINE_ARCH=aarch64
CPU_TARGET=${{ matrix.cpu }}
TRIPLET=${{matrix.arch}}-macos-none
GIT_SHA=${{github.sha}}
platforms: linux/arm64
target: build_release_obj
outputs: type=local,dest=${{runner.temp}}/release
- uses: actions/upload-artifact@v3
with:
name: ${{ matrix.tag }}
path: ${{runner.temp}}/release/bun.o
macOS-cpp:
name: macOS C++
runs-on: ${{ matrix.runner }}
if: github.repository_owner == 'oven-sh'
timeout-minutes: 90
strategy:
matrix:
include:
# - cpu: nehalem
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
# - cpu: haswell
# arch: x86_64
# tag: bun-darwin-x64
# obj: bun-obj-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
# - cpu: nehalem
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
# - cpu: haswell
# arch: x86_64
# tag: bun-darwin-x64
# obj: bun-obj-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
- cpu: native
arch: aarch64
tag: bun-darwin-aarch64
obj: bun-obj-darwin-aarch64
artifact: bun-obj-darwin-aarch64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-arm64-lto.tar.gz"
runner: macos-arm64
dependencies: true
compile_obj: true
steps:
- uses: actions/checkout@v3
- name: Checkout submodules
run: git submodule update --init --recursive --depth=1 --progress -j $(sysctl -n hw.ncpu)
- name: Install dependencies
env:
CPU_TARGET: ${{ matrix.cpu }}
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
HOMEBREW_NO_AUTO_UPDATE: 1
HOMEBREW_NO_INSTALL_CLEANUP: 1
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
brew install ccache rust llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "export PATH=$(brew --prefix ccache)/bin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
brew link --overwrite llvm@15
- name: ccache
uses: hendrikmuhs/ccache-action@v1.2
with:
key: ${{ runner.os }}-ccache-${{ matrix.tag }}
restore-keys: ${{ runner.os }}-ccache-${{ matrix.tag }}
- name: Download WebKit
if: matrix.compile_obj
env:
CPU_TARGET: ${{ matrix.cpu }}
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
BUN_RELEASE_DIR: ${{ runner.temp }}/release
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
run: |
rm -rf $JSC_BASE_DIR
mkdir -p $JSC_BASE_DIR
curl -L ${{ matrix.webkit_url }} | tar -xz -C $JSC_BASE_DIR --strip-components=1
- name: Compile dependencies
if: matrix.dependencies
env:
CPU_TARGET: ${{ matrix.cpu }}
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
BUN_RELEASE_DIR: ${{ runner.temp }}/release
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
run: |
mkdir -p $BUN_DEPS_OUT_DIR
make vendor-without-check
- name: Compile C++
if: matrix.compile_obj
env:
CPU_TARGET: ${{ matrix.cpu }}
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
BUN_RELEASE_DIR: ${{ runner.temp }}/release
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
run: |
mkdir -p $OBJ_DIR $BUN_DEPS_OUT_DIR
make clean-bindings
make -j $(sysctl -n hw.ncpu) release-bindings
- name: Upload C++
if: matrix.compile_obj
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.tag }}-cpp
path: ${{ runner.temp }}/bun-cpp-obj
- name: Upload Dependencies
if: matrix.dependencies
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.tag }}-deps
path: ${{runner.temp}}/bun-deps
macOS:
name: macOS Link
runs-on: ${{ matrix.runner }}
if: github.repository_owner == 'oven-sh'
needs: [macOS-cpp, macos-object-files]
timeout-minutes: 90
strategy:
matrix:
include:
# - cpu: nehalem
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# package: bun-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: haswell
# arch: x86_64
# tag: bun-darwin-x64
# obj: bun-obj-darwin-x64
# package: bun-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
- cpu: native
arch: aarch64
tag: bun-darwin-aarch64
obj: bun-obj-darwin-aarch64
package: bun-darwin-aarch64
artifact: bun-obj-darwin-aarch64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-arm64-lto.tar.gz"
runner: macos-arm64
steps:
- uses: actions/checkout@v3
- name: Checkout submodules
run: git submodule update --init --recursive --depth=1 --progress -j $(sysctl -n hw.ncpu)
- name: Install dependencies
env:
CPU_TARGET: ${{ matrix.cpu }}
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
HOMEBREW_NO_AUTO_UPDATE: 1
HOMEBREW_NO_INSTALL_CLEANUP: 1
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
brew install rust ccache llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
brew link --overwrite llvm@15
- name: ccache
uses: hendrikmuhs/ccache-action@v1.2
with:
key: ${{ runner.os }}-ccache-${{ matrix.tag }}-link
restore-keys: ${{ runner.os }}-ccache-${{ matrix.tag }}-link
- name: Download WebKit
env:
CPU_TARGET: ${{ matrix.cpu }}
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
rm -rf $JSC_BASE_DIR
mkdir -p $JSC_BASE_DIR
curl -L ${{ matrix.webkit_url }} | tar -xz -C $JSC_BASE_DIR --strip-components=1
- name: Download C++
uses: actions/download-artifact@v3
with:
name: ${{ matrix.tag }}-cpp
path: ${{ runner.temp }}/bun-cpp-obj
- name: Download Dependencies
uses: actions/download-artifact@v3
with:
name: ${{ matrix.tag }}-deps
path: ${{ runner.temp }}/bun-deps
- name: Download Object
uses: actions/download-artifact@v3
with:
name: ${{ matrix.obj }}
path: ${{ runner.temp }}/release
- name: Link
env:
CPU_TARGET: ${{ matrix.cpu }}
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
BUN_RELEASE_DIR: ${{ runner.temp }}/release
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
run: |
rm -rf packages/${{ matrix.package }}
mkdir -p packages/${{ matrix.package }}
mv ${{ runner.temp }}/release/* packages/${{ matrix.package }}/
make bun-link-lld-release copy-to-bun-release-dir-bin
- name: Zip
env:
CPU_TARGET: ${{ matrix.cpu }}
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
BUN_RELEASE_DIR: ${{ runner.temp }}/release
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
run: |
cd ${{runner.temp}}/release
chmod +x bun-profile bun
mkdir ${{matrix.tag}}-profile
mkdir ${{matrix.tag}}
/usr/bin/strip -S bun
mv bun-profile ${{matrix.tag}}-profile/bun-profile
mv bun ${{matrix.tag}}/bun
zip -r ${{matrix.tag}}-profile.zip ${{matrix.tag}}-profile
zip -r ${{matrix.tag}}.zip ${{matrix.tag}}
- uses: actions/upload-artifact@v3
with:
name: ${{matrix.tag}}-profile
path: ${{runner.temp}}/release/${{matrix.tag}}-profile.zip
- uses: actions/upload-artifact@v3
with:
name: ${{matrix.tag}}
path: ${{runner.temp}}/release/${{matrix.tag}}.zip
- name: Release
id: release
uses: ncipollo/release-action@v1
if: |
github.repository_owner == 'oven-sh'
&& github.ref == 'refs/heads/main'
with:
prerelease: true
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
allowUpdates: true
replacesArtifacts: true
generateReleaseNotes: true
artifactErrorsFailBuild: true
token: ${{ secrets.GITHUB_TOKEN }}
name: "Canary (${{github.sha}})"
tag: "canary"
artifacts: "${{runner.temp}}/release/${{matrix.tag}}.zip,${{runner.temp}}/release/${{matrix.tag}}-profile.zip"
macOS-test:
name: Tests ${{matrix.tag}}
runs-on: ${{ matrix.runner }}
needs: [macOS]
if: github.event_name == 'pull_request' && github.repository_owner == 'oven-sh'
timeout-minutes: 10
outputs:
failing_tests: ${{ steps.test.outputs.failing_tests }}
failing_tests_count: ${{ steps.test.outputs.failing_tests_count }}
strategy:
fail-fast: false
matrix:
include:
- tag: bun-darwin-aarch64
runner: macos-arm64
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
with:
submodules: false
- id: download
name: Download
uses: actions/download-artifact@v3
with:
name: ${{matrix.tag}}
path: ${{runner.temp}}/release
- id: install
name: Install
run: |
cd ${{runner.temp}}/release
unzip ${{matrix.tag}}.zip
cd ${{matrix.tag}}
chmod +x bun
sudo mv bun /usr/local/bin/bun
bun --version
- id: test
name: Test (node runner)
# if: ${{github.event.inputs.use_bun == 'false'}}
run: |
bun install
bun install --cwd test
bun install --cwd packages/bun-internal-test
node packages/bun-internal-test/src/runner.node.mjs || true
- name: Comment on PR
if: steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: test-failures-${{matrix.tag}}
message: |
❌ @${{ github.actor }} ${{ steps.test.outputs.failing_tests_count }} files with test failures on ${{ matrix.tag }}:
${{ steps.test.outputs.failing_tests }}
**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- name: Uncomment on PR
if: steps.test.outputs.failing_tests == '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: test-failures-${{matrix.tag}}
mode: upsert
create_if_not_exists: false
message: |
✅ test failures on ${{ matrix.tag }} have been resolved.
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- id: fail
name: Fail the build
if: steps.test.outputs.failing_tests != ''
run: exit 1


@@ -0,0 +1,470 @@
name: bun-macOS-x64-baseline
concurrency:
group: bun-macOS-x64-baseline-${{ github.ref }}
cancel-in-progress: true
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }}
TEST_TAG: bun-test
on:
push:
branches: [main]
paths:
- "src/**/*"
- "test/**/*"
- "build.zig"
- "Makefile"
- "Dockerfile"
pull_request:
branches: [main]
paths:
- "src/**/*"
- "test/**/*"
- "build.zig"
- "Makefile"
- "Dockerfile"
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
jobs:
macos-object-files:
name: macOS Object
runs-on: med-ubuntu
if: github.repository_owner == 'oven-sh'
strategy:
matrix:
include:
- cpu: nehalem
arch: x86_64
tag: bun-obj-darwin-x64-baseline
# - cpu: haswell
# arch: x86_64
# tag: bun-obj-darwin-x64
# - cpu: native
# arch: aarch64
# tag: bun-obj-darwin-aarch64
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
- uses: docker/setup-buildx-action@v2
id: buildx
with:
install: true
- name: Run
run: |
rm -rf ${{runner.temp}}/release
- name: Login to GitHub Container Registry
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v3
if: runner.arch == 'X64'
with:
context: .
push: false
cache-from: type=gha
cache-to: type=gha,mode=min
build-args: |
ARCH=${{ matrix.arch }}
BUILDARCH=amd64
BUILD_MACHINE_ARCH=x86_64
CPU_TARGET=${{ matrix.cpu }}
TRIPLET=${{matrix.arch}}-macos-none
GIT_SHA=${{github.sha}}
platforms: linux/amd64
target: build_release_obj
outputs: type=local,dest=${{runner.temp}}/release
- name: Build and push
uses: docker/build-push-action@v3
if: runner.arch == 'ARM64'
with:
context: .
push: false
cache-from: type=gha
cache-to: type=gha,mode=min
build-args: |
ARCH=${{ matrix.arch }}
BUILDARCH=arm64
BUILD_MACHINE_ARCH=aarch64
CPU_TARGET=${{ matrix.cpu }}
TRIPLET=${{matrix.arch}}-macos-none
GIT_SHA=${{github.sha}}
platforms: linux/arm64
target: build_release_obj
outputs: type=local,dest=${{runner.temp}}/release
- uses: actions/upload-artifact@v3
with:
name: ${{ matrix.tag }}
path: ${{runner.temp}}/release/bun.o
macOS-cpp:
name: macOS C++
runs-on: ${{ matrix.runner }}
if: github.repository_owner == 'oven-sh'
timeout-minutes: 90
strategy:
matrix:
include:
- cpu: nehalem
arch: x86_64
tag: bun-darwin-x64-baseline
obj: bun-obj-darwin-x64-baseline
runner: macos-11
artifact: bun-obj-darwin-x64-baseline
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: true
compile_obj: false
# - cpu: haswell
# arch: x86_64
# tag: bun-darwin-x64
# obj: bun-obj-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
- cpu: nehalem
arch: x86_64
tag: bun-darwin-x64-baseline
obj: bun-obj-darwin-x64-baseline
runner: macos-11
artifact: bun-obj-darwin-x64-baseline
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: false
compile_obj: true
# - cpu: haswell
# arch: x86_64
# tag: bun-darwin-x64
# obj: bun-obj-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
# - cpu: native
# arch: aarch64
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# runner: macos-arm64
# dependencies: true
# compile_obj: true
steps:
- uses: actions/checkout@v3
- name: Checkout submodules
run: git submodule update --init --recursive --depth=1 --progress -j $(sysctl -n hw.ncpu)
- name: Install dependencies
env:
CPU_TARGET: ${{ matrix.cpu }}
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
HOMEBREW_NO_AUTO_UPDATE: 1
HOMEBREW_NO_INSTALL_CLEANUP: 1
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
brew install ccache rust llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "export PATH=$(brew --prefix ccache)/bin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
brew link --overwrite llvm@15
- name: ccache (dependencies)
uses: hendrikmuhs/ccache-action@v1.2
if: matrix.dependencies
with:
key: ${{ runner.os }}-ccache-${{ matrix.tag }}-dependencies
restore-keys: ${{ runner.os }}-ccache-${{ matrix.tag }}-dependencies
- name: ccache (c++)
uses: hendrikmuhs/ccache-action@v1.2
if: matrix.compile_obj
with:
key: ${{ runner.os }}-ccache-${{ matrix.tag }}-obj
restore-keys: ${{ runner.os }}-ccache-${{ matrix.tag }}-obj
- name: Download WebKit
if: matrix.compile_obj
env:
CPU_TARGET: ${{ matrix.cpu }}
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
rm -rf $JSC_BASE_DIR
mkdir -p $JSC_BASE_DIR
curl -L ${{ matrix.webkit_url }} | tar -xz -C $JSC_BASE_DIR --strip-components=1
- name: Compile dependencies
if: matrix.dependencies
env:
CPU_TARGET: ${{ matrix.cpu }}
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
run: |
mkdir -p $OBJ_DIR $BUN_DEPS_OUT_DIR
make vendor-without-check
- name: Compile C++
if: matrix.compile_obj
env:
CPU_TARGET: ${{ matrix.cpu }}
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
run: |
mkdir -p $OBJ_DIR $BUN_DEPS_OUT_DIR
make -j $(sysctl -n hw.ncpu) release-bindings
- name: Upload C++
if: matrix.compile_obj
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.tag }}-cpp
path: ${{ runner.temp }}/bun-cpp-obj
- name: Upload Dependencies
if: matrix.dependencies
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.tag }}-deps
path: ${{runner.temp}}/bun-deps
macOS:
name: macOS Link
runs-on: ${{ matrix.runner }}
if: github.repository_owner == 'oven-sh'
needs: [macOS-cpp, macos-object-files]
timeout-minutes: 90
strategy:
matrix:
include:
- cpu: nehalem
arch: x86_64
tag: bun-darwin-x64-baseline
obj: bun-obj-darwin-x64-baseline
package: bun-darwin-x64
runner: macos-11
artifact: bun-obj-darwin-x64-baseline
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: haswell
# arch: x86_64
# tag: bun-darwin-x64
# obj: bun-obj-darwin-x64
# package: bun-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: native
# arch: aarch64
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# package: bun-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# runner: macos-arm64
steps:
- uses: actions/checkout@v3
- name: Checkout submodules
run: git submodule update --init --recursive --depth=1 --progress -j $(sysctl -n hw.ncpu)
- name: Install dependencies
env:
CPU_TARGET: ${{ matrix.cpu }}
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
HOMEBREW_NO_AUTO_UPDATE: 1
HOMEBREW_NO_INSTALL_CLEANUP: 1
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
brew install ccache rust llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
brew link --overwrite llvm@15
- name: ccache (link)
uses: hendrikmuhs/ccache-action@v1.2
with:
key: ${{ runner.os }}-ccache-${{ matrix.tag }}-link
restore-keys: ${{ runner.os }}-ccache-${{ matrix.tag }}-link
- name: Download WebKit
env:
CPU_TARGET: ${{ matrix.cpu }}
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
BUN_RELEASE_DIR: ${{ runner.temp }}/release
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
run: |
rm -rf $JSC_BASE_DIR
mkdir -p $JSC_BASE_DIR
curl -L ${{ matrix.webkit_url }} | tar -xz -C $JSC_BASE_DIR --strip-components=1
- name: Download C++
uses: actions/download-artifact@v3
with:
name: ${{ matrix.tag }}-cpp
path: ${{ runner.temp }}/bun-cpp-obj
- name: Download Dependencies
uses: actions/download-artifact@v3
with:
name: ${{ matrix.tag }}-deps
path: ${{ runner.temp }}/bun-deps
- name: Download Object
uses: actions/download-artifact@v3
with:
name: ${{ matrix.obj }}
path: ${{ runner.temp }}/release
- name: Link
env:
CPU_TARGET: ${{ matrix.cpu }}
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
BUN_RELEASE_DIR: ${{ runner.temp }}/release
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
run: |
rm -rf packages/${{ matrix.package }}
mkdir -p packages/${{ matrix.package }}
mv ${{ runner.temp }}/release/* packages/${{ matrix.package }}/
make bun-link-lld-release copy-to-bun-release-dir-bin
- name: Zip
env:
CPU_TARGET: ${{ matrix.cpu }}
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
BUN_RELEASE_DIR: ${{ runner.temp }}/release
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
run: |
cd ${{runner.temp}}/release
chmod +x bun-profile bun
mkdir ${{matrix.tag}}-profile
mkdir ${{matrix.tag}}
/usr/bin/strip -S bun
mv bun-profile ${{matrix.tag}}-profile/bun-profile
mv bun ${{matrix.tag}}/bun
zip -r ${{matrix.tag}}-profile.zip ${{matrix.tag}}-profile
zip -r ${{matrix.tag}}.zip ${{matrix.tag}}
- uses: actions/upload-artifact@v3
with:
name: ${{matrix.tag}}-profile
path: ${{runner.temp}}/release/${{matrix.tag}}-profile.zip
- uses: actions/upload-artifact@v3
with:
name: ${{matrix.tag}}
path: ${{runner.temp}}/release/${{matrix.tag}}.zip
- name: Release
id: release
uses: ncipollo/release-action@v1
if: |
github.repository_owner == 'oven-sh'
&& github.ref == 'refs/heads/main'
with:
prerelease: true
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
allowUpdates: true
replacesArtifacts: true
generateReleaseNotes: true
artifactErrorsFailBuild: true
token: ${{ secrets.GITHUB_TOKEN }}
name: "Canary (${{github.sha}})"
tag: "canary"
artifacts: "${{runner.temp}}/release/${{matrix.tag}}.zip,${{runner.temp}}/release/${{matrix.tag}}-profile.zip"
macOS-test:
name: Tests ${{matrix.tag}}
runs-on: ${{ matrix.runner }}
needs: [macOS]
if: github.event_name == 'pull_request' && github.repository_owner == 'oven-sh'
timeout-minutes: 30
outputs:
failing_tests: ${{ steps.test.outputs.failing_tests }}
failing_tests_count: ${{ steps.test.outputs.failing_tests_count }}
strategy:
fail-fast: false
matrix:
include:
- tag: bun-darwin-x64-baseline
runner: macos-11
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
with:
submodules: false
- id: download
name: Download
uses: actions/download-artifact@v3
with:
name: ${{matrix.tag}}
path: ${{runner.temp}}/release
- id: install
name: Install
run: |
cd ${{runner.temp}}/release
unzip ${{matrix.tag}}.zip
cd ${{matrix.tag}}
chmod +x bun
sudo mv bun /usr/local/bin/bun
bun --version
- id: test
name: Test (node runner)
# if: ${{github.event.inputs.use_bun == 'false'}}
run: |
bun install
bun install --cwd test
bun install --cwd packages/bun-internal-test
node packages/bun-internal-test/src/runner.node.mjs || true
- name: Comment on PR
if: steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: test-failures-${{matrix.tag}}
message: |
❌ @${{ github.actor }} ${{ steps.test.outputs.failing_tests_count }} files with test failures on ${{ matrix.tag }}:
${{ steps.test.outputs.failing_tests }}
**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- name: Uncomment on PR
if: steps.test.outputs.failing_tests == '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: test-failures-${{matrix.tag}}
mode: upsert
create_if_not_exists: false
message: |
✅ test failures on ${{ matrix.tag }} have been resolved.
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- id: fail
name: Fail the build
if: steps.test.outputs.failing_tests != ''
run: exit 1

.github/workflows/bun-mac-x64.yml

@@ -0,0 +1,472 @@
name: bun-macOS-x64
concurrency:
group: bun-macOS-x64-${{ github.ref }}
cancel-in-progress: true
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }}
TEST_TAG: bun-test
on:
push:
branches: [main]
paths:
- "src/**/*"
- "test/**/*"
- "build.zig"
- "Makefile"
- "Dockerfile"
pull_request:
branches: [main]
paths:
- "src/**/*"
- "test/**/*"
- "build.zig"
- "Makefile"
- "Dockerfile"
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
jobs:
macos-object-files:
name: macOS Object
runs-on: med-ubuntu
if: github.repository_owner == 'oven-sh'
strategy:
matrix:
include:
# - cpu: nehalem
# arch: x86_64
# tag: bun-obj-darwin-x64-baseline
- cpu: haswell
arch: x86_64
tag: bun-obj-darwin-x64
# - cpu: native
# arch: aarch64
# tag: bun-obj-darwin-aarch64
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
- uses: docker/setup-buildx-action@v2
id: buildx
with:
install: true
- name: Run
run: |
rm -rf ${{runner.temp}}/release
- name: Login to GitHub Container Registry
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v3
if: runner.arch == 'X64'
with:
context: .
push: false
cache-from: type=gha
cache-to: type=gha,mode=min
build-args: |
ARCH=${{ matrix.arch }}
BUILDARCH=amd64
BUILD_MACHINE_ARCH=x86_64
CPU_TARGET=${{ matrix.cpu }}
TRIPLET=${{matrix.arch}}-macos-none
GIT_SHA=${{github.sha}}
platforms: linux/amd64
target: build_release_obj
outputs: type=local,dest=${{runner.temp}}/release
- name: Build and push
uses: docker/build-push-action@v3
if: runner.arch == 'ARM64'
with:
context: .
push: false
cache-from: type=gha
cache-to: type=gha,mode=min
build-args: |
ARCH=${{ matrix.arch }}
BUILDARCH=arm64
BUILD_MACHINE_ARCH=aarch64
CPU_TARGET=${{ matrix.cpu }}
TRIPLET=${{matrix.arch}}-macos-none
GIT_SHA=${{github.sha}}
platforms: linux/arm64
target: build_release_obj
outputs: type=local,dest=${{runner.temp}}/release
- uses: actions/upload-artifact@v3
with:
name: ${{ matrix.tag }}
path: ${{runner.temp}}/release/bun.o
macOS-cpp:
name: macOS C++
runs-on: ${{ matrix.runner }}
if: github.repository_owner == 'oven-sh'
timeout-minutes: 90
strategy:
matrix:
include:
# - cpu: nehalem
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
- cpu: haswell
arch: x86_64
tag: bun-darwin-x64
obj: bun-obj-darwin-x64
runner: macos-11
artifact: bun-obj-darwin-x64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: true
compile_obj: false
# - cpu: nehalem
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
- cpu: haswell
arch: x86_64
tag: bun-darwin-x64
obj: bun-obj-darwin-x64
runner: macos-11
artifact: bun-obj-darwin-x64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: false
compile_obj: true
# - cpu: native
# arch: aarch64
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-arm64-lto.tar.gz"
# runner: macos-arm64
# dependencies: true
# compile_obj: true
steps:
- uses: actions/checkout@v3
- name: Checkout submodules
run: git submodule update --init --recursive --depth=1 --progress -j $(sysctl -n hw.ncpu)
- name: Install dependencies
env:
CPU_TARGET: ${{ matrix.cpu }}
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
HOMEBREW_NO_AUTO_UPDATE: 1
HOMEBREW_NO_INSTALL_CLEANUP: 1
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
brew install rust ccache llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
brew link --overwrite llvm@15
- name: Download WebKit
if: matrix.compile_obj
env:
CPU_TARGET: ${{ matrix.cpu }}
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
run: |
rm -rf $JSC_BASE_DIR
mkdir -p $JSC_BASE_DIR
curl -L ${{ matrix.webkit_url }} | tar -xz -C $JSC_BASE_DIR --strip-components=1
- name: ccache (dependencies)
uses: hendrikmuhs/ccache-action@v1.2
if: matrix.dependencies
with:
key: ${{ runner.os }}-ccache-${{ matrix.tag }}-dependencies
restore-keys: ${{ runner.os }}-ccache-${{ matrix.tag }}-dependencies
- name: ccache (c++)
uses: hendrikmuhs/ccache-action@v1.2
if: matrix.compile_obj
with:
key: ${{ runner.os }}-ccache-${{ matrix.tag }}-obj
restore-keys: ${{ runner.os }}-ccache-${{ matrix.tag }}-obj
- name: Compile dependencies
if: matrix.dependencies
env:
CPU_TARGET: ${{ matrix.cpu }}
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
run: |
mkdir -p $OBJ_DIR $BUN_DEPS_OUT_DIR
make vendor-without-check
- name: Compile C++
if: matrix.compile_obj
env:
CPU_TARGET: ${{ matrix.cpu }}
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
run: |
mkdir -p $OBJ_DIR $BUN_DEPS_OUT_DIR
make -j $(sysctl -n hw.ncpu) release-bindings
- name: Upload C++
if: matrix.compile_obj
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.tag }}-cpp
path: ${{ runner.temp }}/bun-cpp-obj
- name: Upload Dependencies
if: matrix.dependencies
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.tag }}-deps
path: ${{runner.temp}}/bun-deps
macOS:
name: macOS Link
runs-on: ${{ matrix.runner }}
if: github.repository_owner == 'oven-sh'
needs: [macOS-cpp, macos-object-files]
timeout-minutes: 90
strategy:
matrix:
include:
# - cpu: nehalem
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# package: bun-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
- cpu: haswell
arch: x86_64
tag: bun-darwin-x64
obj: bun-obj-darwin-x64
package: bun-darwin-x64
runner: macos-11
artifact: bun-obj-darwin-x64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: native
# arch: aarch64
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# package: bun-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-arm64-lto.tar.gz"
# runner: macos-arm64
steps:
- uses: actions/checkout@v3
- name: Checkout submodules
run: git submodule update --init --recursive --depth=1 --progress -j $(sysctl -n hw.ncpu)
- name: Install dependencies
env:
CPU_TARGET: ${{ matrix.cpu }}
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
HOMEBREW_NO_AUTO_UPDATE: 1
HOMEBREW_NO_INSTALL_CLEANUP: 1
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
brew install rust ccache llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed esbuild --force
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
brew link --overwrite llvm@15
- name: Download WebKit
env:
CPU_TARGET: ${{ matrix.cpu }}
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
BUN_RELEASE_DIR: ${{ runner.temp }}/release
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
run: |
rm -rf $JSC_BASE_DIR
mkdir -p $JSC_BASE_DIR
curl -L ${{ matrix.webkit_url }} | tar -xz -C $JSC_BASE_DIR --strip-components=1
- name: Download C++
uses: actions/download-artifact@v3
with:
name: ${{ matrix.tag }}-cpp
path: ${{ runner.temp }}/bun-cpp-obj
- name: Download Dependencies
uses: actions/download-artifact@v3
with:
name: ${{ matrix.tag }}-deps
path: ${{ runner.temp }}/bun-deps
- name: Download Object
uses: actions/download-artifact@v3
with:
name: ${{ matrix.obj }}
path: ${{ runner.temp }}/release
- name: ccache (link)
uses: hendrikmuhs/ccache-action@v1.2
with:
key: ${{ runner.os }}-ccache-${{ matrix.tag }}-link
restore-keys: ${{ runner.os }}-ccache-${{ matrix.tag }}-link
- name: Link
env:
CPU_TARGET: ${{ matrix.cpu }}
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
BUN_RELEASE_DIR: ${{ runner.temp }}/release
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
run: |
rm -rf packages/${{ matrix.package }}
mkdir -p packages/${{ matrix.package }}
mv ${{ runner.temp }}/release/* packages/${{ matrix.package }}/
make bun-link-lld-release copy-to-bun-release-dir-bin
- name: Zip
env:
CPU_TARGET: ${{ matrix.cpu }}
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
BUN_RELEASE_DIR: ${{ runner.temp }}/release
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
run: |
cd ${{runner.temp}}/release
chmod +x bun-profile bun
mkdir ${{matrix.tag}}-profile
mkdir ${{matrix.tag}}
/usr/bin/strip -S bun
mv bun-profile ${{matrix.tag}}-profile/bun-profile
mv bun ${{matrix.tag}}/bun
zip -r ${{matrix.tag}}-profile.zip ${{matrix.tag}}-profile
zip -r ${{matrix.tag}}.zip ${{matrix.tag}}
- uses: actions/upload-artifact@v3
with:
name: ${{matrix.tag}}-profile
path: ${{runner.temp}}/release/${{matrix.tag}}-profile.zip
- uses: actions/upload-artifact@v3
with:
name: ${{matrix.tag}}
path: ${{runner.temp}}/release/${{matrix.tag}}.zip
- name: Release
id: release
uses: ncipollo/release-action@v1
if: |
github.repository_owner == 'oven-sh'
&& github.ref == 'refs/heads/main'
with:
prerelease: true
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
allowUpdates: true
replacesArtifacts: true
generateReleaseNotes: true
artifactErrorsFailBuild: true
token: ${{ secrets.GITHUB_TOKEN }}
name: "Canary (${{github.sha}})"
tag: "canary"
artifacts: "${{runner.temp}}/release/${{matrix.tag}}.zip,${{runner.temp}}/release/${{matrix.tag}}-profile.zip"
macOS-test:
name: Tests ${{matrix.tag}}
runs-on: ${{ matrix.runner }}
needs: [macOS]
if: github.event_name == 'pull_request' && github.repository_owner == 'oven-sh'
timeout-minutes: 30
outputs:
failing_tests: ${{ steps.test.outputs.failing_tests }}
failing_tests_count: ${{ steps.test.outputs.failing_tests_count }}
strategy:
fail-fast: false
matrix:
include:
- tag: bun-darwin-x64
runner: macos-11
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
with:
submodules: false
- id: download
name: Download
uses: actions/download-artifact@v3
with:
name: ${{matrix.tag}}
path: ${{runner.temp}}/release
- id: install
name: Install
run: |
cd ${{runner.temp}}/release
unzip ${{matrix.tag}}.zip
cd ${{matrix.tag}}
chmod +x bun
sudo mv bun /usr/local/bin/bun
bun --version
- id: test
name: Test (node runner)
# if: ${{github.event.inputs.use_bun == 'false'}}
run: |
bun install
bun install --cwd test
bun install --cwd packages/bun-internal-test
node packages/bun-internal-test/src/runner.node.mjs || true
- name: Comment on PR
if: steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: test-failures-${{matrix.tag}}
message: |
❌ @${{ github.actor }} ${{ steps.test.outputs.failing_tests_count }} files with test failures on ${{ matrix.tag }}:
${{ steps.test.outputs.failing_tests }}
**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- name: Uncomment on PR
if: steps.test.outputs.failing_tests == '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: test-failures-${{matrix.tag}}
mode: upsert
create_if_not_exists: false
message: |
✅ test failures on ${{ matrix.tag }} have been resolved.
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- id: fail
name: Fail the build
if: steps.test.outputs.failing_tests != ''
run: exit 1
179
.github/workflows/bun-release-canary.yml vendored Normal file
@@ -0,0 +1,179 @@
name: bun-release-canary
concurrency: release-canary
on:
schedule:
- cron: "0 14 * * *" # every day at 6am PST
workflow_dispatch:
jobs:
sign:
name: Sign Release
runs-on: ubuntu-latest
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-release
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-gpg
name: Setup GPG
uses: crazy-max/ghaction-import-gpg@v5
with:
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
passphrase: ${{ secrets.GPG_PASSPHRASE }}
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- id: bun-run
name: Sign Release
run: |
echo "$GPG_PASSPHRASE" | bun upload-assets -- "canary"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
npm:
name: Release to NPM
runs-on: ubuntu-latest
needs: sign
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-release
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- id: bun-run
name: Release
run: bun upload-npm -- canary publish
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
# npm-types:
# name: Release types to NPM
# runs-on: ubuntu-latest
# defaults:
# run:
# working-directory: packages/bun-types
# steps:
# - id: checkout
# name: Checkout
# uses: actions/checkout@v3
# - id: setup-node
# name: Setup Node.js
# uses: actions/setup-node@v3
# with:
# node-version: latest
# - id: setup-bun
# name: Setup Bun
# uses: oven-sh/setup-bun@v1
# with:
# bun-version: canary
# - id: bun-install
# name: Install Dependencies
# run: bun install
# - id: setup-env
# name: Setup Environment
# run: |
# SHA=$(git rev-parse --short "$GITHUB_SHA")
# VERSION=$(bun --version)
# TAG="${VERSION}-canary.$(date '+%Y%m%d').1+${SHA}"
# echo "Setup tag: ${TAG}"
# echo "TAG=${TAG}" >> ${GITHUB_ENV}
# - id: bun-run
# name: Build
# run: bun run build
# env:
# BUN_VERSION: ${{ env.TAG }}
# - id: npm-publish
# name: Release
# uses: JS-DevTools/npm-publish@v1
# with:
# package: packages/bun-types/dist/package.json
# token: ${{ secrets.NPM_TOKEN }}
# tag: canary
docker:
name: Release to Dockerhub
runs-on: ubuntu-latest
needs: sign
if: github.repository_owner == 'oven-sh'
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: qemu
name: Setup Docker QEMU
uses: docker/setup-qemu-action@v2
- id: buildx
name: Setup Docker buildx
uses: docker/setup-buildx-action@v2
with:
platforms: linux/amd64,linux/arm64
- id: metadata
name: Setup Docker metadata
uses: docker/metadata-action@v4
with:
images: oven/bun
tags: canary
- id: login
name: Login to Docker
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- id: push
name: Push to Docker
uses: docker/build-push-action@v3
with:
context: ./dockerhub
file: ./dockerhub/Dockerfile-debian
platforms: linux/amd64,linux/arm64
builder: ${{ steps.buildx.outputs.name }}
push: true
tags: ${{ steps.metadata.outputs.tags }}
labels: ${{ steps.metadata.outputs.labels }}
build-args: |
BUN_VERSION=canary
s3:
name: Upload to S3
runs-on: ubuntu-latest
needs: sign
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-release
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- id: bun-run
name: Release
run: bun upload-s3 -- canary
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY}}
AWS_ENDPOINT: ${{ secrets.AWS_ENDPOINT }}
AWS_BUCKET: bun
@@ -0,0 +1,54 @@
name: bun-release-canary
concurrency: release-canary
on:
push:
branches:
- main
paths:
- "packages/bun-types/**"
workflow_dispatch:
jobs:
npm-types:
name: Release types to NPM
runs-on: ubuntu-latest
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-types
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-node
name: Setup Node.js
uses: actions/setup-node@v3
with:
node-version: latest
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- id: setup-env
name: Setup Environment
run: |
SHA=$(git rev-parse --short "$GITHUB_SHA")
VERSION=$(bun --version)
TAG="${VERSION}-canary.$(date +'%Y%m%dT%H%M%S')"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: bun-run
name: Build
run: bun run build
env:
BUN_VERSION: ${{ env.TAG }}
- id: npm-publish
name: Release
uses: JS-DevTools/npm-publish@v1
with:
package: packages/bun-types/dist/package.json
token: ${{ secrets.NPM_TOKEN }}
tag: canary
257
.github/workflows/bun-release.yml vendored Normal file
@@ -0,0 +1,257 @@
name: bun-release
concurrency: release
on:
release:
types:
- published
workflow_dispatch:
inputs:
tag:
type: string
description: The tag to publish
required: true
jobs:
sign:
name: Sign Release
runs-on: ubuntu-latest
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-release
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-env
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: setup-gpg
name: Setup GPG
uses: crazy-max/ghaction-import-gpg@v5
with:
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
passphrase: ${{ secrets.GPG_PASSPHRASE }}
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- id: bun-run
name: Sign Release
run: |
echo "$GPG_PASSPHRASE" | bun upload-assets -- "${{ env.TAG }}"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
npm:
name: Release to NPM
runs-on: ubuntu-latest
needs: sign
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-release
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-env
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- id: bun-run
name: Release
run: bun upload-npm -- "${{ env.TAG }}" publish
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
npm-types:
name: Release types to NPM
runs-on: ubuntu-latest
needs: sign
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-types
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-env
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: setup-node
name: Setup Node.js
uses: actions/setup-node@v3
with:
node-version: latest
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- id: bun-run
name: Build
run: bun run build
env:
BUN_VERSION: ${{ env.TAG }}
- id: npm-publish
name: Release
uses: JS-DevTools/npm-publish@v1
with:
package: packages/bun-types/dist/package.json
token: ${{ secrets.NPM_TOKEN }}
docker:
name: Release to Dockerhub
runs-on: ubuntu-latest
needs: sign
if: github.repository_owner == 'oven-sh'
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: environment
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: qemu
name: Setup Docker QEMU
uses: docker/setup-qemu-action@v2
- id: buildx
name: Setup Docker buildx
uses: docker/setup-buildx-action@v2
with:
platforms: linux/amd64,linux/arm64
- id: metadata
name: Setup Docker metadata
uses: docker/metadata-action@v4
with:
images: oven/bun
tags: |
type=match,pattern=(bun-v)?(\d.\d.\d),group=2,value=${{ env.TAG }}
type=match,pattern=(bun-v)?(\d.\d),group=2,value=${{ env.TAG }}
- id: login
name: Login to Docker
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- id: push
name: Push to Docker
uses: docker/build-push-action@v3
with:
context: ./dockerhub
file: ./dockerhub/Dockerfile-debian
platforms: linux/amd64,linux/arm64
builder: ${{ steps.buildx.outputs.name }}
push: true
tags: ${{ steps.metadata.outputs.tags }}
labels: ${{ steps.metadata.outputs.labels }}
build-args: |
BUN_VERSION=${{ env.TAG }}
homebrew:
name: Release to Homebrew
runs-on: ubuntu-latest
needs: sign
if: github.repository_owner == 'oven-sh'
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
with:
repository: oven-sh/homebrew-bun
token: ${{ secrets.ROBOBUN_TOKEN }}
- id: setup-gpg
name: Setup GPG
uses: crazy-max/ghaction-import-gpg@v5
with:
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
passphrase: ${{ secrets.GPG_PASSPHRASE }}
- id: setup-env
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: setup-ruby
name: Setup Ruby
uses: ruby/setup-ruby@v1
with:
ruby-version: "2.6"
- id: update-tap
name: Update Tap
run: ruby scripts/release.rb "${{ env.TAG }}"
- id: commit-tap
name: Commit Tap
uses: stefanzweifel/git-auto-commit-action@v4
with:
commit_options: --gpg-sign=${{ steps.setup-gpg.outputs.keyid }}
commit_message: Release ${{ env.TAG }}
commit_user_name: robobun
commit_user_email: robobun@oven.sh
commit_author: robobun <robobun@oven.sh>
s3:
name: Upload to S3
runs-on: ubuntu-latest
needs: sign
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-release
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-env
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- id: bun-run
name: Release
run: bun upload-s3 -- "${{ env.TAG }}"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY}}
AWS_ENDPOINT: ${{ secrets.AWS_ENDPOINT }}
AWS_BUCKET: bun
41
.github/workflows/bun-types-tests.yml vendored Normal file
@@ -0,0 +1,41 @@
name: bun-types
on:
push:
paths:
- "packages/bun-types/**"
branches: [main]
pull_request:
paths:
- "packages/bun-types/**"
jobs:
tests:
name: type-tests
runs-on: ubuntu-latest
defaults:
run:
working-directory: packages/bun-types
steps:
- name: Checkout repo
uses: actions/checkout@v3
- name: Install bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- name: Install node
uses: actions/setup-node@v3
with:
node-version: latest
- name: Install dependencies
run: |
bun install
- name: Generate package
run: bun run build
- name: Tests
run: bun run test
@@ -1,41 +0,0 @@
name: clang-format
permissions:
contents: write
on:
workflow_call:
workflow_dispatch:
pull_request:
merge_group:
env:
BUN_VERSION: "1.2.0"
LLVM_VERSION: "18.1.8"
LLVM_VERSION_MAJOR: "18"
jobs:
clang-format:
name: clang-format
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: ${{ env.BUN_VERSION }}
- name: Install LLVM
run: |
curl -fsSL https://apt.llvm.org/llvm.sh | sudo bash -s -- ${{ env.LLVM_VERSION_MAJOR }} all
- name: Clang Format
env:
LLVM_VERSION: ${{ env.LLVM_VERSION }}
run: |
bun run clang-format
- name: Commit
uses: stefanzweifel/git-auto-commit-action@v5
with:
commit_message: "`bun run clang-format`"
@@ -1,41 +0,0 @@
name: clang-tidy
permissions:
contents: write
on:
workflow_call:
workflow_dispatch:
pull_request:
merge_group:
env:
BUN_VERSION: "1.2.0"
LLVM_VERSION: "18.1.8"
LLVM_VERSION_MAJOR: "18"
jobs:
clang-tidy:
name: clang-tidy
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: ${{ env.BUN_VERSION }}
- name: Install LLVM
run: |
curl -fsSL https://apt.llvm.org/llvm.sh | sudo bash -s -- ${{ env.LLVM_VERSION_MAJOR }} all
- name: Clang Tidy
env:
LLVM_VERSION: ${{ env.LLVM_VERSION }}
run: |
bun run clang-tidy:diff
- name: Commit
uses: stefanzweifel/git-auto-commit-action@v5
with:
commit_message: "`bun run clang-tidy`"
@@ -1,85 +0,0 @@
name: C++ Linter comment
permissions:
actions: read
pull-requests: write
on:
workflow_run:
workflows:
- lint-cpp
types:
- completed
jobs:
comment-lint:
if: ${{ github.repository_owner == 'oven-sh' }}
name: Comment
runs-on: ubuntu-latest
steps:
- name: Download Comment
uses: actions/download-artifact@v4
with:
name: format.log
github-token: ${{ github.token }}
run-id: ${{ github.event.workflow_run.id }}
- name: PR Number
uses: actions/download-artifact@v4
with:
name: pr-number.txt
github-token: ${{ github.token }}
run-id: ${{ github.event.workflow_run.id }}
- name: Did Fail
uses: actions/download-artifact@v4
with:
name: did_fail.txt
github-token: ${{ github.token }}
run-id: ${{ github.event.workflow_run.id }}
- name: Setup Environment
id: env
shell: bash
run: |
# Copy to outputs
echo "pr-number=$(cat pr-number.txt)" >> $GITHUB_OUTPUT
{
echo 'text_output<<EOF'
cat format.log
echo EOF
} >> "$GITHUB_OUTPUT"
echo "did_fail=$(cat did_fail.txt)" >> $GITHUB_OUTPUT
- name: Find Comment
id: comment
uses: peter-evans/find-comment@v3
with:
issue-number: ${{ steps.env.outputs.pr-number }}
comment-author: github-actions[bot]
body-includes: <!-- generated-comment lint-cpp-workflow=${{ github.workflow }} -->
- name: Update Comment
uses: peter-evans/create-or-update-comment@v4
if: steps.env.outputs.did_fail != '0'
with:
comment-id: ${{ steps.comment.outputs.comment-id }}
issue-number: ${{ steps.env.outputs.pr-number }}
body: |
@${{ github.actor }}, `clang-tidy` had something to share with you about your code:
```cpp
${{ steps.env.outputs.text_output }}
```
Commit: ${{ github.event.workflow_run.head_sha || github.sha }}
<!-- generated-comment lint-cpp-workflow=${{ github.workflow }} -->
edit-mode: replace
- name: Update Previous Comment
uses: peter-evans/create-or-update-comment@v4
if: steps.env.outputs.did_fail == '0' && steps.comment.outputs.comment-id != ''
with:
comment-id: ${{ steps.comment.outputs.comment-id }}
issue-number: ${{ steps.env.outputs.pr-number }}
body: |
clang-tidy nits are fixed! Thank you.
<!-- generated-comment lint-cpp-workflow=${{ github.workflow }} -->
edit-mode: replace
@@ -1,21 +0,0 @@
name: Docs
on:
push:
paths:
- "docs/**"
- "CONTRIBUTING.md"
branches:
- main
jobs:
deploy:
name: Deploy
runs-on: ubuntu-latest
if: ${{ github.repository_owner == 'oven-sh' }}
steps:
# redeploy Vercel site when a file in `docs` changes
# using VERCEL_DEPLOY_HOOK environment variable
- name: Trigger Webhook
run: |
curl -v ${{ secrets.VERCEL_DEPLOY_HOOK }}
@@ -1,171 +0,0 @@
name: Issue Labeled
env:
BUN_VERSION: 1.1.44
on:
issues:
types: [labeled]
jobs:
# on-bug:
# runs-on: ubuntu-latest
# if: github.event.label.name == 'bug' || github.event.label.name == 'crash'
# permissions:
# issues: write
# steps:
# - name: Checkout
# uses: actions/checkout@v4
# with:
# sparse-checkout: |
# scripts
# .github
# CMakeLists.txt
# - name: Setup Bun
# uses: ./.github/actions/setup-bun
# with:
# bun-version: ${{ env.BUN_VERSION }}
# - name: "categorize bug"
# id: add-labels
# env:
# GITHUB_ISSUE_BODY: ${{ github.event.issue.body }}
# GITHUB_ISSUE_TITLE: ${{ github.event.issue.title }}
# ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
# shell: bash
# run: |
# echo '{"dependencies": { "@anthropic-ai/sdk": "latest" }}' > scripts/package.json && bun install --cwd=./scripts
# LABELS=$(bun scripts/label-issue.ts)
# echo "labels=$LABELS" >> $GITHUB_OUTPUT
# - name: Add labels
# uses: actions-cool/issues-helper@v3
# if: steps.add-labels.outputs.labels != ''
# with:
# actions: "add-labels"
# token: ${{ secrets.GITHUB_TOKEN }}
# issue-number: ${{ github.event.issue.number }}
# labels: ${{ steps.add-labels.outputs.labels }}
on-labeled:
runs-on: ubuntu-latest
if: github.event.label.name == 'crash' || github.event.label.name == 'needs repro'
permissions:
issues: write
steps:
- name: Checkout
uses: actions/checkout@v4
with:
sparse-checkout: |
scripts
.github
CMakeLists.txt
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: ${{ env.BUN_VERSION }}
- name: "add platform and command label"
id: add-labels
if: github.event.label.name == 'crash'
env:
GITHUB_ISSUE_BODY: ${{ github.event.issue.body }}
GITHUB_ISSUE_TITLE: ${{ github.event.issue.title }}
shell: bash
run: |
LABELS=$(bun scripts/read-issue.ts)
echo "labels=$LABELS" >> $GITHUB_OUTPUT
bun scripts/is-outdated.ts
if [[ -f "is-outdated.txt" ]]; then
echo "is-outdated=true" >> $GITHUB_OUTPUT
fi
if [[ -f "outdated.txt" ]]; then
echo "oudated=$(cat outdated.txt)" >> $GITHUB_OUTPUT
fi
echo "latest=$(cat LATEST)" >> $GITHUB_OUTPUT
rm -rf is-outdated.txt outdated.txt latest.txt
- name: Generate comment text with Sentry Link
if: github.event.label.name == 'crash'
# ignore if fail
continue-on-error: true
id: generate-comment-text
env:
GITHUB_ISSUE_BODY: ${{ github.event.issue.body }}
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_EVENTS_SECRET }}
shell: bash
run: |
bun scripts/associate-issue-with-sentry.ts
if [[ -f "sentry-link.txt" ]]; then
echo "sentry-link=$(cat sentry-link.txt)" >> $GITHUB_OUTPUT
fi
if [[ -f "sentry-id.txt" ]]; then
echo "sentry-id=$(cat sentry-id.txt)" >> $GITHUB_OUTPUT
fi
- name: Add labels
uses: actions-cool/issues-helper@v3
if: github.event.label.name == 'crash'
with:
actions: "add-labels"
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.issue.number }}
labels: ${{ steps.add-labels.outputs.labels }}
- name: Comment outdated
if: steps.add-labels.outputs.is-outdated == 'true' && github.event.label.name == 'crash' && steps.generate-comment-text.outputs.sentry-link == ''
uses: actions-cool/issues-helper@v3
with:
actions: "create-comment"
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.issue.number }}
body: |
@${{ github.event.issue.user.login }}, the latest version of Bun is v${{ steps.add-labels.outputs.latest }}, but this crash was reported on Bun v${{ steps.add-labels.outputs.outdated }}.
Are you able to reproduce this crash on the latest version of Bun?
```sh
bun upgrade
```
- name: Comment with Sentry Link and outdated version
if: steps.generate-comment-text.outputs.sentry-link != '' && github.event.label.name == 'crash' && steps.add-labels.outputs.is-outdated == 'true'
uses: actions-cool/issues-helper@v3
with:
actions: "create-comment"
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.issue.number }}
body: |
@${{ github.event.issue.user.login }}, thank you for reporting this crash. The latest version of Bun is v${{ steps.add-labels.outputs.latest }}, but this crash was reported on Bun v${{ steps.add-labels.outputs.outdated }}.
Are you able to reproduce this crash on the latest version of Bun?
```sh
bun upgrade
```
For Bun's internal tracking, this issue is [${{ steps.generate-comment-text.outputs.sentry-id }}](${{ steps.generate-comment-text.outputs.sentry-link }}).
<!-- sentry-id: ${{ steps.generate-comment-text.outputs.sentry-id }} -->
<!-- sentry-link: ${{ steps.generate-comment-text.outputs.sentry-link }} -->
- name: Comment with Sentry Link
if: steps.generate-comment-text.outputs.sentry-link != '' && github.event.label.name == 'crash' && steps.add-labels.outputs.is-outdated != 'true'
uses: actions-cool/issues-helper@v3
with:
actions: "create-comment"
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.issue.number }}
body: |
Thank you for reporting this crash.
For Bun's internal tracking, this issue is [${{ steps.generate-comment-text.outputs.sentry-id }}](${{ steps.generate-comment-text.outputs.sentry-link }}).
<!-- sentry-id: ${{ steps.generate-comment-text.outputs.sentry-id }} -->
<!-- sentry-link: ${{ steps.generate-comment-text.outputs.sentry-link }} -->
- name: Comment needs repro
if: github.event.label.name == 'needs repro'
uses: actions-cool/issues-helper@v3
with:
actions: "create-comment"
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.issue.number }}
body: |
Hello @${{ github.event.issue.user.login }}. Please provide a [minimal reproduction](https://stackoverflow.com/help/minimal-reproducible-example) using a GitHub repository, [Replit](https://replit.com/@replit/Bun), [CodeSandbox](https://codesandbox.io/templates/bun), or provide a bulleted list of commands to run that reproduce this issue. Issues marked with `needs repro` will be closed if they have no activity within 3 days.
@@ -1,22 +0,0 @@
name: Lint
on:
pull_request:
workflow_dispatch:
env:
BUN_VERSION: "1.2.0"
OXLINT_VERSION: "0.15.0"
jobs:
lint-js:
name: "Lint JavaScript"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: ${{ env.BUN_VERSION }}
- name: Lint
run: bunx oxlint --config oxlint.json --quiet --format github
@@ -1,89 +0,0 @@
name: Comment on updated submodule
on:
pull_request_target:
paths:
- "src/generated_versions_list.zig"
- ".github/workflows/on-submodule-update.yml"
jobs:
comment:
name: Comment
runs-on: ubuntu-latest
if: ${{ github.repository_owner == 'oven-sh' }}
permissions:
contents: read
pull-requests: write
issues: write
steps:
- name: Checkout current
uses: actions/checkout@v4
with:
sparse-checkout: |
src
- name: Hash generated versions list
id: hash
run: |
echo "hash=$(sha256sum src/generated_versions_list.zig | cut -d ' ' -f 1)" >> $GITHUB_OUTPUT
- name: Checkout base
uses: actions/checkout@v4
with:
ref: ${{ github.base_ref }}
sparse-checkout: |
src
- name: Hash base
id: base
run: |
echo "base=$(sha256sum src/generated_versions_list.zig | cut -d ' ' -f 1)" >> $GITHUB_OUTPUT
- name: Compare
id: compare
run: |
if [ "${{ steps.hash.outputs.hash }}" != "${{ steps.base.outputs.base }}" ]; then
echo "changed=true" >> $GITHUB_OUTPUT
else
echo "changed=false" >> $GITHUB_OUTPUT
fi
- name: Find Comment
id: comment
uses: peter-evans/find-comment@v3
with:
issue-number: ${{ github.event.pull_request.number }}
comment-author: github-actions[bot]
body-includes: <!-- generated-comment submodule-updated -->
- name: Write Warning Comment
uses: peter-evans/create-or-update-comment@v4
if: steps.compare.outputs.changed == 'true'
with:
comment-id: ${{ steps.comment.outputs.comment-id }}
issue-number: ${{ github.event.pull_request.number }}
edit-mode: replace
body: |
⚠️ **Warning:** @${{ github.actor }}, this PR has changes to submodule versions.
If this change was intentional, please ignore this message. If not, please undo changes to submodules and rebase your branch.
<!-- generated-comment submodule-updated -->
- name: Add labels
uses: actions-cool/issues-helper@v3
if: steps.compare.outputs.changed == 'true'
with:
actions: "add-labels"
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.pull_request.number }}
labels: "changed-submodules"
- name: Remove labels
uses: actions-cool/issues-helper@v3
if: steps.compare.outputs.changed == 'false'
with:
actions: "remove-labels"
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.pull_request.number }}
labels: "changed-submodules"
- name: Delete outdated comment
uses: actions-cool/issues-helper@v3
if: steps.compare.outputs.changed == 'false' && steps.comment.outputs.comment-id != ''
with:
actions: "delete-comment"
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.pull_request.number }}
comment-id: ${{ steps.comment.outputs.comment-id }}
76
.github/workflows/prettier-fmt.yml vendored Normal file
@@ -0,0 +1,76 @@
name: prettier
on:
pull_request:
branches:
- main
- jarred/test-actions
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
jobs:
prettier-fmt:
name: prettier
runs-on: ubuntu-latest
outputs:
prettier_fmt_errs: ${{ steps.fmt.outputs.prettier_fmt_errs }}
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
- id: setup
name: Setup
uses: oven-sh/setup-bun@v1
with:
bun-version: latest
- id: install
name: Install prettier
run: bun install
- name: Run prettier
id: fmt
run: |
rm -f .failed
bun prettier --check "./bench/**/*.{ts,tsx,js,jsx,mjs}" "./test/**/*.{ts,tsx,js,jsx,mjs}" "./src/**/*.{ts,tsx,js,jsx}" --config .prettierrc.cjs 2> prettier-fmt.err > prettier-fmt1.err || echo 'failed' > .failed
if [ -s .failed ]; then
delimiter="$(openssl rand -hex 8)"
echo "prettier_fmt_errs<<${delimiter}" >> "${GITHUB_OUTPUT}"
cat prettier-fmt.err >> "${GITHUB_OUTPUT}"
cat prettier-fmt1.err >> "${GITHUB_OUTPUT}"
echo "${delimiter}" >> "${GITHUB_OUTPUT}"
fi
- name: Comment on PR
if: steps.fmt.outputs.prettier_fmt_errs != ''
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: prettier-fmt
message: |
❌ @${{ github.actor }} `prettier` reported errors
```js
${{ steps.fmt.outputs.prettier_fmt_errs }}
```
To one-off fix this manually, run:
```sh
bun fmt
```
You might need to run `bun install` locally and configure your text editor to [auto-format on save](https://marketplace.visualstudio.com/items?itemName=esbenp.prettier-vscode).
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- name: Uncomment on PR
if: steps.fmt.outputs.prettier_fmt_errs == ''
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: prettier-fmt
mode: upsert
create_if_not_exists: false
message: |
✅ `prettier` errors have been resolved. Thank you.
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- name: Fail the job
if: steps.fmt.outputs.prettier_fmt_errs != ''
run: exit 1
@@ -1,37 +0,0 @@
name: prettier-format
permissions:
contents: write
on:
workflow_call:
workflow_dispatch:
pull_request:
merge_group:
env:
BUN_VERSION: "1.2.0"
jobs:
prettier-format:
name: prettier-format
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: ${{ env.BUN_VERSION }}
- name: Setup Dependencies
run: |
bun install
- name: Prettier Format
run: |
bun run prettier:diff
- name: Commit
uses: stefanzweifel/git-auto-commit-action@v5
with:
commit_message: "`bun run prettier:extra`"
@@ -1,368 +0,0 @@
# TODO: Move this to bash scripts instead of GitHub Actions
# so it can be run from Buildkite, see: .buildkite/scripts/release.sh
name: Release
concurrency: release
env:
BUN_VERSION: ${{ github.event.inputs.tag || github.event.release.tag_name || 'canary' }}
BUN_LATEST: ${{ (github.event.inputs.is-latest || github.event.release.tag_name) && 'true' || 'false' }}
on:
release:
types:
- published
schedule:
- cron: "0 14 * * *" # every day at 6am PST
workflow_dispatch:
inputs:
is-latest:
description: Is this the latest release?
type: boolean
default: false
tag:
type: string
description: What is the release tag? (e.g. "1.0.2", "canary")
required: true
use-docker:
description: Should Docker images be released?
type: boolean
default: false
use-npm:
description: Should npm packages be published?
type: boolean
default: false
use-homebrew:
description: Should binaries be released to Homebrew?
type: boolean
default: false
use-s3:
description: Should binaries be uploaded to S3?
type: boolean
default: false
use-types:
description: Should types be released to npm?
type: boolean
default: false
use-definitelytyped:
description: "Should types be PR'd to DefinitelyTyped?"
type: boolean
default: false
jobs:
sign:
name: Sign Release
runs-on: ubuntu-latest
if: ${{ github.repository_owner == 'oven-sh' }}
permissions:
contents: write
defaults:
run:
working-directory: packages/bun-release
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup GPG
uses: crazy-max/ghaction-import-gpg@v5
with:
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
passphrase: ${{ secrets.GPG_PASSPHRASE }}
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: "1.2.3"
- name: Install Dependencies
run: bun install
- name: Sign Release
run: |
echo "$GPG_PASSPHRASE" | bun upload-assets -- "${{ env.BUN_VERSION }}"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
npm:
name: Release to NPM
runs-on: ubuntu-latest
needs: sign
if: ${{ github.event_name != 'workflow_dispatch' || github.event.inputs.use-npm == 'true' }}
permissions:
contents: read
defaults:
run:
working-directory: packages/bun-release
steps:
- name: Checkout
uses: actions/checkout@v4
with:
# To work around an issue
ref: main
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: "1.2.3"
- name: Install Dependencies
run: bun install
- name: Release
run: bun upload-npm -- "${{ env.BUN_VERSION }}" publish
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
npm-types:
name: Release types to NPM
runs-on: ubuntu-latest
needs: sign
if: ${{ github.event_name != 'workflow_dispatch' || github.event.inputs.use-types == 'true' }}
permissions:
contents: read
defaults:
run:
working-directory: packages/bun-types
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup Node.js
uses: actions/setup-node@v3
with:
node-version: latest
- name: Setup Bun
if: ${{ env.BUN_VERSION != 'canary' }}
uses: ./.github/actions/setup-bun
with:
bun-version: "1.2.3"
- name: Setup Bun
if: ${{ env.BUN_VERSION == 'canary' }}
uses: ./.github/actions/setup-bun
with:
bun-version: "canary" # Must be 'canary' so tag is correct
- name: Install Dependencies
run: bun install
- name: Setup Tag
if: ${{ env.BUN_VERSION == 'canary' }}
run: |
VERSION=$(bun --version)
TAG="${VERSION}-canary.$(date +'%Y%m%dT%H%M%S')"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- name: Build
run: bun run build
env:
BUN_VERSION: ${{ env.TAG || env.BUN_VERSION }}
- name: Release (canary)
if: ${{ env.BUN_VERSION == 'canary' }}
uses: JS-DevTools/npm-publish@v1
with:
package: packages/bun-types/package.json
token: ${{ secrets.NPM_TOKEN }}
tag: canary
- name: Release (latest)
if: ${{ env.BUN_LATEST == 'true' }}
uses: JS-DevTools/npm-publish@v1
with:
package: packages/bun-types/package.json
token: ${{ secrets.NPM_TOKEN }}
definitelytyped:
name: Make pr to DefinitelyTyped to update `bun-types` version
runs-on: ubuntu-latest
needs: npm-types
if: ${{ github.event_name == 'release' || github.event.inputs.use-definitelytyped == 'true' }}
permissions:
contents: read
steps:
- name: Checkout (DefinitelyTyped)
uses: actions/checkout@v4
with:
repository: DefinitelyTyped/DefinitelyTyped
- name: Checkout (bun)
uses: actions/checkout@v4
with:
path: bun
- name: Setup Bun
uses: ./bun/.github/actions/setup-bun
with:
bun-version: "1.2.0"
- id: bun-version
run: echo "BUN_VERSION=${BUN_VERSION#bun-v}" >> "$GITHUB_OUTPUT"
- name: Update bun-types version in package.json
run: |
bun -e '
const file = Bun.file("./types/bun/package.json");
const json = await file.json();
const version = "${{ steps.bun-version.outputs.BUN_VERSION }}";
json.dependencies["bun-types"] = version;
json.version = version.slice(0, version.lastIndexOf(".")) + ".9999";
await file.write(JSON.stringify(json, null, 4) + "\n");
'
- name: Create Pull Request
uses: peter-evans/create-pull-request@v7
if: ${{ env.BUN_LATEST == 'true' && env.BUN_VERSION != 'canary'}}
with:
token: ${{ secrets.ROBOBUN_TOKEN }}
add-paths: ./types/bun/package.json
title: "[bun] update to ${{ steps.bun-version.outputs.BUN_VERSION }}"
commit-message: "[bun] update to ${{ steps.bun-version.outputs.BUN_VERSION }}"
body: |
Update `bun-types` version to ${{ steps.bun-version.outputs.BUN_VERSION }}
https://bun.sh/blog/${{ env.BUN_VERSION }}
push-to-fork: oven-sh/DefinitelyTyped
branch: ${{env.BUN_VERSION}}
docker:
name: Release to Dockerhub
runs-on: ubuntu-latest
needs: sign
if: ${{ github.event_name != 'workflow_dispatch' || github.event.inputs.use-docker == 'true' }}
permissions:
contents: read
strategy:
fail-fast: false
matrix:
include:
- variant: debian
suffix: ""
- variant: debian
suffix: -debian
- variant: slim
suffix: -slim
dir: debian-slim
- variant: alpine
suffix: -alpine
- variant: distroless
suffix: -distroless
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup Docker emulator
uses: docker/setup-qemu-action@v3
- id: buildx
name: Setup Docker buildx
uses: docker/setup-buildx-action@v3
with:
platforms: linux/amd64,linux/arm64
- id: metadata
name: Setup Docker metadata
uses: docker/metadata-action@v5
with:
images: oven/bun
flavor: |
latest=false
tags: |
type=raw,value=latest,enable=${{ env.BUN_LATEST == 'true' && matrix.suffix == '' }}
type=raw,value=${{ matrix.variant }},enable=${{ env.BUN_LATEST == 'true' }}
type=match,pattern=(bun-v)?(canary|\d+.\d+.\d+),group=2,value=${{ env.BUN_VERSION }},suffix=${{ matrix.suffix }}
type=match,pattern=(bun-v)?(canary|\d+.\d+),group=2,value=${{ env.BUN_VERSION }},suffix=${{ matrix.suffix }}
type=match,pattern=(bun-v)?(canary|\d+),group=2,value=${{ env.BUN_VERSION }},suffix=${{ matrix.suffix }}
- name: Login to Docker
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Push to Docker
uses: docker/build-push-action@v6
with:
context: ./dockerhub/${{ matrix.dir || matrix.variant }}
platforms: linux/amd64,linux/arm64
builder: ${{ steps.buildx.outputs.name }}
push: true
tags: ${{ steps.metadata.outputs.tags }}
labels: ${{ steps.metadata.outputs.labels }}
build-args: |
BUN_VERSION=${{ env.BUN_VERSION }}
homebrew:
name: Release to Homebrew
runs-on: ubuntu-latest
needs: sign
permissions:
contents: read
if: ${{ github.event_name == 'release' || github.event.inputs.use-homebrew == 'true' }}
steps:
- name: Checkout
uses: actions/checkout@v4
with:
repository: oven-sh/homebrew-bun
token: ${{ secrets.ROBOBUN_TOKEN }}
- id: gpg
name: Setup GPG
uses: crazy-max/ghaction-import-gpg@v5
with:
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
passphrase: ${{ secrets.GPG_PASSPHRASE }}
- name: Setup Ruby
uses: ruby/setup-ruby@v1
with:
ruby-version: "2.6"
- name: Update Tap
run: ruby scripts/release.rb "${{ env.BUN_VERSION }}"
- name: Commit Tap
uses: stefanzweifel/git-auto-commit-action@v4
with:
commit_options: --gpg-sign=${{ steps.gpg.outputs.keyid }}
commit_message: Release ${{ env.BUN_VERSION }}
commit_user_name: robobun
commit_user_email: robobun@oven.sh
commit_author: robobun <robobun@oven.sh>
s3:
name: Upload to S3
runs-on: ubuntu-latest
needs: sign
if: ${{ github.event_name != 'workflow_dispatch' || github.event.inputs.use-s3 == 'true' }}
permissions:
contents: read
defaults:
run:
working-directory: packages/bun-release
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: "1.2.0"
- name: Install Dependencies
run: bun install
- name: Release
run: bun upload-s3 -- "${{ env.BUN_VERSION }}"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY}}
AWS_ENDPOINT: ${{ secrets.AWS_ENDPOINT }}
AWS_BUCKET: bun
notify-sentry:
name: Notify Sentry
runs-on: ubuntu-latest
needs: s3
steps:
- name: Notify Sentry
uses: getsentry/action-release@v1.7.0
env:
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
with:
ignore_missing: true
ignore_empty: true
version: ${{ env.BUN_VERSION }}
environment: production
bump:
name: "Bump version"
runs-on: ubuntu-latest
if: ${{ github.event_name != 'schedule' }}
permissions:
pull-requests: write
contents: write
steps:
- name: Checkout
uses: actions/checkout@v4
if: ${{ env.BUN_LATEST == 'true' }}
- name: Setup Bun
uses: ./.github/actions/setup-bun
if: ${{ env.BUN_LATEST == 'true' }}
with:
bun-version: "1.2.0"
- name: Bump version
uses: ./.github/actions/bump
if: ${{ env.BUN_LATEST == 'true' }}
with:
version: ${{ env.BUN_VERSION }}
token: ${{ github.token }}
@@ -1,33 +0,0 @@
name: Lint
permissions:
contents: read
env:
LLVM_VERSION: 16
BUN_VERSION: "1.2.0"
on:
workflow_call:
jobs:
lint:
name: Lint
runs-on: ubuntu-latest
outputs:
text_output: ${{ steps.lint.outputs.text_output }}
json_output: ${{ steps.lint.outputs.json_output }}
count: ${{ steps.lint.outputs.count }}
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: ${{ env.BUN_VERSION }}
- name: Install Dependencies
run: |
bun --cwd=packages/bun-internal-test install
- name: Lint
id: lint
run: |
bun packages/bun-internal-test/src/linter.ts
@@ -1,30 +0,0 @@
name: Close inactive issues
on:
# schedule:
# - cron: "15 * * * *"
workflow_dispatch:
jobs:
close-issues:
runs-on: ubuntu-latest
permissions:
issues: write
pull-requests: write
steps:
- uses: actions/stale@v5
with:
days-before-issue-close: 5
any-of-issue-labels: "needs repro,waiting-for-author"
exempt-issue-labels: "neverstale"
exempt-pr-labels: "neverstale"
remove-stale-when-updated: true
stale-issue-label: "stale"
stale-pr-label: "stale"
stale-issue-message: "This issue is stale and may be closed due to inactivity. If you're still running into this, please leave a comment."
close-issue-message: "This issue was closed because it has been inactive for 5 days since being marked as stale."
days-before-pr-stale: 30
days-before-pr-close: 14
stale-pr-message: "This pull request is stale and may be closed due to inactivity."
close-pr-message: "This pull request has been closed due to inactivity."
repo-token: ${{ github.token }}
operations-per-run: 1000
@@ -1,32 +0,0 @@
name: Test Bump version
on:
workflow_dispatch:
inputs:
version:
type: string
description: What is the release tag? (e.g. "1.0.2", "canary")
required: true
env:
BUN_VERSION: "1.2.0"
jobs:
bump:
name: "Bump version"
runs-on: ubuntu-latest
permissions:
pull-requests: write
contents: write
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: ${{ env.BUN_VERSION }}
- name: Bump version
uses: ./.github/actions/bump
with:
version: ${{ inputs.version }}
token: ${{ github.token }}
@@ -1,19 +0,0 @@
name: Typos
on:
push:
branches:
- main
pull_request:
branches:
- main
jobs:
docs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Spellcheck
uses: crate-ci/typos@v1.29.4
with:
files: docs/**/*
@@ -1,92 +0,0 @@
name: Update c-ares
on:
schedule:
- cron: "0 4 * * 0"
workflow_dispatch:
jobs:
check-update:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- uses: actions/checkout@v4
- name: Check c-ares version
id: check-version
run: |
set -euo pipefail
# Extract the commit hash from the line after COMMIT
CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildCares.cmake)
if [ -z "$CURRENT_VERSION" ]; then
echo "Error: Could not find COMMIT line in BuildCares.cmake"
exit 1
fi
# Validate that it looks like a git hash
if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
echo "Error: Invalid git hash format in BuildCares.cmake"
echo "Found: $CURRENT_VERSION"
echo "Expected: 40 character hexadecimal string"
exit 1
fi
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/c-ares/c-ares/releases/latest)
if [ -z "$LATEST_RELEASE" ]; then
echo "Error: Failed to fetch latest release from GitHub API"
exit 1
fi
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
echo "Error: Could not extract tag name from GitHub API response"
exit 1
fi
LATEST_SHA=$(curl -sL "https://api.github.com/repos/c-ares/c-ares/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1
fi
if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
echo "Error: Invalid SHA format received from GitHub"
echo "Found: $LATEST_SHA"
echo "Expected: 40 character hexadecimal string"
exit 1
fi
echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT
- name: Update version if needed
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
run: |
set -euo pipefail
# Handle multi-line format where COMMIT and its value are on separate lines
sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildCares.cmake
- name: Create Pull Request
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
uses: peter-evans/create-pull-request@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
add-paths: |
cmake/targets/BuildCares.cmake
commit-message: "deps: update c-ares to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update c-ares to ${{ steps.check-version.outputs.tag }}"
delete-branch: true
branch: deps/update-cares-${{ github.run_number }}
body: |
## What does this PR do?
Updates c-ares to version ${{ steps.check-version.outputs.tag }}
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-cares.yml)
@@ -1,92 +0,0 @@
name: Update libarchive
on:
schedule:
- cron: "0 3 * * 0"
workflow_dispatch:
jobs:
check-update:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- uses: actions/checkout@v4
- name: Check libarchive version
id: check-version
run: |
set -euo pipefail
# Extract the commit hash from the line after COMMIT
CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildLibArchive.cmake)
if [ -z "$CURRENT_VERSION" ]; then
echo "Error: Could not find COMMIT line in BuildLibArchive.cmake"
exit 1
fi
# Validate that it looks like a git hash
if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
echo "Error: Invalid git hash format in BuildLibArchive.cmake"
echo "Found: $CURRENT_VERSION"
echo "Expected: 40 character hexadecimal string"
exit 1
fi
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/libarchive/libarchive/releases/latest)
if [ -z "$LATEST_RELEASE" ]; then
echo "Error: Failed to fetch latest release from GitHub API"
exit 1
fi
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
echo "Error: Could not extract tag name from GitHub API response"
exit 1
fi
LATEST_SHA=$(curl -sL "https://api.github.com/repos/libarchive/libarchive/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1
fi
if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
echo "Error: Invalid SHA format received from GitHub"
echo "Found: $LATEST_SHA"
echo "Expected: 40 character hexadecimal string"
exit 1
fi
echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT
- name: Update version if needed
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
run: |
set -euo pipefail
# Handle multi-line format where COMMIT and its value are on separate lines
sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildLibArchive.cmake
- name: Create Pull Request
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
uses: peter-evans/create-pull-request@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
add-paths: |
cmake/targets/BuildLibArchive.cmake
commit-message: "deps: update libarchive to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update libarchive to ${{ steps.check-version.outputs.tag }}"
delete-branch: true
branch: deps/update-libarchive-${{ github.run_number }}
body: |
## What does this PR do?
Updates libarchive to version ${{ steps.check-version.outputs.tag }}
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-libarchive.yml)
@@ -1,92 +0,0 @@
name: Update libdeflate
on:
schedule:
- cron: "0 2 * * 0"
workflow_dispatch:
jobs:
check-update:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- uses: actions/checkout@v4
- name: Check libdeflate version
id: check-version
run: |
set -euo pipefail
# Extract the commit hash from the line after COMMIT
CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildLibDeflate.cmake)
if [ -z "$CURRENT_VERSION" ]; then
echo "Error: Could not find COMMIT line in BuildLibDeflate.cmake"
exit 1
fi
# Validate that it looks like a git hash
if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
echo "Error: Invalid git hash format in BuildLibDeflate.cmake"
echo "Found: $CURRENT_VERSION"
echo "Expected: 40 character hexadecimal string"
exit 1
fi
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/ebiggers/libdeflate/releases/latest)
if [ -z "$LATEST_RELEASE" ]; then
echo "Error: Failed to fetch latest release from GitHub API"
exit 1
fi
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
echo "Error: Could not extract tag name from GitHub API response"
exit 1
fi
LATEST_SHA=$(curl -sL "https://api.github.com/repos/ebiggers/libdeflate/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1
fi
if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
echo "Error: Invalid SHA format received from GitHub"
echo "Found: $LATEST_SHA"
echo "Expected: 40 character hexadecimal string"
exit 1
fi
echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT
- name: Update version if needed
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
run: |
set -euo pipefail
# Handle multi-line format where COMMIT and its value are on separate lines
sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildLibDeflate.cmake
- name: Create Pull Request
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
uses: peter-evans/create-pull-request@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
add-paths: |
cmake/targets/BuildLibDeflate.cmake
commit-message: "deps: update libdeflate to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update libdeflate to ${{ steps.check-version.outputs.tag }}"
delete-branch: true
branch: deps/update-libdeflate-${{ github.run_number }}
body: |
## What does this PR do?
Updates libdeflate to version ${{ steps.check-version.outputs.tag }}
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-libdeflate.yml)
@@ -1,92 +0,0 @@
name: Update lolhtml
on:
schedule:
- cron: "0 1 * * 0"
workflow_dispatch:
jobs:
check-update:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- uses: actions/checkout@v4
- name: Check lolhtml version
id: check-version
run: |
set -euo pipefail
# Extract the commit hash from the line after COMMIT
CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildLolHtml.cmake)
if [ -z "$CURRENT_VERSION" ]; then
echo "Error: Could not find COMMIT line in BuildLolHtml.cmake"
exit 1
fi
# Validate that it looks like a git hash
if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
echo "Error: Invalid git hash format in BuildLolHtml.cmake"
echo "Found: $CURRENT_VERSION"
echo "Expected: 40 character hexadecimal string"
exit 1
fi
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/cloudflare/lol-html/releases/latest)
if [ -z "$LATEST_RELEASE" ]; then
echo "Error: Failed to fetch latest release from GitHub API"
exit 1
fi
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
echo "Error: Could not extract tag name from GitHub API response"
exit 1
fi
LATEST_SHA=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1
fi
if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
echo "Error: Invalid SHA format received from GitHub"
echo "Found: $LATEST_SHA"
echo "Expected: 40 character hexadecimal string"
exit 1
fi
echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT
- name: Update version if needed
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
run: |
set -euo pipefail
# Handle multi-line format where COMMIT and its value are on separate lines
sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildLolHtml.cmake
- name: Create Pull Request
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
uses: peter-evans/create-pull-request@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
add-paths: |
cmake/targets/BuildLolHtml.cmake
commit-message: "deps: update lolhtml to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update lolhtml to ${{ steps.check-version.outputs.tag }}"
delete-branch: true
branch: deps/update-lolhtml-${{ github.run_number }}
body: |
## What does this PR do?
Updates lolhtml to version ${{ steps.check-version.outputs.tag }}
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-lolhtml.yml)
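For reference, the `awk` and `sed` commands in this family of update workflows assume a multi-line layout in the CMake target file, with the commit hash sitting on the line directly below a bare `COMMIT` keyword. A minimal bash sketch under that assumption (the file contents and hash below are invented for illustration):

```bash
# Hypothetical BuildLolHtml.cmake layout assumed by the extraction step above
cat > /tmp/BuildLolHtml.cmake <<'EOF'
register_repository(
  NAME
    lolhtml
  REPOSITORY
    cloudflare/lol-html
  COMMIT
    0123456789abcdef0123456789abcdef01234567
)
EOF

# Same extraction as the workflow: print the (trimmed) line that follows "COMMIT"
awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' /tmp/BuildLolHtml.cmake
# -> 0123456789abcdef0123456789abcdef01234567

# The "Update version if needed" sed expression rewrites that same following line in place.
```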
@@ -1,92 +0,0 @@
name: Update lshpack
on:
schedule:
- cron: "0 5 * * 0"
workflow_dispatch:
jobs:
check-update:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- uses: actions/checkout@v4
- name: Check lshpack version
id: check-version
run: |
set -euo pipefail
# Extract the commit hash from the line after COMMIT
CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildLshpack.cmake)
if [ -z "$CURRENT_VERSION" ]; then
echo "Error: Could not find COMMIT line in BuildLshpack.cmake"
exit 1
fi
# Validate that it looks like a git hash
if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
echo "Error: Invalid git hash format in BuildLshpack.cmake"
echo "Found: $CURRENT_VERSION"
echo "Expected: 40 character hexadecimal string"
exit 1
fi
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/litespeedtech/ls-hpack/releases/latest)
if [ -z "$LATEST_RELEASE" ]; then
echo "Error: Failed to fetch latest release from GitHub API"
exit 1
fi
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
echo "Error: Could not extract tag name from GitHub API response"
exit 1
fi
LATEST_SHA=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1
fi
if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
echo "Error: Invalid SHA format received from GitHub"
echo "Found: $LATEST_SHA"
echo "Expected: 40 character hexadecimal string"
exit 1
fi
echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT
- name: Update version if needed
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
run: |
set -euo pipefail
# Handle multi-line format where COMMIT and its value are on separate lines
sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildLshpack.cmake
- name: Create Pull Request
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
uses: peter-evans/create-pull-request@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
add-paths: |
cmake/targets/BuildLshpack.cmake
commit-message: "deps: update lshpack to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update lshpack to ${{ steps.check-version.outputs.tag }}"
delete-branch: true
branch: deps/update-lshpack-${{ github.run_number }}
body: |
## What does this PR do?
Updates lshpack to version ${{ steps.check-version.outputs.tag }}
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-lshpack.yml)
@@ -1,109 +0,0 @@
name: Update SQLite3
on:
schedule:
- cron: "0 6 * * 0" # Run weekly
workflow_dispatch:
jobs:
check-update:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- uses: actions/checkout@v4
- name: Check SQLite version
id: check-version
run: |
set -euo pipefail
# Get current version from the header file using SQLITE_VERSION_NUMBER
CURRENT_VERSION_NUM=$(grep -o '#define SQLITE_VERSION_NUMBER [0-9]\+' src/bun.js/bindings/sqlite/sqlite3_local.h | awk '{print $3}' | tr -d '\n\r')
if [ -z "$CURRENT_VERSION_NUM" ]; then
echo "Error: Could not find SQLITE_VERSION_NUMBER in sqlite3_local.h"
exit 1
fi
# Convert numeric version to semantic version for display
CURRENT_MAJOR=$((CURRENT_VERSION_NUM / 1000000))
CURRENT_MINOR=$((($CURRENT_VERSION_NUM / 1000) % 1000))
CURRENT_PATCH=$((CURRENT_VERSION_NUM % 1000))
CURRENT_VERSION="$CURRENT_MAJOR.$CURRENT_MINOR.$CURRENT_PATCH"
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
echo "current_num=$CURRENT_VERSION_NUM" >> $GITHUB_OUTPUT
# Fetch SQLite download page
DOWNLOAD_PAGE=$(curl -sL https://sqlite.org/download.html)
if [ -z "$DOWNLOAD_PAGE" ]; then
echo "Error: Failed to fetch SQLite download page"
exit 1
fi
# Extract latest version and year from the amalgamation link
LATEST_INFO=$(echo "$DOWNLOAD_PAGE" | grep -o 'sqlite-amalgamation-[0-9]\{7\}.zip' | head -n1)
LATEST_YEAR=$(echo "$DOWNLOAD_PAGE" | grep -o '[0-9]\{4\}/sqlite-amalgamation-[0-9]\{7\}.zip' | head -n1 | cut -d'/' -f1 | tr -d '\n\r')
LATEST_VERSION_NUM=$(echo "$LATEST_INFO" | grep -o '[0-9]\{7\}' | tr -d '\n\r')
if [ -z "$LATEST_VERSION_NUM" ] || [ -z "$LATEST_YEAR" ]; then
echo "Error: Could not extract latest version info"
exit 1
fi
# Convert numeric version to semantic version for display
LATEST_MAJOR=$((10#$LATEST_VERSION_NUM / 1000000))
LATEST_MINOR=$((($LATEST_VERSION_NUM / 10000) % 100))
LATEST_PATCH=$((10#$LATEST_VERSION_NUM % 1000))
LATEST_VERSION="$LATEST_MAJOR.$LATEST_MINOR.$LATEST_PATCH"
echo "latest=$LATEST_VERSION" >> $GITHUB_OUTPUT
echo "latest_year=$LATEST_YEAR" >> $GITHUB_OUTPUT
echo "latest_num=$LATEST_VERSION_NUM" >> $GITHUB_OUTPUT
# Debug output
echo "Current version: $CURRENT_VERSION ($CURRENT_VERSION_NUM)"
echo "Latest version: $LATEST_VERSION ($LATEST_VERSION_NUM)"
- name: Update SQLite if needed
if: success() && steps.check-version.outputs.current_num < steps.check-version.outputs.latest_num
run: |
set -euo pipefail
TEMP_DIR=$(mktemp -d)
cd $TEMP_DIR
echo "Downloading from: https://sqlite.org/${{ steps.check-version.outputs.latest_year }}/sqlite-amalgamation-${{ steps.check-version.outputs.latest_num }}.zip"
# Download and extract latest version
wget "https://sqlite.org/${{ steps.check-version.outputs.latest_year }}/sqlite-amalgamation-${{ steps.check-version.outputs.latest_num }}.zip"
unzip "sqlite-amalgamation-${{ steps.check-version.outputs.latest_num }}.zip"
cd "sqlite-amalgamation-${{ steps.check-version.outputs.latest_num }}"
# Add header comment and copy files
echo "// clang-format off" > $GITHUB_WORKSPACE/src/bun.js/bindings/sqlite/sqlite3.c
cat sqlite3.c >> $GITHUB_WORKSPACE/src/bun.js/bindings/sqlite/sqlite3.c
echo "// clang-format off" > $GITHUB_WORKSPACE/src/bun.js/bindings/sqlite/sqlite3_local.h
cat sqlite3.h >> $GITHUB_WORKSPACE/src/bun.js/bindings/sqlite/sqlite3_local.h
- name: Create Pull Request
if: success() && steps.check-version.outputs.current_num < steps.check-version.outputs.latest_num
uses: peter-evans/create-pull-request@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
add-paths: |
src/bun.js/bindings/sqlite/sqlite3.c
src/bun.js/bindings/sqlite/sqlite3_local.h
commit-message: "deps: update sqlite to ${{ steps.check-version.outputs.latest }}"
title: "deps: update sqlite to ${{ steps.check-version.outputs.latest }}"
delete-branch: true
branch: deps/update-sqlite-${{ steps.check-version.outputs.latest }}
body: |
## What does this PR do?
Updates SQLite to version ${{ steps.check-version.outputs.latest }}
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-sqlite3.yml)
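As a sanity check on the arithmetic in the `check-version` step above: `SQLITE_VERSION_NUMBER` in the header encodes the version as `major*1000000 + minor*1000 + patch`, so the division/modulo steps recover the semantic version. A minimal sketch (3046001 is only an example value, corresponding to 3.46.1):

```bash
# Example only: decode a SQLITE_VERSION_NUMBER-style value into "major.minor.patch"
NUM=3046001
MAJOR=$((NUM / 1000000))
MINOR=$(((NUM / 1000) % 1000))
PATCH=$((NUM % 1000))
echo "$MAJOR.$MINOR.$PATCH"   # prints 3.46.1
```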
.github/workflows/zig-fmt.yml vendored Normal file
@@ -0,0 +1,87 @@
name: zig-fmt
env:
ZIG_VERSION: 0.11.0-dev.2571+31738de28
on:
pull_request:
branches:
- main
- jarred/test-actions
paths:
- "src/**/*.zig"
- "src/*.zig"
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
jobs:
zig-fmt:
name: zig fmt
runs-on: ubuntu-latest
outputs:
zig_fmt_errs: ${{ steps.fmt.outputs.zig_fmt_errs }}
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
- name: Install zig
run: |
curl https://ziglang.org/builds/zig-linux-x86_64-${{env.ZIG_VERSION}}.tar.xz -L -o zig.tar.xz
tar -xf zig.tar.xz
sudo mv zig-linux-x86_64-${{env.ZIG_VERSION}}/zig /usr/local/bin
- name: Run zig fmt
id: fmt
run: |
zig fmt --check src/*.zig src/**/*.zig 2> zig-fmt.err > zig-fmt.err2 || echo "Failed"
delimiter="$(openssl rand -hex 8)"
echo "zig_fmt_errs<<${delimiter}" >> "${GITHUB_OUTPUT}"
if [ -s zig-fmt.err ]; then
echo "// The following errors occurred:" >> "${GITHUB_OUTPUT}"
cat zig-fmt.err >> "${GITHUB_OUTPUT}"
fi
if [ -s zig-fmt.err2 ]; then
echo "// The following files were not formatted:" >> "${GITHUB_OUTPUT}"
cat zig-fmt.err2 >> "${GITHUB_OUTPUT}"
fi
echo "${delimiter}" >> "${GITHUB_OUTPUT}"
- name: Comment on PR
if: steps.fmt.outputs.zig_fmt_errs != ''
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: zig-fmt
message: |
❌ @${{ github.actor }} `zig fmt` reported errors. Consider configuring your text editor to [auto-format on save](https://github.com/ziglang/vscode-zig)
```zig
// # zig fmt --check src/*.zig src/**/*.zig
${{ steps.fmt.outputs.zig_fmt_errs }}
```
To one-off fix this manually, run:
```sh
zig fmt src/*.zig src/**/*.zig
```
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
<sup>zig v${{env.ZIG_VERSION}}</sup>
- name: Uncomment on PR
if: steps.fmt.outputs.zig_fmt_errs == ''
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: zig-fmt
mode: upsert
create_if_not_exists: false
message: |
✅ `zig fmt` errors have been resolved. Thank you.
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
<sup>zig v${{env.ZIG_VERSION}}</sup>
- name: Fail the job
if: steps.fmt.outputs.zig_fmt_errs != ''
run: exit 1
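The random-delimiter construct in the `zig fmt` step above is the usual way to write a multi-line value to `$GITHUB_OUTPUT` without the payload accidentally terminating the block. A stripped-down sketch of the same pattern (the output name and payload are placeholders):

```bash
# Write a multi-line step output; the random delimiter cannot collide with the payload
delimiter="$(openssl rand -hex 8)"
{
  echo "my_output<<${delimiter}"
  printf 'line one\nline two\n'
  echo "${delimiter}"
} >> "${GITHUB_OUTPUT}"
```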
@@ -1,34 +0,0 @@
name: zig-format
permissions:
contents: write
on:
workflow_call:
workflow_dispatch:
pull_request:
merge_group:
env:
BUN_VERSION: "1.2.0"
jobs:
zig-format:
name: zig-format
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: ${{ env.BUN_VERSION }}
- name: Zig Format
run: |
bun run zig-format:diff
- name: Commit
uses: stefanzweifel/git-auto-commit-action@v5
with:
commit_message: "`bun run zig-format`"
.gitignore vendored
@@ -1,185 +1,123 @@
.DS_Store
.env
.envrc
.eslintcache
.idea
.next
.ninja_deps
.ninja_log
.npm
.npm.gz
.parcel-cache
.swcrc
.trace
.uuid
.vs
.vscode/clang*
.vscode/cpp*
.zig-cache
.bake-debug
*.a
*.bc
*.big
*.blob
*.bun
*.crash
*.database
*.db
*.dmg
*.dSYM
*.generated.ts
*.jsb
*.lib
*.log
zig-cache
packages/*/*.wasm
*.o
*.a
profile.json
node_modules
.swcrc
yarn.lock
dist
*.tmp
*.log
*.out.js
*.out.refresh.js
*.pdb
*.sqlite
*.tmp
*.trace
*.wat
*.zip
**/.verdaccio-db.json
**/*.dir
**/*.pdb
**/*.sln*
**/*.vcxproj*
**/package-lock.json
/.cache
/.webkit-cache
/build-*/
/bun-webkit
/kcov-out
/test-report.json
/test-report.md
/test.js
/test.ts
/test.zig
/testdir
build
build.ninja
bun-binary
bun-mimalloc
bun-nomimalloc
bun-singlehtreaded
bun-test-scratch
bun-zigld
cmake_install.cmake
CMakeCache.txt
CMakeFiles
cold-jsc-start
cold-jsc-start.d
compile_commands.json
*.wat
zig-out
pnpm-lock.yaml
README.md.template
src/deps/zig-clap/example
src/deps/zig-clap/README.md
src/deps/zig-clap/.github
src/deps/zig-clap/.gitattributes
out
outdir
.trace
cover
coverage
coverv
dist
esbuilddir
examples/lotta-modules/bun-nofscache
examples/lotta-modules/bun-old
examples/lotta-modules/bun-yday
failing-tests.txt
*.trace
github
make-dev-stats.csv
misctools/fetch
misctools/machbench
misctools/sha
myscript.sh
node_modules
node_modules_*
out
out.*
outcss
outdir
out
.parcel-cache
esbuilddir
*.bun
parceldist
esbuilddir
outdir/
packages/*/*.wasm
packages/bun-*/*.o
outcss
.next
txt.js
.idea
.vscode/cpp*
.vscode/clang*
node_modules_*
*.jsb
*.zip
bun-zigld
bun-singlehtreaded
bun-nomimalloc
bun-mimalloc
examples/lotta-modules/bun-yday
examples/lotta-modules/bun-old
examples/lotta-modules/bun-nofscache
src/node-fallbacks/out/*
src/node-fallbacks/node_modules
sign.json
release/
*.dmg
sign.*.json
packages/debug-*
packages/bun-cli/postinstall.js
packages/bun-*/bun
packages/bun-*/bun-profile
packages/bun-*/debug-bun
packages/bun-cli/bin/*
packages/bun-*/*.o
packages/bun-cli/postinstall.js
packages/bun-wasm/*.cjs
packages/bun-wasm/*.d.cts
packages/bun-wasm/*.d.mts
packages/bun-wasm/*.d.ts
packages/bun-wasm/*.js
packages/bun-wasm/*.map
packages/bun-wasm/*.mjs
packages/debug-*
parceldist
pnpm-lock.yaml
profile.json
README.md.template
release/
scripts/env.local
sign.*.json
sign.json
src/bake/generated.ts
src/generated_enum_extractor.zig
src/bun.js/bindings-obj
src/bun.js/bindings/GeneratedJS2Native.zig
src/bun.js/bindings/GeneratedBindings.zig
src/bun.js/debug-bindings-obj
src/deps/zig-clap/.gitattributes
src/deps/zig-clap/.github
src/deps/zig-clap/example
src/deps/zig-clap/README.md
src/fallback.version
src/js/out/DebugPath.h
src/js/out/functions*
src/js/out/modules*
src/js/out/tmp
src/node-fallbacks/node_modules
src/node-fallbacks/out/*
src/runtime.version
packages/bun-cli/bin/*
bun-test-scratch
misctools/fetch
src/deps/libiconv
src/deps/openssl
src/tests.zig
*.blob
src/deps/s2n-tls
.npm
.npm.gz
bun-binary
src/deps/PLCrashReporter/
*.dSYM
*.crash
misctools/sha
packages/bun-wasm/*.mjs
packages/bun-wasm/*.cjs
packages/bun-wasm/*.map
packages/bun-wasm/*.js
packages/bun-wasm/*.d.ts
*.bc
src/fallback.version
src/runtime.version
*.sqlite
*.database
*.db
misctools/machbench
*.big
.eslintcache
bun-webkit
src/deps/c-ares/build
src/bun.js/bindings-obj
src/bun.js/debug-bindings-obj
failing-tests.txt
test.txt
test/js/bun/glob/fixtures
test/node.js/upstream
tsconfig.tsbuildinfo
txt.js
x64
yarn.lock
zig-cache
zig-out
test/node.js/upstream
.zig-cache
scripts/env.local
*.generated.ts
src/bake/generated.ts
test/cli/install/registry/packages/publish-pkg-*
test/cli/install/registry/packages/@secret/publish-pkg-8
test/js/third_party/prisma/prisma/sqlite/dev.db-journal
tmp
myscript.sh
# Dependencies
/vendor
cold-jsc-start
cold-jsc-start.d
# Dependencies (before CMake)
# These can be removed in the far future
/src/bun.js/WebKit
/src/deps/boringssl
/src/deps/brotli
/src/deps/c*ares
/src/deps/libarchive
/src/deps/libdeflate
/src/deps/libuv
/src/deps/lol*html
/src/deps/ls*hpack
/src/deps/mimalloc
/src/deps/picohttpparser
/src/deps/tinycc
/src/deps/WebKit
/src/deps/zig
/src/deps/zlib
/src/deps/zstd
# Generated files
.buildkite/ci.yml
*.sock
scratch*.{js,ts,tsx,cjs,mjs}
*.bun-build
/test.ts
.gitmodules vendored Normal file
@@ -0,0 +1,71 @@
[submodule "src/deps/picohttpparser"]
path = src/deps/picohttpparser
url = https://github.com/h2o/picohttpparser.git
ignore = dirty
depth = 1
shallow = true
fetchRecurseSubmodules = false
[submodule "src/javascript/jsc/WebKit"]
path = src/bun.js/WebKit
url = https://github.com/oven-sh/WebKit.git
ignore = dirty
depth = 1
update = none
shallow = true
fetchRecurseSubmodules = false
[submodule "src/deps/mimalloc"]
path = src/deps/mimalloc
url = https://github.com/Jarred-Sumner/mimalloc.git
ignore = dirty
depth = 1
shallow = true
fetchRecurseSubmodules = false
[submodule "src/deps/zlib"]
path = src/deps/zlib
url = https://github.com/cloudflare/zlib.git
ignore = dirty
depth = 1
shallow = true
fetchRecurseSubmodules = false
[submodule "src/deps/libarchive"]
path = src/deps/libarchive
url = https://github.com/libarchive/libarchive.git
ignore = dirty
depth = 1
shallow = true
fetchRecurseSubmodules = false
[submodule "src/deps/boringssl"]
path = src/deps/boringssl
url = https://github.com/oven-sh/boringssl.git
ignore = dirty
depth = 1
shallow = true
fetchRecurseSubmodules = false
[submodule "src/deps/lol-html"]
path = src/deps/lol-html
url = https://github.com/cloudflare/lol-html
ignore = dirty
depth = 1
shallow = true
fetchRecurseSubmodules = false
[submodule "src/deps/uws"]
path = src/deps/uws
url = https://github.com/Jarred-Sumner/uWebSockets
ignore = dirty
depth = 1
shallow = true
fetchRecurseSubmodules = true
[submodule "src/deps/tinycc"]
path = src/deps/tinycc
url = https://github.com/Jarred-Sumner/tinycc.git
ignore = dirty
depth = 1
shallow = true
fetchRecurseSubmodules = false
[submodule "src/deps/c-ares"]
path = src/deps/c-ares
url = https://github.com/c-ares/c-ares.git
[submodule "src/deps/zstd"]
path = src/deps/zstd
url = https://github.com/facebook/zstd.git
ignore = dirty
@@ -1,4 +0,0 @@
# command script import vendor/zig/tools/lldb_pretty_printers.py
command script import vendor/WebKit/Tools/lldb/lldb_webkit.py
# type summary add --summary-string "${var} | inner=${var[0-30]}, source=${var[33-64]}, tag=${var[31-32]}" "unsigned long"
@@ -1,2 +0,0 @@
# To learn more about git's mailmap: https://ntietz.com/blog/git-mailmap-for-name-changes
chloe caruso <git@paperclover.net> <me@paperdave.net>
@@ -1,10 +1,13 @@
src/fallback.html
src/bun.js/WebKit
vendor
test/snapshots
test/js/deno
test/node.js
src/js/out
src/*.out.js
src/*out.*.js
src/deps
src/test/fixtures
src/react-refresh.js
*.min.js
test/snippets
test/js/node/test
bun.lock
test/snapshots
test/snapshots-no-hmr
test/js/deno/*.test.ts
test/js/deno/**/*.test.ts
bench/react-hello-world/react-hello-world.node.js
@@ -1,24 +0,0 @@
{
"arrowParens": "avoid",
"printWidth": 120,
"trailingComma": "all",
"useTabs": false,
"quoteProps": "preserve",
"overrides": [
{
"files": [".vscode/*.json"],
"options": {
"parser": "jsonc",
"quoteProps": "preserve",
"singleQuote": false,
"trailingComma": "all"
}
},
{
"files": ["*.md"],
"options": {
"printWidth": 80
}
}
]
}
.prettierrc.cjs Normal file
@@ -0,0 +1,15 @@
module.exports = {
arrowParens: "avoid",
printWidth: 120,
trailingComma: "all",
useTabs: false,
quoteProps: "preserve",
overrides: [
{
files: "README.md",
options: {
printWidth: 80,
},
},
],
};
.scripts/postinstall.sh Executable file
@@ -0,0 +1,13 @@
#!/bin/bash
set -euxo pipefail
# if bun-webkit node_modules directory exists
if [ -d ./node_modules/bun-webkit ]; then
rm -f bun-webkit
# get the first matching bun-webkit-* directory name
ln -s ./node_modules/$(ls ./node_modules | grep bun-webkit- | head -n 1) ./bun-webkit
fi
# sets up vscode C++ intellisense
rm -f .vscode/clang++
ln -s $(which clang++-15 || which clang++) .vscode/clang++ 2>/dev/null
@@ -1,18 +1,17 @@
#!/bin/bash
set -exo pipefail
set -euxo pipefail
WEBKIT_VERSION=$(grep 'set(WEBKIT_TAG' "CMakeLists.txt" | awk '{print $2}' | cut -f 1 -d ')')
WEBKIT_VERSION=$(git rev-parse HEAD:./src/bun.js/WebKit)
MIMALLOC_VERSION=$(git rev-parse HEAD:./src/deps/mimalloc)
LIBARCHIVE_VERSION=$(git rev-parse HEAD:./src/deps/libarchive)
PICOHTTPPARSER_VERSION=$(git rev-parse HEAD:./src/deps/picohttpparser)
BORINGSSL_VERSION=$(git rev-parse HEAD:./src/deps/boringssl)
ZLIB_VERSION=$(git rev-parse HEAD:./src/deps/zlib)
UWS_VERSION=$(git rev-parse HEAD:./src/deps/uws)
LOLHTML=$(git rev-parse HEAD:./src/deps/lol-html)
TINYCC=$(git rev-parse HEAD:./src/deps/tinycc)
C_ARES=$(git rev-parse HEAD:./src/deps/c-ares)
ZSTD=$(git rev-parse HEAD:./src/deps/zstd)
LSHPACK=$(git rev-parse HEAD:./src/deps/ls-hpack)
LIBDEFLATE=$(git rev-parse HEAD:./src/deps/libdeflate)
USOCKETS=$(cd src/deps/uws/uSockets && git rev-parse HEAD)
rm -rf src/generated_versions_list.zig
echo "// AUTO-GENERATED FILE. Created via .scripts/write-versions.sh" >src/generated_versions_list.zig
@@ -21,15 +20,13 @@ echo "pub const boringssl = \"$BORINGSSL_VERSION\";" >>src/generated_versions_li
echo "pub const libarchive = \"$LIBARCHIVE_VERSION\";" >>src/generated_versions_list.zig
echo "pub const mimalloc = \"$MIMALLOC_VERSION\";" >>src/generated_versions_list.zig
echo "pub const picohttpparser = \"$PICOHTTPPARSER_VERSION\";" >>src/generated_versions_list.zig
echo "pub const uws = \"$UWS_VERSION\";" >>src/generated_versions_list.zig
echo "pub const webkit = \"$WEBKIT_VERSION\";" >>src/generated_versions_list.zig
echo "pub const zig = @import(\"std\").fmt.comptimePrint(\"{}\", .{@import(\"builtin\").zig_version});" >>src/generated_versions_list.zig
echo "pub const zlib = \"$ZLIB_VERSION\";" >>src/generated_versions_list.zig
echo "pub const tinycc = \"$TINYCC\";" >>src/generated_versions_list.zig
echo "pub const lolhtml = \"$LOLHTML\";" >>src/generated_versions_list.zig
echo "pub const c_ares = \"$C_ARES\";" >>src/generated_versions_list.zig
echo "pub const libdeflate = \"$LIBDEFLATE\";" >>src/generated_versions_list.zig
echo "pub const zstd = \"$ZSTD\";" >>src/generated_versions_list.zig
echo "pub const lshpack = \"$LSHPACK\";" >>src/generated_versions_list.zig
echo "" >>src/generated_versions_list.zig
zig fmt src/generated_versions_list.zig
@@ -1,2 +0,0 @@
[type.md]
extend-ignore-words-re = ["^ba"]
@@ -1,103 +1,49 @@
{
"configurations": [
{
"name": "Debug",
"name": "Mac",
"forcedInclude": ["${workspaceFolder}/src/bun.js/bindings/root.h"],
"compileCommands": "${workspaceFolder}/build/debug/compile_commands.json",
"includePath": [
"${workspaceFolder}/build/bun-webkit/include",
"${workspaceFolder}/build/debug/codegen",
"${workspaceFolder}/../webkit-build/include/",
"${workspaceFolder}/bun-webkit/include/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/ICU/Headers/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/JavaScriptCore/PrivateHeaders/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/WTF/Headers",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/bmalloc/Headers/",
"${workspaceFolder}/src/bun.js/bindings/",
"${workspaceFolder}/src/bun.js/bindings/webcore/",
"${workspaceFolder}/src/bun.js/bindings/sqlite/",
"${workspaceFolder}/src/bun.js/bindings/webcrypto/",
"${workspaceFolder}/src/bun.js/modules/",
"${workspaceFolder}/src/js/builtins/",
"${workspaceFolder}/vendor/boringssl/include/",
"${workspaceFolder}/vendor",
"${workspaceFolder}/src/napi/*",
"${workspaceFolder}/packages/bun-usockets/src",
"${workspaceFolder}/packages/",
],
"browse": {
"path": [
"${workspaceFolder}/build/bun-webkit/include",
"${workspaceFolder}/src/bun.js/bindings",
"${workspaceFolder}/src/napi/*",
"${workspaceFolder}/src/js/builtins/*",
"${workspaceFolder}/src/bun.js/modules/*",
"${workspaceFolder}/vendor/*",
"${workspaceFolder}/vendor/boringssl/include/*",
"${workspaceFolder}/packages/bun-usockets/*",
"${workspaceFolder}/packages/bun-uws/*",
"${workspaceFolder}/src/napi/*",
],
"limitSymbolsToIncludedHeaders": true,
"databaseFilename": ".vscode/cppdb",
},
"defines": [
"STATICALLY_LINKED_WITH_JavaScriptCore=1",
"STATICALLY_LINKED_WITH_WTF=1",
"BUILDING_WITH_CMAKE=1",
"NOMINMAX",
"ENABLE_INSPECTOR_ALTERNATE_DISPATCHERS=0",
"BUILDING_JSCONLY__",
"USE_FOUNDATION=1",
"ASSERT_ENABLED=1",
"DU_DISABLE_RENAMING=1",
],
"macFrameworkPath": [],
"compilerPath": "${workspaceFolder}/.vscode/clang++",
"cStandard": "c17",
"cppStandard": "c++20",
},
{
"name": "BunWithJSCDebug",
"forcedInclude": ["${workspaceFolder}/src/bun.js/bindings/root.h"],
"includePath": [
"${workspaceFolder}/build/debug/codegen",
"${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/",
"${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/ICU/Headers/",
"${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/JavaScriptCore/PrivateHeaders/",
"${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/WTF/Headers",
"${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/bmalloc/Headers/",
"${workspaceFolder}/src/bun.js/bindings/",
"${workspaceFolder}/src/bun.js/bindings/webcore/",
"${workspaceFolder}/src/bun.js/bindings/sqlite/",
"${workspaceFolder}/src/bun.js/bindings/webcrypto/",
"${workspaceFolder}/src/bun.js/modules/",
"${workspaceFolder}/src/js/builtins/",
"${workspaceFolder}/src/js/out",
"${workspaceFolder}/vendor/boringssl/include/",
"${workspaceFolder}/vendor",
"${workspaceFolder}/src/napi/*",
"${workspaceFolder}/packages/bun-usockets/src",
"${workspaceFolder}/packages/",
"${workspaceFolder}/src/deps/boringssl/include/",
"${workspaceFolder}/src/deps",
"${workspaceFolder}/src/deps/uws/uSockets/src",
"${workspaceFolder}/src/deps/uws/src"
],
"browse": {
"path": [
"${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/",
"${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/ICU/Headers/",
"${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/JavaScriptCore/PrivateHeaders/**",
"${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/WTF/Headers/**",
"${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/bmalloc/Headers/**",
"${workspaceFolder}/bun-webkit/include/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/ICU/Headers/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/JavaScriptCore/PrivateHeaders/**",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/WTF/Headers/**",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/bmalloc/Headers/**",
"${workspaceFolder}/src/bun.js/bindings/*",
"${workspaceFolder}/src/bun.js/bindings/*",
"${workspaceFolder}/src/napi/*",
"${workspaceFolder}/src/bun.js/bindings/sqlite/",
"${workspaceFolder}/src/bun.js/bindings/webcrypto/",
"${workspaceFolder}/src/bun.js/bindings/webcore/",
"${workspaceFolder}/src/js/builtins/*",
"${workspaceFolder}/src/js/out/*",
"${workspaceFolder}/src/bun.js/modules/*",
"${workspaceFolder}/vendor",
"${workspaceFolder}/vendor/boringssl/include/",
"${workspaceFolder}/packages/bun-usockets/",
"${workspaceFolder}/packages/bun-uws/",
"${workspaceFolder}/src/napi",
"${workspaceFolder}/src/deps",
"${workspaceFolder}/src/deps/boringssl/include/",
"${workspaceFolder}/src/deps/uws/uSockets/src",
"${workspaceFolder}/src/deps/uws/src"
],
"limitSymbolsToIncludedHeaders": true,
"databaseFilename": ".vscode/cppdb_debug",
"databaseFilename": ".vscode/cppdb"
},
"defines": [
"STATICALLY_LINKED_WITH_JavaScriptCore=1",
@@ -107,14 +53,14 @@
"ENABLE_INSPECTOR_ALTERNATE_DISPATCHERS=0",
"BUILDING_JSCONLY__",
"USE_FOUNDATION=1",
"ASSERT_ENABLED=1",
"DU_DISABLE_RENAMING=1",
"ASSERT_ENABLED=0",
"DU_DISABLE_RENAMING=1"
],
"macFrameworkPath": [],
"compilerPath": "${workspaceFolder}/.vscode/clang++",
"cStandard": "c17",
"cppStandard": "c++20",
},
"cppStandard": "c++20"
}
],
"version": 4,
"version": 4
}
@@ -1,33 +1,10 @@
{
"recommendations": [
// Zig
"ziglang.vscode-zig",
// C/C++
"clang.clangd",
"ms-vscode.cmake-tools",
"esbenp.prettier-vscode",
"xaver.clang-format",
"vadimcn.vscode-lldb",
// JavaScript
"oven.bun-vscode",
"esbenp.prettier-vscode",
// TypeScript
"better-ts-errors.better-ts-errors",
"MylesMurphy.prettify-ts",
// Markdown
"bierner.markdown-preview-github-styles",
"bierner.markdown-emoji",
"bierner.emojisense",
"bierner.markdown-checkbox",
"bierner.jsdoc-markdown-highlighting",
// TOML
"tamasfe.even-better-toml",
// Other
"bierner.comment-tagged-templates",
],
"ms-vscode.cpptools"
]
}
.vscode/launch.json generated vendored
File diff suppressed because it is too large
.vscode/settings.json vendored
@@ -1,130 +1,56 @@
{
// Editor
"editor.tabSize": 2,
"editor.insertSpaces": true,
"editor.formatOnSave": true,
"editor.formatOnSaveMode": "file",
// Search
"git.autoRepositoryDetection": "openEditors",
"search.quickOpen.includeSymbols": false,
"search.seedWithNearestWord": true,
"search.smartCase": true,
"search.exclude": {
"node_modules": true,
".git": true,
"vendor/*/**": true,
"test/node.js/upstream": true,
// This will fill up your whole search history.
"test/js/node/test/fixtures": true,
"test/js/node/test/common": true,
},
"search.exclude": {},
"search.followSymlinks": false,
"search.useIgnoreFiles": true,
// Git
"git.autoRepositoryDetection": "openEditors",
"git.ignoreSubmodules": true,
"git.ignoreLimitWarning": true,
// Zig
"zig.initialSetupDone": true,
"zig.buildOnSave": false,
"zig.buildArgs": ["obj", "-Dfor-editor"],
"zig.buildOption": "build",
"zig.zls.zigLibPath": "${workspaceFolder}/vendor/zig/lib",
"zig.buildArgs": ["-Dgenerated-code=./build/debug/codegen"],
"zig.zls.buildOnSaveStep": "check",
// "zig.zls.enableBuildOnSave": true,
// "zig.buildOnSave": true,
"zig.buildFilePath": "${workspaceFolder}/build.zig",
"zig.path": "${workspaceFolder}/vendor/zig/zig.exe",
"zig.zls.path": "${workspaceFolder}/vendor/zig/zls.exe",
"zig.formattingProvider": "zls",
"zig.zls.enableInlayHints": false,
"[zig]": {
"editor.tabSize": 4,
"editor.useTabStops": false,
"editor.defaultFormatter": "ziglang.vscode-zig",
"editor.formatOnSave": true
},
// lldb
"lldb.launch.initCommands": ["command source ${workspaceFolder}/.lldbinit"],
"lldb.verboseLogging": false,
// C++
"cmake.configureOnOpen": false,
"C_Cpp.errorSquiggles": "enabled",
"[cpp]": {
"editor.tabSize": 4,
"editor.defaultFormatter": "xaver.clang-format",
},
"[c]": {
"editor.tabSize": 4,
"editor.defaultFormatter": "xaver.clang-format",
},
"[h]": {
"editor.tabSize": 4,
"editor.defaultFormatter": "xaver.clang-format",
},
"clangd.arguments": ["--header-insertion=never"],
// JavaScript
"prettier.enable": true,
"prettier.configPath": ".prettierrc",
"eslint.workingDirectories": ["${workspaceFolder}/packages/bun-types"],
"[javascript]": {
"[ts]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true
},
"[javascriptreact]": {
"[js]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true
},
"prettier.prettierPath": "./node_modules/prettier",
"zig.zls.enableInlayHints": false,
// TypeScript
"typescript.tsdk": "node_modules/typescript/lib",
"[typescript]": {
"[jsx]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true
},
"[typescriptreact]": {
"[tsx]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true
},
// JSON
"[json]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"[yaml]": {
"editor.formatOnSave": true
},
"[jsonc]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
},
// Markdown
"[markdown]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.unicodeHighlight.ambiguousCharacters": true,
"editor.unicodeHighlight.invisibleCharacters": true,
"editor.unicodeHighlight.ambiguousCharacters": false,
"editor.unicodeHighlight.invisibleCharacters": false,
"diffEditor.ignoreTrimWhitespace": false,
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true,
"editor.wordWrap": "on",
"editor.quickSuggestions": {
"comments": "off",
"strings": "off",
"other": "off",
},
"other": "off"
}
},
// TOML
"[toml]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
},
// YAML
"[yaml]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
},
// Docker
"[dockerfile]": {
"editor.formatOnSave": false,
},
// Files
"lldb.verboseLogging": false,
"files.exclude": {
"**/.git": true,
"**/.svn": true,
@@ -134,34 +60,165 @@
"**/Thumbs.db": true,
"**/*.xcworkspacedata": true,
"**/*.xcscheme": true,
"**/*.pem": true,
"**/*.xcodeproj": true,
"**/*.i": true,
// uws WebSocket.cpp conflicts with webcore WebSocket.cpp
"packages/bun-uws/fuzzing": true,
},
"files.associations": {
"*.idl": "cpp",
"*.mdc": "markdown",
"array": "cpp",
"test/snapshots": true,
"test/snapshots-no-hmr": true,
"src/bun.js/WebKit": true,
"src/deps/libarchive": true,
"src/deps/mimalloc": true,
"src/deps/s2n-tls": true,
"src/deps/boringssl": true,
"src/deps/openssl": true,
"src/deps/uws": true,
"src/deps/zlib": true,
"src/deps/lol-html": true,
"src/deps/c-ares": true,
"src/deps/tinycc": true,
"src/deps/zstd": true,
"test/snippets/package-json-exports/_node_modules_copy": true,
"src/js/out": true
},
"C_Cpp.files.exclude": {
"**/.vscode": true,
"WebKit/JSTests": true,
"WebKit/Tools": true,
"WebKit/WebDriverTests": true,
"WebKit/WebKit.xcworkspace": true,
"WebKit/WebKitLibraries": true,
"WebKit/Websites": true,
"WebKit/resources": true,
"WebKit/LayoutTests": true,
"WebKit/ManualTests": true,
"WebKit/PerformanceTests": true,
"WebKit/WebKitLegacy": true,
"WebKit/WebCore": true,
"WebKit/WebDriver": true,
"WebKit/WebKitBuild": true,
"WebKit/WebInspectorUI": true,
"src/bun.js/WebKit/JSTests": true,
"src/bun.js/WebKit/Tools": true,
"src/bun.js/WebKit/WebDriverTests": true,
"src/bun.js/WebKit/WebKit.xcworkspace": true,
"src/bun.js/WebKit/WebKitLibraries": true,
"src/bun.js/WebKit/Websites": true,
"src/bun.js/WebKit/resources": true,
"src/bun.js/WebKit/LayoutTests": true,
"src/bun.js/WebKit/ManualTests": true,
"src/bun.js/WebKit/PerformanceTests": true,
"src/bun.js/WebKit/WebKitLegacy": true,
"src/bun.js/WebKit/WebCore": true,
"src/bun.js/WebKit/WebDriver": true,
"src/bun.js/WebKit/WebKitBuild": true,
"src/bun.js/WebKit/WebInspectorUI": true
},
"git.detectSubmodules": false,
"[cpp]": {
"editor.defaultFormatter": "xaver.clang-format"
},
"[h]": {
"editor.defaultFormatter": "xaver.clang-format"
},
"[c]": {
"editor.defaultFormatter": "xaver.clang-format"
},
"files.associations": {
"*.lock": "yarnlock",
"*.idl": "cpp",
"memory": "cpp",
"iostream": "cpp",
"algorithm": "cpp",
"random": "cpp",
"ios": "cpp",
"filesystem": "cpp",
"__locale": "cpp",
"type_traits": "cpp",
"__mutex_base": "cpp",
"__string": "cpp",
"string": "cpp",
"string_view": "cpp",
"typeinfo": "cpp",
"__config": "cpp",
"__nullptr": "cpp",
"exception": "cpp",
"__bit_reference": "cpp",
"atomic": "cpp",
"utility": "cpp",
"sstream": "cpp",
"__functional_base": "cpp",
"new": "cpp",
"__debug": "cpp",
"__errc": "cpp",
"__hash_table": "cpp",
"__node_handle": "cpp",
"__split_buffer": "cpp",
"__threading_support": "cpp",
"__tuple": "cpp",
"array": "cpp",
"bit": "cpp",
"bitset": "cpp",
"cctype": "cpp",
"chrono": "cpp",
"clocale": "cpp",
"cmath": "cpp",
"complex": "cpp",
"condition_variable": "cpp",
"cstdarg": "cpp",
"cstddef": "cpp",
"cstdint": "cpp",
"cstdio": "cpp",
"cstdlib": "cpp",
"cstring": "cpp",
"ctime": "cpp",
"cwchar": "cpp",
"cwctype": "cpp",
"deque": "cpp",
"fstream": "cpp",
"functional": "cpp",
"initializer_list": "cpp",
"iomanip": "cpp",
"iosfwd": "cpp",
"istream": "cpp",
"iterator": "cpp",
"limits": "cpp",
"locale": "cpp",
"mutex": "cpp",
"optional": "cpp",
"ostream": "cpp",
"ratio": "cpp",
"stack": "cpp",
"stdexcept": "cpp",
"streambuf": "cpp",
"system_error": "cpp",
"thread": "cpp",
"tuple": "cpp",
"unordered_map": "cpp",
"unordered_set": "cpp",
"vector": "cpp",
"__bits": "cpp",
"__tree": "cpp",
"map": "cpp",
"numeric": "cpp",
"set": "cpp",
"__memory": "cpp",
"memory_resource": "cpp",
"resource.h": "c",
"sysinfo.h": "c",
"*.tcc": "cpp",
"list": "cpp",
"shared_mutex": "cpp",
"cinttypes": "cpp",
"variant": "cpp",
"sysctl.h": "c",
"interface_adresses.h": "c",
"interface_addresses.h": "c",
"ctype.h": "c",
"ethernet.h": "c",
"inet.h": "c",
"packet.h": "c",
"queue": "cpp",
"compare": "cpp",
"concepts": "cpp",
"typeindex": "cpp",
"__verbose_abort": "cpp",
"__std_stream": "cpp",
"any": "cpp",
"charconv": "cpp",
"csignal": "cpp",
"format": "cpp",
"forward_list": "cpp",
"future": "cpp",
"regex": "cpp",
"span": "cpp",
"valarray": "cpp",
"codecvt": "cpp"
},
"cmake.configureOnOpen": false,
"C_Cpp.errorSquiggles": "enabled",
"eslint.workingDirectories": ["packages/bun-types"],
"typescript.tsdk": "node_modules/typescript/lib"
}
.vscode/tasks.json vendored
@@ -2,58 +2,33 @@
"version": "2.0.0",
"tasks": [
{
"label": "Build Bun",
"type": "shell",
"command": "bun run build",
"label": "build",
"type": "process",
"command": "zig",
"args": ["build"],
"presentation": {
"echo": true,
"reveal": "silent",
"focus": false,
"panel": "shared",
"showReuseMessage": false,
"clear": false
},
"group": {
"kind": "build",
"isDefault": true,
},
"problemMatcher": [
{
"owner": "zig",
"fileLocation": ["relative", "${workspaceFolder}"],
"pattern": [
{
"regexp": "^(.+?):(\\d+):(\\d+): (error|warning|note): (.+)$",
"file": 1,
"line": 2,
"column": 3,
"severity": 4,
"message": 5,
},
{
"regexp": "^\\s+(.+)$",
"message": 1,
"loop": true,
},
],
},
{
"owner": "clang",
"fileLocation": ["relative", "${workspaceFolder}"],
"pattern": [
{
"regexp": "^([^:]+):(\\d+):(\\d+):\\s+(warning|error|note|remark):\\s+(.*)$",
"file": 1,
"line": 2,
"column": 3,
"severity": 4,
"message": 5,
},
{
"regexp": "^\\s*(.*)$",
"message": 1,
"loop": true,
},
],
},
],
"presentation": {
"reveal": "always",
"panel": "shared",
"clear": true,
},
"isDefault": true
}
},
],
{
"label": "run",
"type": "process",
"command": "zig",
"args": ["run", "${file}"],
"group": "build",
"presentation": {
"showReuseMessage": false,
"clear": true
}
}
]
}
@@ -1,57 +0,0 @@
cmake_minimum_required(VERSION 3.24)
message(STATUS "Configuring Bun")
list(APPEND CMAKE_MODULE_PATH
${CMAKE_SOURCE_DIR}/cmake
${CMAKE_SOURCE_DIR}/cmake/targets
${CMAKE_SOURCE_DIR}/cmake/tools
${CMAKE_SOURCE_DIR}/cmake/analysis
${CMAKE_SOURCE_DIR}/cmake/scripts
)
include(Policies)
include(Globals)
if (CMAKE_HOST_WIN32)
# Workaround for TLS certificate verification issue on Windows when downloading from GitHub
# Remove this once we've bumped the CI machines build image
set(CMAKE_TLS_VERIFY 0)
endif()
# --- Compilers ---
if(CMAKE_HOST_APPLE)
include(SetupMacSDK)
endif()
include(SetupLLVM)
include(SetupCcache)
# --- Project ---
parse_package_json(VERSION_VARIABLE DEFAULT_VERSION)
optionx(VERSION STRING "The version of Bun" DEFAULT ${DEFAULT_VERSION})
project(Bun VERSION ${VERSION})
include(Options)
include(CompilerFlags)
# --- Tools ---
include(SetupGit)
include(SetupBuildkite)
include(SetupBun)
include(SetupEsbuild)
include(SetupZig)
include(SetupRust)
# --- Targets ---
include(BuildBun)
# --- Analysis ---
if(ENABLE_ANALYSIS)
include(RunClangFormat)
include(RunClangTidy)
include(RunZigFormat)
include(RunPrettier)
endif()
@@ -1,304 +1,92 @@
Configuring a development environment for Bun can take 10-30 minutes depending on your internet connection and computer speed. You will need ~10GB of free disk space for the repository and build artifacts.
# Contributing to Bun
If you are using Windows, please refer to [this guide](https://bun.sh/docs/project/building-windows)
> **Important:** All contributions need test coverage. If you are adding a new feature, please add a test. If you are fixing a bug, please add a test that fails before your fix and passes after your fix.
## Install Dependencies
## Bun's codebase
Using your system's package manager, install Bun's dependencies:
Bun is written mostly in Zig, but WebKit & JavaScriptCore (the JavaScript engine) are written in C++.
{% codetabs group="os" %}
Today (February 2023), Bun's codebase has five distinct parts:
```bash#macOS (Homebrew)
$ brew install automake ccache cmake coreutils gnu-sed go icu4c libiconv libtool ninja pkg-config rust ruby
```
- JavaScript, JSX, & TypeScript transpiler, module resolver, and related code
- JavaScript runtime ([`src/bun.js/`](src/bun.js/))
- JavaScript runtime bindings ([`src/bun.zig/bindings/**/*.cpp`](src/bun.zig/bindings/))
- Package manager ([`src/install/`](src/install/))
- Shared utilities ([`src/string_immutable.zig`](src/string_immutable.zig))
```bash#Ubuntu/Debian
$ sudo apt install curl wget lsb-release software-properties-common cargo ccache cmake git golang libtool ninja-build pkg-config rustc ruby-full xz-utils
```
The JavaScript transpiler & module resolver is mostly independent from the runtime. It predates the runtime and is entirely in Zig. The JavaScript parser is mostly in [`src/js_parser.zig`](src/js_parser.zig). The JavaScript AST data structures are mostly in [`src/js_ast.zig`](src/js_ast.zig). The JavaScript lexer is in [`src/js_lexer.zig`](src/js_lexer.zig). A lot of this code started as a port of esbuild's equivalent code from Go to Zig, but has had many small changes since then.
```bash#Arch
$ sudo pacman -S base-devel ccache cmake git go libiconv libtool make ninja pkg-config python rust sed unzip ruby
```
## Getting started
```bash#Fedora
$ sudo dnf install cargo ccache cmake git golang libtool ninja-build pkg-config rustc ruby libatomic-static libstdc++-static sed unzip which libicu-devel 'perl(Math::BigInt)'
```
Please refer to [Bun's Development Guide](https://bun.sh/docs/project/development) to get your dev environment setup!
```bash#openSUSE Tumbleweed
$ sudo zypper install go cmake ninja automake git icu rustup && rustup toolchain install stable
```
## Memory management in Bun
{% /codetabs %}
For the Zig code, please:
> **Note**: The Zig compiler is automatically installed and updated by the build scripts. Manual installation is not required.
1. Do your best to avoid dynamically allocating memory.
2. If we need to allocate memory, carefully consider the owner of that memory. If it's a JavaScript object, it will need a finalizer. If it's in Zig, it will need to be freed either via an arena or manually.
3. Prefer arenas over manual memory management. Manually freeing memory is leak & crash prone.
4. If the memory needs to be accessed across threads, use `bun.default_allocator`. Mimalloc threadlocal heaps are not safe to free across threads.
Before starting, you will need to already have a release build of Bun installed, as we use our bundler to transpile and minify our code, as well as for code generation scripts.
The JavaScript transpiler has special handling for memory management: the parser allocates into a single arena, and the memory is recycled after each parse.
{% codetabs %}
## JavaScript runtime
```bash#Native
$ curl -fsSL https://bun.sh/install | bash
```
Most of Bun's JavaScript runtime code lives in [`src/bun.js`](src/bun.js).
```bash#npm
$ npm install -g bun
```
### Calling C++ from Zig & Zig from C++
```bash#Homebrew
$ brew tap oven-sh/bun
$ brew install bun
```
TODO: document this (see [`bindings.zig`](src/bun.js/bindings/bindings.zig) and [`bindings.cpp`](src/bun.js/bindings/bindings.cpp) for now)
{% /codetabs %}
### Adding a new JavaScript class
## Install LLVM
1. Add a new file in [`src/bun.js/*.classes.ts`](src/bun.js) to define the instance and static methods for the class.
2. Add a new file in [`src/bun.js/**/*.zig`](src/bun.js) and expose the struct in [`src/bun.js/generated_classes_list.zig`](src/bun.js/generated_classes_list.zig)
3. Run `make codegen`
Bun requires LLVM 18 (`clang` is part of LLVM). This version requirement is to match WebKit (precompiled), as mismatching versions will cause memory allocation failures at runtime. In most cases, you can install LLVM through your system package manager:
Copy from examples like `Subprocess` or `Response`.
{% codetabs group="os" %}
### ESM modules
```bash#macOS (Homebrew)
$ brew install llvm@18
```
Bun implements ESM modules in a mix of native code and JavaScript.
```bash#Ubuntu/Debian
$ # LLVM has an automatic installation script that is compatible with all versions of Ubuntu
$ wget https://apt.llvm.org/llvm.sh -O - | sudo bash -s -- 18 all
```
Several Node.js modules are implemented in JavaScript and loosely based on browserify polyfills.
```bash#Arch
$ sudo pacman -S llvm clang18 lld
```
The ESM modules in Bun are located in [`src/bun.js/*.exports.js`](src/bun.js/). Unlike other code in Bun, these files are NOT transpiled. They are loaded directly into the JavaScriptCore VM. That means `require` does not work in these files. Instead, you must use `import.meta.require`, or ideally, not use require/import other files at all.
```bash#Fedora
$ sudo dnf install llvm18 clang18 lld18-devel
```
The module loader is in [`src/bun.js/module_loader.zig`](src/bun.js/module_loader.zig).
```bash#openSUSE Tumbleweed
$ sudo zypper install clang18 lld18 llvm18
```
### JavaScript Builtins
{% /codetabs %}
TODO: update this with the new build process that uses TypeScript and `$` instead of `@`.
If none of the above solutions apply, you will have to install it [manually](https://github.com/llvm/llvm-project/releases/tag/llvmorg-18.1.8).
JavaScript builtins are located in [`src/js/builtins/*.ts`](src/js/builtins).
Make sure Clang/LLVM 18 is in your path:
These files support a JavaScriptCore-only syntax for internal slots. `@` is used to access an internal slot. For example: `new @Array(123)` will create a new `Array` similar to `new Array(123)`, except if a library modifies the `Array` global, it will not affect the internal slot (`@Array`). These names must be allow-listed in `BunBuiltinNames.h` (though JavaScriptCore allowlists some names by default).
```bash
$ which clang-18
```
They cannot use or reference ESM modules. The files that end with `*Internals.js` are automatically loaded globally. Most uses of internals right now are the stream implementations (which share a lot of code with Safari/WebKit) and ImportMetaObject (which is how `require` is implemented in the runtime).
If not, run this to manually add it:
{% codetabs group="os" %}
```bash#macOS (Homebrew)
# use fish_add_path if you're using fish
# use path+="$(brew --prefix llvm@18)/bin" if you are using zsh
$ export PATH="$(brew --prefix llvm@18)/bin:$PATH"
```
```bash#Arch
# use fish_add_path if you're using fish
$ export PATH="$PATH:/usr/lib/llvm18/bin"
```
{% /codetabs %}
> ⚠️ Ubuntu distributions (<= 20.04) may require installation of the C++ standard library independently. See the [troubleshooting section](#span-file-not-found-on-ubuntu) for more information.
## Building Bun
After cloning the repository, run the following command to build. This may take a while as it will clone submodules and build dependencies.
```bash
$ bun run build
```
The binary will be located at `./build/debug/bun-debug`. It is recommended to add this to your `$PATH`. To verify the build worked, let's print the version number on the development build of Bun.
```bash
$ build/debug/bun-debug --version
x.y.z_debug
```
## VSCode
VSCode is the recommended IDE for working on Bun, as the workspace is already configured for it. Once the folder is open, you can run `Extensions: Show Recommended Extensions` to install the recommended extensions for Zig and C++. ZLS is automatically configured.
If you use a different editor, make sure that you tell ZLS to use the automatically installed Zig compiler, which is located at `./vendor/zig/zig.exe`. The filename is `zig.exe` so that it works as expected on Windows, but it still works on macOS/Linux (it just has a surprising file extension).
We recommend adding `./build/debug` to your `$PATH` so that you can run `bun-debug` in your terminal:
To regenerate the builtins:
```sh
$ bun-debug
make clean-bindings && make generate-builtins && make bindings -j10
```
## Code generation scripts
It is recommended that you have ccache installed or else you will spend a lot of time waiting for the bindings to compile.
Several code generation scripts are used during Bun's build process. These are run automatically when changes are made to certain files.
### Memory management in Bun's JavaScript runtime
In particular, these are:
TODO: fill this out (for now, use `JSC.Strong` in most cases)
- `./src/codegen/generate-jssink.ts` -- Generates `build/debug/codegen/JSSink.cpp`, `build/debug/codegen/JSSink.h` which implement various classes for interfacing with `ReadableStream`. This is internally how `FileSink`, `ArrayBufferSink`, `"type": "direct"` streams and other code related to streams works.
- `./src/codegen/generate-classes.ts` -- Generates `build/debug/codegen/ZigGeneratedClasses*`, which generates Zig & C++ bindings for JavaScriptCore classes implemented in Zig. In `**/*.classes.ts` files, we define the interfaces for various classes, methods, prototypes, getters/setters etc which the code generator reads to generate boilerplate code implementing the JavaScript objects in C++ and wiring them up to Zig
- `./src/codegen/bundle-modules.ts` -- Bundles built-in modules like `node:fs`, `bun:ffi` into files we can include in the final binary. In development, these can be reloaded without rebuilding Zig (you still need to run `bun run build`, but it re-reads the transpiled files from disk afterwards). In release builds, these are embedded into the binary.
- `./src/codegen/bundle-functions.ts` -- Bundles globally-accessible functions implemented in JavaScript/TypeScript like `ReadableStream`, `WritableStream`, and a handful more. These are used similarly to the builtin modules, but the output more closely aligns with what WebKit/Safari does for Safari's built-in functions so that we can copy-paste the implementations from WebKit as a starting point.
### Strings
## Modifying ESM modules
TODO: fill this out (for now, use `JSValue.toSlice()` in most cases)
Certain modules like `node:fs`, `node:stream`, `bun:sqlite`, and `ws` are implemented in JavaScript. These live in `src/js/{node,bun,thirdparty}` files and are pre-bundled using Bun.
#### JavaScriptCore C API
## Release build
Do not copy from examples leveraging the JavaScriptCore C API. Please do not use this in new code. We will not accept PRs that add new code that uses the JavaScriptCore C API.
To compile a release build of Bun, run:
## Testing
```bash
$ bun run build:release
```
The binary will be located at `./build/release/bun` and `./build/release/bun-profile`.
### Download release build from pull requests
To save you time spent building a release build locally, we provide a way to run release builds from pull requests. This is useful for manually testing changes in a release build before they are merged.
To run a release build from a pull request, you can use the `bun-pr` npm package:
```sh
bunx bun-pr <pr-number>
bunx bun-pr <branch-name>
bunx bun-pr "https://github.com/oven-sh/bun/pull/1234566"
```
This will download the release build from the pull request and add it to `$PATH` as `bun-${pr-number}`. You can then run the build with `bun-${pr-number}`.
```sh
bun-1234566 --version
```
This works by downloading the release build from the GitHub Actions artifacts on the linked pull request. You may need the `gh` CLI installed to authenticate with GitHub.
## Valgrind
On Linux, valgrind can help find memory issues.
Keep in mind:
- JavaScriptCore doesn't support valgrind. It will report spurious errors.
- Valgrind is slow
- Mimalloc will sometimes cause spurious errors when debug build is enabled
You'll need a very recent version of Valgrind due to DWARF 5 debug symbols. You may need to manually compile Valgrind instead of using it from your Linux package manager.
`--fair-sched=try` is necessary if running multithreaded code in Bun (such as the bundler). Otherwise it will hang.
```bash
$ valgrind --fair-sched=try --track-origins=yes bun-debug <args>
```
## Building WebKit locally + Debug mode of JSC
WebKit is not cloned by default (to save time and disk space). To clone and build WebKit locally, run:
```bash
# Clone WebKit into ./vendor/WebKit
$ git clone https://github.com/oven-sh/WebKit vendor/WebKit
# Make a debug build of JSC. This will output build artifacts in ./vendor/WebKit/WebKitBuild/Debug
# Optionally, you can use `make jsc` for a release build
$ make jsc-debug
# Build bun with the local JSC build
$ bun run build:local
```
Note that the WebKit folder, including build artifacts, is 8GB+ in size.
If you are using a JSC debug build and using VScode, make sure to run the `C/C++: Select a Configuration` command to configure intellisense to find the debug headers.
## Troubleshooting
### 'span' file not found on Ubuntu
> ⚠️ Please note that the instructions below are specific to issues occurring on Ubuntu. It is unlikely that the same issues will occur on other Linux distributions.
The Clang compiler typically uses the `libstdc++` C++ standard library by default. `libstdc++` is the default C++ Standard Library implementation provided by the GNU Compiler Collection (GCC). While Clang may link against the `libc++` library, this requires explicitly providing the `-stdlib` flag when running Clang.
Bun relies on C++20 features like `std::span`, which are not available in GCC versions lower than 11. GCC 10 doesn't have all of the C++20 features implemented. As a result, running `make setup` may fail with the following error:
```
fatal error: 'span' file not found
#include <span>
^~~~~~
```
The issue may also surface when initially running `bun setup`, with Clang unable to compile a simple test program:
```
The C++ compiler
"/usr/bin/clang++-18"
is not able to compile a simple test program.
```
To fix the error, we need to update the GCC version to 11. To do this, we'll need to check if the latest version is available in the distribution's official repositories or use a third-party repository that provides GCC 11 packages. Here are general steps:
```bash
$ sudo apt update
$ sudo apt install gcc-11 g++-11
# If the above command fails with `Unable to locate package gcc-11` we need
# to add the APT repository
$ sudo add-apt-repository -y ppa:ubuntu-toolchain-r/test
# Now run `apt install` again
$ sudo apt install gcc-11 g++-11
```
Now, we need to set GCC 11 as the default compiler:
```bash
$ sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-11 100
$ sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-11 100
```
### libarchive
If you see an error on macOS when compiling `libarchive`, run:
```bash
$ brew install pkg-config
```
### macOS `library not found for -lSystem`
If you see this error when compiling, run:
```bash
$ xcode-select --install
```
### Cannot find `libatomic.a`
Bun defaults to linking `libatomic` statically, as not all systems have it. If you are building on a distro that does not have a static libatomic available, you can run the following command to enable dynamic linking:
```bash
$ bun run build -DUSE_STATIC_LIBATOMIC=OFF
```
The built version of Bun may not work on other systems if compiled this way.
### ccache conflicts with building TinyCC on macOS
If you run into issues with `ccache` when building TinyCC, try reinstalling ccache
```bash
brew uninstall ccache
brew install ccache
```
## Using bun-debug
- Disable logging: `BUN_DEBUG_QUIET_LOGS=1 bun-debug ...` (to disable all debug logging)
- Enable logging for a specific zig scope: `BUN_DEBUG_EventLoop=1 bun-debug ...` (to allow `std.log.scoped(.EventLoop)`)
- Bun transpiles every file it runs, to see the actual executed source in a debug build find it in `/tmp/bun-debug-src/...path/to/file`, for example the transpiled version of `/home/bun/index.ts` would be in `/tmp/bun-debug-src/home/bun/index.ts`
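A minimal sketch combining the flags above (the scope name and script path are placeholders):

```bash
# Run with all debug logging disabled
BUN_DEBUG_QUIET_LOGS=1 bun-debug ./index.ts

# Only print logs from std.log.scoped(.EventLoop)
BUN_DEBUG_EventLoop=1 bun-debug ./index.ts

# Inspect the transpiled source that was actually executed
cat "/tmp/bun-debug-src$(pwd)/index.ts"
```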
See [`test/README.md`](test/README.md) for information on how to run tests.
Dockerfile Normal file
@@ -0,0 +1,663 @@
ARG DEBIAN_FRONTEND=noninteractive
ARG GITHUB_WORKSPACE=/build
ARG ZIG_PATH=${GITHUB_WORKSPACE}/zig
ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
ARG CPU_TARGET=native
ARG ARCH=x86_64
ARG BUILD_MACHINE_ARCH=x86_64
ARG TRIPLET=${ARCH}-linux-gnu
ARG BUILDARCH=amd64
ARG WEBKIT_TAG=may20
ARG ZIG_TAG=jul1
ARG ZIG_VERSION="0.11.0-dev.2571+31738de28"
ARG WEBKIT_BASENAME="bun-webkit-linux-$BUILDARCH"
ARG ZIG_FOLDERNAME=zig-linux-${BUILD_MACHINE_ARCH}-${ZIG_VERSION}
ARG ZIG_FILENAME=${ZIG_FOLDERNAME}.tar.xz
ARG WEBKIT_URL="https://github.com/oven-sh/WebKit/releases/download/$WEBKIT_TAG/${WEBKIT_BASENAME}.tar.gz"
ARG ZIG_URL="https://ziglang.org/builds/${ZIG_FILENAME}"
ARG GIT_SHA=""
ARG BUN_BASE_VERSION=0.6
FROM bitnami/minideb:bullseye as bun-base
RUN install_packages ca-certificates curl wget lsb-release software-properties-common gnupg gnupg1 gnupg2
RUN wget https://apt.llvm.org/llvm.sh && \
chmod +x llvm.sh && \
./llvm.sh 15
RUN install_packages \
cmake \
curl \
file \
git \
gnupg \
libc-dev \
libxml2 \
libxml2-dev \
make \
ninja-build \
perl \
python3 \
rsync \
ruby \
unzip \
xz-utils \
bash tar gzip ccache
ENV CXX=clang++-15
ENV CC=clang-15
RUN curl -fsSL https://deb.nodesource.com/setup_lts.x | bash - && \
install_packages nodejs && \
npm install -g esbuild
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG BUILDARCH
ARG ZIG_PATH
ARG WEBKIT_URL
ARG ZIG_URL
ARG ZIG_FOLDERNAME
ARG ZIG_FILENAME
ENV WEBKIT_OUT_DIR=${WEBKIT_DIR}
ENV BUILDARCH=${BUILDARCH}
ENV AR=/usr/bin/llvm-ar-15
ENV ZIG "${ZIG_PATH}/zig"
ENV PATH="$ZIG/bin:$PATH"
ENV LD=lld-15
RUN mkdir -p $BUN_DIR $BUN_DEPS_OUT_DIR
FROM bun-base as bun-base-with-zig-and-webkit
WORKDIR $GITHUB_WORKSPACE
ADD $ZIG_URL .
RUN tar xf ${ZIG_FILENAME} && \
rm ${ZIG_FILENAME} && mv ${ZIG_FOLDERNAME} zig;
WORKDIR $GITHUB_WORKSPACE
ARG GITHUB_WORKSPACE
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG BUILDARCH
ARG ZIG_PATH
ARG WEBKIT_URL
ARG ZIG_URL
ARG WEBKIT_BASENAME
ADD ${WEBKIT_URL} .
RUN mkdir -p ${WEBKIT_DIR} && cd ${GITHUB_WORKSPACE} && \
gunzip ${WEBKIT_BASENAME}.tar.gz && tar -xf ${WEBKIT_BASENAME}.tar && \
cat ${WEBKIT_DIR}/include/cmakeconfig.h > /dev/null
LABEL org.opencontainers.image.title="bun base image with zig & webkit ${BUILDARCH} (glibc)"
LABEL org.opencontainers.image.source=https://github.com/oven-sh/bun
FROM bun-base as c-ares
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
ENV CCACHE_DIR=/ccache
ENV JSC_BASE_DIR=${WEBKIT_DIR}
ENV LIB_ICU_PATH=${WEBKIT_DIR}/lib
COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/c-ares ${BUN_DIR}/src/deps/c-ares
WORKDIR $BUN_DIR
RUN --mount=type=cache,target=/ccache cd $BUN_DIR && make c-ares && rm -rf ${BUN_DIR}/src/deps/c-ares ${BUN_DIR}/Makefile
FROM bun-base as lolhtml
RUN install_packages build-essential && curl https://sh.rustup.rs -sSf | sh -s -- -y
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/lol-html ${BUN_DIR}/src/deps/lol-html
ENV CCACHE_DIR=/ccache
RUN --mount=type=cache,target=/ccache export PATH=$PATH:$HOME/.cargo/bin && export CC=$(which clang-15) && cd ${BUN_DIR} && \
make lolhtml && rm -rf src/deps/lol-html Makefile
FROM bun-base as mimalloc
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/mimalloc ${BUN_DIR}/src/deps/mimalloc
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
ENV CCACHE_DIR=/ccache
RUN --mount=type=cache,target=/ccache cd ${BUN_DIR} && \
make mimalloc && rm -rf src/deps/mimalloc Makefile
FROM bun-base as zlib
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/zlib ${BUN_DIR}/src/deps/zlib
WORKDIR $BUN_DIR
ENV CCACHE_DIR=/ccache
RUN --mount=type=cache,target=/ccache cd $BUN_DIR && \
make zlib && rm -rf src/deps/zlib Makefile
FROM bun-base as libarchive
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
RUN install_packages autoconf automake libtool pkg-config
COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/libarchive ${BUN_DIR}/src/deps/libarchive
ENV CCACHE_DIR=/ccache
WORKDIR $BUN_DIR
RUN --mount=type=cache,target=/ccache cd $BUN_DIR && \
make libarchive && rm -rf src/deps/libarchive Makefile
FROM bun-base as tinycc
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
RUN install_packages libtcc-dev && cp /usr/lib/$(uname -m)-linux-gnu/libtcc.a ${BUN_DEPS_OUT_DIR}
FROM bun-base as boringssl
RUN install_packages golang
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/boringssl ${BUN_DIR}/src/deps/boringssl
WORKDIR $BUN_DIR
ENV CCACHE_DIR=/ccache
RUN --mount=type=cache,target=/ccache cd ${BUN_DIR} && make boringssl && rm -rf src/deps/boringssl Makefile
FROM bun-base as uws
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/uws ${BUN_DIR}/src/deps/uws
COPY src/deps/zlib ${BUN_DIR}/src/deps/zlib
COPY src/deps/boringssl/include ${BUN_DIR}/src/deps/boringssl/include
COPY src/deps/libuwsockets.cpp ${BUN_DIR}/src/deps/libuwsockets.cpp
COPY src/deps/_libusockets.h ${BUN_DIR}/src/deps/_libusockets.h
WORKDIR $BUN_DIR
RUN cd $BUN_DIR && \
make uws && rm -rf src/deps/uws Makefile
FROM bun-base as picohttp
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/picohttpparser ${BUN_DIR}/src/deps/picohttpparser
COPY src/deps/*.c ${BUN_DIR}/src/deps/
COPY src/deps/*.h ${BUN_DIR}/src/deps/
WORKDIR $BUN_DIR
RUN cd $BUN_DIR && \
make picohttp
FROM bun-base-with-zig-and-webkit as identifier_cache
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
WORKDIR $BUN_DIR
COPY Makefile ${BUN_DIR}/Makefile
COPY src/js_lexer/identifier_data.zig ${BUN_DIR}/src/js_lexer/identifier_data.zig
COPY src/js_lexer/identifier_cache.zig ${BUN_DIR}/src/js_lexer/identifier_cache.zig
RUN cd $BUN_DIR && \
make identifier-cache && rm -rf zig-cache Makefile
FROM bun-base-with-zig-and-webkit as node_fallbacks
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
WORKDIR $BUN_DIR
COPY Makefile ${BUN_DIR}/Makefile
COPY src/node-fallbacks ${BUN_DIR}/src/node-fallbacks
RUN cd $BUN_DIR && \
make node-fallbacks && rm -rf src/node-fallbacks/node_modules Makefile
FROM bun-base-with-zig-and-webkit as prepare_release
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
WORKDIR $BUN_DIR
COPY ./root.zig ${BUN_DIR}/root.zig
COPY ./src ${BUN_DIR}/src
COPY ./build.zig ${BUN_DIR}/build.zig
COPY ./completions ${BUN_DIR}/completions
COPY ./packages ${BUN_DIR}/packages
COPY ./src/build-id ${BUN_DIR}/src/build-id
COPY ./package.json ${BUN_DIR}/package.json
COPY ./misctools ${BUN_DIR}/misctools
COPY Makefile ${BUN_DIR}/Makefile
FROM prepare_release as compile_release_obj
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
COPY Makefile ${BUN_DIR}/Makefile
COPY .prettierrc.cjs ${BUN_DIR}/.prettierrc.cjs
WORKDIR $BUN_DIR
ENV JSC_BASE_DIR=${WEBKIT_DIR}
ENV LIB_ICU_PATH=${WEBKIT_DIR}/lib
ARG ARCH
ARG TRIPLET
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
ARG GIT_SHA
ARG BUN_BASE_VERSION
ENV BUN_BASE_VERSION=${BUN_BASE_VERSION}
ENV GIT_SHA=${GIT_SHA}
COPY --from=identifier_cache ${BUN_DIR}/src/js_lexer/*.blob ${BUN_DIR}/src/js_lexer/
COPY --from=node_fallbacks ${BUN_DIR}/src/node-fallbacks/out ${BUN_DIR}/src/node-fallbacks/out
COPY ./src/build-id ${BUN_DIR}/src/build-id
ENV CCACHE_DIR=/ccache
RUN --mount=type=cache,target=/ccache cd $BUN_DIR && mkdir -p src/bun.js/bindings-obj && rm -rf $HOME/.cache zig-cache && make prerelease && \
mkdir -p $BUN_RELEASE_DIR && \
OUTPUT_DIR=/tmp/bun-${TRIPLET}-${GIT_SHA} $ZIG_PATH/zig build obj -Doutput-dir=/tmp/bun-${TRIPLET}-${GIT_SHA} -Doptimize=ReleaseFast -Dtarget="${TRIPLET}" -Dcpu="${CPU_TARGET}" && \
cp /tmp/bun-${TRIPLET}-${GIT_SHA}/bun.o /tmp/bun-${TRIPLET}-${GIT_SHA}/bun-${BUN_BASE_VERSION}.$(cat ${BUN_DIR}/src/build-id).o && cd / && rm -rf $BUN_DIR
FROM scratch as build_release_obj
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG GIT_SHA
ARG TRIPLET
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
COPY --from=compile_release_obj /tmp/bun-${TRIPLET}-${GIT_SHA}/*.o /
FROM prepare_release as compile_cpp
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
COPY Makefile ${BUN_DIR}/Makefile
COPY .prettierrc.cjs ${BUN_DIR}/.prettierrc.cjs
WORKDIR $BUN_DIR
ENV JSC_BASE_DIR=${WEBKIT_DIR}
ENV LIB_ICU_PATH=${WEBKIT_DIR}/lib
# Required for webcrypto bindings
COPY src/deps/boringssl/include ${BUN_DIR}/src/deps/boringssl/include
ENV CCACHE_DIR=/ccache
RUN --mount=type=cache,target=/ccache cd $BUN_DIR && mkdir -p src/bun.js/bindings-obj && rm -rf $HOME/.cache zig-cache && mkdir -p $BUN_RELEASE_DIR && \
make release-bindings -j10 && mv src/bun.js/bindings-obj/* /tmp
FROM bun-base as sqlite
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
ENV CCACHE_DIR=/ccache
COPY Makefile ${BUN_DIR}/Makefile
COPY src/bun.js/bindings/sqlite ${BUN_DIR}/src/bun.js/bindings/sqlite
COPY .prettierrc.cjs ${BUN_DIR}/.prettierrc.cjs
WORKDIR $BUN_DIR
ENV JSC_BASE_DIR=${WEBKIT_DIR}
ENV LIB_ICU_PATH=${WEBKIT_DIR}/lib
RUN --mount=type=cache,target=/ccache cd $BUN_DIR && make sqlite
FROM bun-base as zstd
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
ENV CCACHE_DIR=/ccache
COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/zstd ${BUN_DIR}/src/deps/zstd
COPY .prettierrc.cjs ${BUN_DIR}/.prettierrc.cjs
WORKDIR $BUN_DIR
ENV JSC_BASE_DIR=${WEBKIT_DIR}
ENV LIB_ICU_PATH=${WEBKIT_DIR}/lib
RUN --mount=type=cache,target=/ccache cd $BUN_DIR && make zstd
FROM scratch as build_release_cpp
COPY --from=compile_cpp /tmp/*.o /
FROM prepare_release as build_release
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
COPY Makefile ${BUN_DIR}/Makefile
COPY .prettierrc.cjs ${BUN_DIR}/.prettierrc.cjs
WORKDIR $BUN_DIR
ENV JSC_BASE_DIR=${WEBKIT_DIR}
ENV LIB_ICU_PATH=${WEBKIT_DIR}/lib
COPY --from=zlib ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=libarchive ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=boringssl ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=lolhtml ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=mimalloc ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/
COPY --from=picohttp ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/
COPY --from=sqlite ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/
COPY --from=zstd ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=tinycc ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=uws ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=uws ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/
COPY --from=c-ares ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=build_release_obj /*.o /tmp
COPY --from=build_release_cpp /*.o ${BUN_DIR}/src/bun.js/bindings-obj/
COPY --from=build_release_cpp /*.a ${BUN_DEPS_OUT_DIR}/
RUN cd $BUN_DIR && mkdir -p ${BUN_RELEASE_DIR} && make bun-relink copy-to-bun-release-dir && \
rm -rf $HOME/.cache zig-cache misctools package.json build-id completions build.zig ${BUN_DIR}/packages
FROM scratch as artifact
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
COPY --from=build_release ${BUN_RELEASE_DIR}/bun /bun
COPY --from=build_release ${BUN_RELEASE_DIR}/bun-profile /bun-profile
COPY --from=build_release ${BUN_DEPS_OUT_DIR}/* /bun-dependencies
COPY --from=build_release_obj /*.o /bun-obj
FROM prepare_release as build_unit
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
WORKDIR $BUN_DIR
ENV PATH "$ZIG_PATH:$PATH"
ENV LIB_ICU_PATH "${WEBKIT_DIR}/lib"
CMD make headers \
api \
analytics \
bun_error \
fallback_decoder \
bindings -j10 && \
make \
run-all-unit-tests
# FROM bun-test-base as test_base
# ARG DEBIAN_FRONTEND=noninteractive
# ARG GITHUB_WORKSPACE=/build
# ARG ZIG_PATH=${GITHUB_WORKSPACE}/zig
# # Directory extracts to "bun-webkit"
# ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
# ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
# ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
# ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
# ARG BUILDARCH=amd64
# RUN groupadd -r chromium && useradd -d ${BUN_DIR} -M -r -g chromium -G audio,video chromium \
# && mkdir -p /home/chromium/Downloads && chown -R chromium:chromium /home/chromium
# USER chromium
# WORKDIR $BUN_DIR
# ENV NPM_CLIENT bun
# ENV PATH "${BUN_DIR}/packages/bun-linux-x64:${BUN_DIR}/packages/bun-linux-aarch64:$PATH"
# ENV CI 1
# ENV BROWSER_EXECUTABLE /usr/bin/chromium
# COPY ./test ${BUN_DIR}/test
# COPY Makefile ${BUN_DIR}/Makefile
# COPY package.json ${BUN_DIR}/package.json
# COPY .docker/run-test.sh ${BUN_DIR}/run-test.sh
# COPY ./bun.lockb ${BUN_DIR}/bun.lockb
# # # We don't want to worry about architecture differences in this image
# COPY --from=release /opt/bun/bin/bun ${BUN_DIR}/packages/bun-linux-aarch64/bun
# COPY --from=release /opt/bun/bin/bun ${BUN_DIR}/packages/bun-linux-x64/bun
# USER root
# RUN chgrp -R chromium ${BUN_DIR} && chmod g+rwx ${BUN_DIR} && chown -R chromium:chromium ${BUN_DIR}
# USER chromium
# CMD [ "bash", "run-test.sh" ]
# FROM release

LATEST (deleted, 1 line)

@@ -1 +0,0 @@
1.2.4


@@ -1,73 +0,0 @@
Bun itself is MIT-licensed.
## JavaScriptCore
Bun statically links JavaScriptCore (and WebKit) which is LGPL-2 licensed. WebCore files from WebKit are also licensed under LGPL2. Per LGPL2:
> (1) If you statically link against an LGPL'd library, you must also provide your application in an object (not necessarily source) format, so that a user has the opportunity to modify the library and relink the application.
You can find the patched version of WebKit used by Bun here: <https://github.com/oven-sh/webkit>. If you would like to relink Bun with changes:
- `git submodule update --init --recursive`
- `make jsc`
- `zig build`
This compiles JavaScriptCore, compiles Bun's `.cpp` bindings for JavaScriptCore (which are the object files using JavaScriptCore), and outputs a new `bun` binary with your changes.
## Linked libraries
Bun statically links these libraries:
| Library | License |
|---------|---------|
| [`boringssl`](https://boringssl.googlesource.com/boringssl/) | [several licenses](https://boringssl.googlesource.com/boringssl/+/refs/heads/master/LICENSE) |
| [`brotli`](https://github.com/google/brotli) | MIT |
| [`libarchive`](https://github.com/libarchive/libarchive) | [several licenses](https://github.com/libarchive/libarchive/blob/master/COPYING) |
| [`lol-html`](https://github.com/cloudflare/lol-html/tree/master/c-api) | BSD 3-Clause |
| [`mimalloc`](https://github.com/microsoft/mimalloc) | MIT |
| [`picohttp`](https://github.com/h2o/picohttpparser) | dual-licensed under the Perl License or the MIT License |
| [`zstd`](https://github.com/facebook/zstd) | dual-licensed under the BSD License or GPLv2 license |
| [`simdutf`](https://github.com/simdutf/simdutf) | Apache 2.0 |
| [`tinycc`](https://github.com/tinycc/tinycc) | LGPL v2.1 |
| [`uSockets`](https://github.com/uNetworking/uSockets) | Apache 2.0 |
| [`zlib-cloudflare`](https://github.com/cloudflare/zlib) | zlib |
| [`c-ares`](https://github.com/c-ares/c-ares) | MIT licensed |
| [`libicu`](https://github.com/unicode-org/icu) 72 | [license here](https://github.com/unicode-org/icu/blob/main/icu4c/LICENSE) |
| [`libbase64`](https://github.com/aklomp/base64/blob/master/LICENSE) | BSD 2-Clause |
| [`libuv`](https://github.com/libuv/libuv) (on Windows) | MIT |
| [`libdeflate`](https://github.com/ebiggers/libdeflate) | MIT |
| A fork of [`uWebsockets`](https://github.com/jarred-sumner/uwebsockets) | Apache 2.0 licensed |
| Parts of [Tigerbeetle's IO code](https://github.com/tigerbeetle/tigerbeetle/blob/532c8b70b9142c17e07737ab6d3da68d7500cbca/src/io/windows.zig#L1) | Apache 2.0 licensed |
## Polyfills
For compatibility reasons, the following packages are embedded into Bun's binary and injected if imported.
| Package | License |
|---------|---------|
| [`assert`](https://npmjs.com/package/assert) | MIT |
| [`browserify-zlib`](https://npmjs.com/package/browserify-zlib) | MIT |
| [`buffer`](https://npmjs.com/package/buffer) | MIT |
| [`constants-browserify`](https://npmjs.com/package/constants-browserify) | MIT |
| [`crypto-browserify`](https://npmjs.com/package/crypto-browserify) | MIT |
| [`domain-browser`](https://npmjs.com/package/domain-browser) | MIT |
| [`events`](https://npmjs.com/package/events) | MIT |
| [`https-browserify`](https://npmjs.com/package/https-browserify) | MIT |
| [`os-browserify`](https://npmjs.com/package/os-browserify) | MIT |
| [`path-browserify`](https://npmjs.com/package/path-browserify) | MIT |
| [`process`](https://npmjs.com/package/process) | MIT |
| [`punycode`](https://npmjs.com/package/punycode) | MIT |
| [`querystring-es3`](https://npmjs.com/package/querystring-es3) | MIT |
| [`stream-browserify`](https://npmjs.com/package/stream-browserify) | MIT |
| [`stream-http`](https://npmjs.com/package/stream-http) | MIT |
| [`string_decoder`](https://npmjs.com/package/string_decoder) | MIT |
| [`timers-browserify`](https://npmjs.com/package/timers-browserify) | MIT |
| [`tty-browserify`](https://npmjs.com/package/tty-browserify) | MIT |
| [`url`](https://npmjs.com/package/url) | MIT |
| [`util`](https://npmjs.com/package/util) | MIT |
| [`vm-browserify`](https://npmjs.com/package/vm-browserify) | MIT |
## Additional credits
- Bun's JS transpiler, CSS lexer, and Node.js module resolver source code is a Zig port of [@evanw](https://github.com/evanw)'s [esbuild](https://github.com/evanw/esbuild) project.
- Credit to [@kipply](https://github.com/kipply) for the name "Bun"!

Makefile (472 lines changed)

File diff suppressed because it is too large.

README.md (326 lines changed)

@@ -1,5 +1,5 @@
<p align="center">
<a href="https://bun.sh"><img src="https://github.com/user-attachments/assets/50282090-adfd-4ddb-9e27-c30753c6b161" alt="Logo" height=170></a>
<a href="https://bun.sh"><img src="https://user-images.githubusercontent.com/709451/182802334-d9c42afe-f35d-4a7b-86ea-9985f73f20c3.png" alt="Logo" height=170></a>
</p>
<h1 align="center">Bun</h1>
@@ -24,36 +24,37 @@
## What is Bun?
Bun is an all-in-one toolkit for JavaScript and TypeScript apps. It ships as a single executable called `bun`.
> **Bun is still under development.** Use it to speed up your development workflows or run simpler production code in resource-constrained environments like serverless functions. We're working on more complete Node.js compatibility and integration with existing frameworks. Join the [Discord](https://bun.sh/discord) and watch the [GitHub repository](https://github.com/oven-sh/bun) to keep tabs on future releases.
At its core is the _Bun runtime_, a fast JavaScript runtime designed as **a drop-in replacement for Node.js**. It's written in Zig and powered by JavaScriptCore under the hood, dramatically reducing startup times and memory usage.
Bun is an all-in-one toolkit for JavaScript and TypeScript apps. It ships as a single executable called `bun`.
At its core is the _Bun runtime_, a fast JavaScript runtime designed as a drop-in replacement for Node.js. It's written in Zig and powered by JavaScriptCore under the hood, dramatically reducing startup times and memory usage.
```bash
bun run index.tsx # TS and JSX supported out-of-the-box
bun run index.tsx # TS and JSX supported out of the box
```
The `bun` command-line tool also implements a test runner, script runner, and Node.js-compatible package manager. Instead of 1,000 node_modules for development, you only need `bun`. Bun's built-in tools are significantly faster than existing options and usable in existing Node.js projects with little to no changes.
The `bun` command-line tool also implements a test runner, script runner, and Node.js-compatible package manager. Instead of 1,000 node_modules for development, you only need `bun`. Bun's built-in tools are significantly faster than existing options and usable in existing Node.js projects with little to no changes.
```bash
bun test # run tests
bun run start # run the `start` script in `package.json`
bun install <pkg> # install a package
bun install <pkg> # install a package
bunx cowsay 'Hello, world!' # execute a package
```
## Install
Bun supports Linux (x64 & arm64), macOS (x64 & Apple Silicon) and Windows (x64).
Bun supports Linux (x64 & arm64) and macOS (x64 & Apple Silicon).
> **Linux users** — Kernel version 5.6 or higher is strongly recommended, but the minimum is 5.1.
>
> **Windows users** — Bun does not currently provide a native Windows build. We're working on this; progress can be tracked at [this issue](https://github.com/oven-sh/bun/issues/43). In the meantime, use one of the installation methods below for Windows Subsystem for Linux.
```sh
# with install script (recommended)
curl -fsSL https://bun.sh/install | bash
# on windows
powershell -c "irm bun.sh/install.ps1 | iex"
# with npm
npm install -g bun
@@ -85,314 +86,49 @@ bun upgrade --canary
## Quick links
- Intro
- [What is Bun?](https://bun.sh/docs/index)
- [Installation](https://bun.sh/docs/installation)
- [Quickstart](https://bun.sh/docs/quickstart)
- [TypeScript](https://bun.sh/docs/typescript)
- Templating
- [`bun init`](https://bun.sh/docs/cli/init)
- [`bun create`](https://bun.sh/docs/cli/bun-create)
- Runtime
- CLI
- [`bun run`](https://bun.sh/docs/cli/run)
- [File types](https://bun.sh/docs/runtime/loaders)
- [TypeScript](https://bun.sh/docs/runtime/typescript)
- [JSX](https://bun.sh/docs/runtime/jsx)
- [Environment variables](https://bun.sh/docs/runtime/env)
- [Bun APIs](https://bun.sh/docs/runtime/bun-apis)
- [Web APIs](https://bun.sh/docs/runtime/web-apis)
- [Node.js compatibility](https://bun.sh/docs/runtime/nodejs-apis)
- [Single-file executable](https://bun.sh/docs/bundler/executables)
- [Plugins](https://bun.sh/docs/runtime/plugins)
- [Watch mode](https://bun.sh/docs/runtime/hot)
- [Module resolution](https://bun.sh/docs/runtime/modules)
- [Auto-install](https://bun.sh/docs/runtime/autoimport)
- [bunfig.toml](https://bun.sh/docs/runtime/bunfig)
- [Debugger](https://bun.sh/docs/runtime/debugger)
- [Framework API](https://bun.sh/docs/runtime/framework)
- Package manager
- [`bun install`](https://bun.sh/docs/cli/install)
- [`bun add`](https://bun.sh/docs/cli/add)
- [`bun remove`](https://bun.sh/docs/cli/remove)
- [`bun update`](https://bun.sh/docs/cli/update)
- [`bun link`](https://bun.sh/docs/cli/link)
- [`bun pm`](https://bun.sh/docs/cli/pm)
- [Global cache](https://bun.sh/docs/install/cache)
- [Workspaces](https://bun.sh/docs/install/workspaces)
- [Lifecycle scripts](https://bun.sh/docs/install/lifecycle)
- [Filter](https://bun.sh/docs/cli/filter)
- [Lockfile](https://bun.sh/docs/install/lockfile)
- [Scopes and registries](https://bun.sh/docs/install/registries)
- [Overrides and resolutions](https://bun.sh/docs/install/overrides)
- Bundler
- [`Bun.build`](https://bun.sh/docs/bundler)
- [Loaders](https://bun.sh/docs/bundler/loaders)
- [Plugins](https://bun.sh/docs/bundler/plugins)
- [Macros](https://bun.sh/docs/bundler/macros)
- [vs esbuild](https://bun.sh/docs/bundler/vs-esbuild)
- Test runner
- [`bun test`](https://bun.sh/docs/cli/test)
- [Writing tests](https://bun.sh/docs/test/writing)
- [Watch mode](https://bun.sh/docs/test/hot)
- [Lifecycle hooks](https://bun.sh/docs/test/lifecycle)
- [Mocks](https://bun.sh/docs/test/mocks)
- [Snapshots](https://bun.sh/docs/test/snapshots)
- [Dates and times](https://bun.sh/docs/test/time)
- [DOM testing](https://bun.sh/docs/test/dom)
- [Code coverage](https://bun.sh/docs/test/coverage)
- Package runner
- [`bun create`](https://bun.sh/docs/cli/create)
- [`bunx`](https://bun.sh/docs/cli/bunx)
- Runtime
- [Runtime](https://bun.sh/docs/runtime/index)
- [Module resolution](https://bun.sh/docs/runtime/modules)
- [Hot &amp; live reloading](https://bun.sh/docs/runtime/hot)
- [Plugins](https://bun.sh/docs/bundler/plugins)
- Ecosystem
- [Node.js](https://bun.sh/docs/ecosystem/nodejs)
- [TypeScript](https://bun.sh/docs/ecosystem/typescript)
- [React](https://bun.sh/docs/ecosystem/react)
- [Elysia](https://bun.sh/docs/ecosystem/elysia)
- [Hono](https://bun.sh/docs/ecosystem/hono)
- [Express](https://bun.sh/docs/ecosystem/express)
- [awesome-bun](https://github.com/apvarun/awesome-bun)
- API
- [HTTP server](https://bun.sh/docs/api/http)
- [HTTP](https://bun.sh/docs/api/http)
- [WebSockets](https://bun.sh/docs/api/websockets)
- [Workers](https://bun.sh/docs/api/workers)
- [Binary data](https://bun.sh/docs/api/binary-data)
- [Streams](https://bun.sh/docs/api/streams)
- [TCP Sockets](https://bun.sh/docs/api/tcp)
- [File I/O](https://bun.sh/docs/api/file-io)
- [import.meta](https://bun.sh/docs/api/import-meta)
- [SQLite](https://bun.sh/docs/api/sqlite)
- [FileSystemRouter](https://bun.sh/docs/api/file-system-router)
- [TCP sockets](https://bun.sh/docs/api/tcp)
- [UDP sockets](https://bun.sh/docs/api/udp)
- [Globals](https://bun.sh/docs/api/globals)
- [$ Shell](https://bun.sh/docs/runtime/shell)
- [Child processes](https://bun.sh/docs/api/spawn)
- [Spawn](https://bun.sh/docs/api/spawn)
- [Transpiler](https://bun.sh/docs/api/transpiler)
- [Hashing](https://bun.sh/docs/api/hashing)
- [Console](https://bun.sh/docs/api/console)
- [FFI](https://bun.sh/docs/api/ffi)
- [HTMLRewriter](https://bun.sh/docs/api/html-rewriter)
- [Testing](https://bun.sh/docs/api/test)
- [Utils](https://bun.sh/docs/api/utils)
- [DNS](https://bun.sh/docs/api/dns)
- [Node-API](https://bun.sh/docs/api/node-api)
- [Glob](https://bun.sh/docs/api/glob)
- [Semver](https://bun.sh/docs/api/semver)
- Project
- [Roadmap](https://bun.sh/docs/project/roadmap)
- [Benchmarking](https://bun.sh/docs/project/benchmarking)
- [Contributing](https://bun.sh/docs/project/contributing)
- [Building Windows](https://bun.sh/docs/project/building-windows)
- [License](https://bun.sh/docs/project/licensing)
## Guides
- Binary
- [Convert a Blob to a DataView](https://bun.sh/guides/binary/blob-to-dataview)
- [Convert a Blob to a ReadableStream](https://bun.sh/guides/binary/blob-to-stream)
- [Convert a Blob to a string](https://bun.sh/guides/binary/blob-to-string)
- [Convert a Blob to a Uint8Array](https://bun.sh/guides/binary/blob-to-typedarray)
- [Convert a Blob to an ArrayBuffer](https://bun.sh/guides/binary/blob-to-arraybuffer)
- [Convert a Buffer to a blob](https://bun.sh/guides/binary/buffer-to-blob)
- [Convert a Buffer to a ReadableStream](https://bun.sh/guides/binary/buffer-to-readablestream)
- [Convert a Buffer to a string](https://bun.sh/guides/binary/buffer-to-string)
- [Convert a Buffer to a Uint8Array](https://bun.sh/guides/binary/buffer-to-typedarray)
- [Convert a Buffer to an ArrayBuffer](https://bun.sh/guides/binary/buffer-to-arraybuffer)
- [Convert a DataView to a string](https://bun.sh/guides/binary/dataview-to-string)
- [Convert a Uint8Array to a Blob](https://bun.sh/guides/binary/typedarray-to-blob)
- [Convert a Uint8Array to a Buffer](https://bun.sh/guides/binary/typedarray-to-buffer)
- [Convert a Uint8Array to a DataView](https://bun.sh/guides/binary/typedarray-to-dataview)
- [Convert a Uint8Array to a ReadableStream](https://bun.sh/guides/binary/typedarray-to-readablestream)
- [Convert a Uint8Array to a string](https://bun.sh/guides/binary/typedarray-to-string)
- [Convert a Uint8Array to an ArrayBuffer](https://bun.sh/guides/binary/typedarray-to-arraybuffer)
- [Convert an ArrayBuffer to a Blob](https://bun.sh/guides/binary/arraybuffer-to-blob)
- [Convert an ArrayBuffer to a Buffer](https://bun.sh/guides/binary/arraybuffer-to-buffer)
- [Convert an ArrayBuffer to a string](https://bun.sh/guides/binary/arraybuffer-to-string)
- [Convert an ArrayBuffer to a Uint8Array](https://bun.sh/guides/binary/arraybuffer-to-typedarray)
- [Convert an ArrayBuffer to an array of numbers](https://bun.sh/guides/binary/arraybuffer-to-array)
- Ecosystem
- [Build a frontend using Vite and Bun](https://bun.sh/guides/ecosystem/vite)
- [Build an app with Astro and Bun](https://bun.sh/guides/ecosystem/astro)
- [Build an app with Next.js and Bun](https://bun.sh/guides/ecosystem/nextjs)
- [Build an app with Nuxt and Bun](https://bun.sh/guides/ecosystem/nuxt)
- [Build an app with Qwik and Bun](https://bun.sh/guides/ecosystem/qwik)
- [Build an app with Remix and Bun](https://bun.sh/guides/ecosystem/remix)
- [Build an app with SolidStart and Bun](https://bun.sh/guides/ecosystem/solidstart)
- [Build an app with SvelteKit and Bun](https://bun.sh/guides/ecosystem/sveltekit)
- [Build an HTTP server using Elysia and Bun](https://bun.sh/guides/ecosystem/elysia)
- [Build an HTTP server using Express and Bun](https://bun.sh/guides/ecosystem/express)
- [Build an HTTP server using Hono and Bun](https://bun.sh/guides/ecosystem/hono)
- [Build an HTTP server using StricJS and Bun](https://bun.sh/guides/ecosystem/stric)
- [Containerize a Bun application with Docker](https://bun.sh/guides/ecosystem/docker)
- [Create a Discord bot](https://bun.sh/guides/ecosystem/discordjs)
- [Deploy a Bun application on Render](https://bun.sh/guides/ecosystem/render)
- [Read and write data to MongoDB using Mongoose and Bun](https://bun.sh/guides/ecosystem/mongoose)
- [Run Bun as a daemon with PM2](https://bun.sh/guides/ecosystem/pm2)
- [Run Bun as a daemon with systemd](https://bun.sh/guides/ecosystem/systemd)
- [Server-side render (SSR) a React component](https://bun.sh/guides/ecosystem/ssr-react)
- [Use Drizzle ORM with Bun](https://bun.sh/guides/ecosystem/drizzle)
- [Use EdgeDB with Bun](https://bun.sh/guides/ecosystem/edgedb)
- [Use Neon's Serverless Postgres with Bun](https://bun.sh/guides/ecosystem/neon-serverless-postgres)
- [Use Prisma with Bun](https://bun.sh/guides/ecosystem/prisma)
- [Use React and JSX](https://bun.sh/guides/ecosystem/react)
- [Add Sentry to a Bun app](https://bun.sh/guides/ecosystem/sentry)
- HTTP
- [Common HTTP server usage](https://bun.sh/guides/http/server)
- [Configure TLS on an HTTP server](https://bun.sh/guides/http/tls)
- [fetch with unix domain sockets in Bun](https://bun.sh/guides/http/fetch-unix)
- [Hot reload an HTTP server](https://bun.sh/guides/http/hot)
- [Proxy HTTP requests using fetch()](https://bun.sh/guides/http/proxy)
- [Send an HTTP request using fetch](https://bun.sh/guides/http/fetch)
- [Start a cluster of HTTP servers](https://bun.sh/guides/http/cluster)
- [Stream a file as an HTTP Response](https://bun.sh/guides/http/stream-file)
- [Streaming HTTP Server with Async Iterators](https://bun.sh/guides/http/stream-iterator)
- [Streaming HTTP Server with Node.js Streams](https://bun.sh/guides/http/stream-node-streams-in-bun)
- [Upload files via HTTP using FormData](https://bun.sh/guides/http/file-uploads)
- [Write a simple HTTP server](https://bun.sh/guides/http/simple)
- Install
- [Add a dependency](https://bun.sh/guides/install/add)
- [Add a development dependency](https://bun.sh/guides/install/add-dev)
- [Add a Git dependency](https://bun.sh/guides/install/add-git)
- [Add a peer dependency](https://bun.sh/guides/install/add-peer)
- [Add a tarball dependency](https://bun.sh/guides/install/add-tarball)
- [Add a trusted dependency](https://bun.sh/guides/install/trusted)
- [Add an optional dependency](https://bun.sh/guides/install/add-optional)
- [Configure a private registry for an organization scope with bun install](https://bun.sh/guides/install/registry-scope)
- [Configure git to diff Bun's lockb lockfile](https://bun.sh/guides/install/git-diff-bun-lockfile)
- [Configuring a monorepo using workspaces](https://bun.sh/guides/install/workspaces)
- [Generate a human-readable lockfile](https://bun.sh/guides/install/yarnlock)
- [Install a package under a different name](https://bun.sh/guides/install/npm-alias)
- [Install dependencies with Bun in GitHub Actions](https://bun.sh/guides/install/cicd)
- [Override the default npm registry for bun install](https://bun.sh/guides/install/custom-registry)
- [Using bun install with an Azure Artifacts npm registry](https://bun.sh/guides/install/azure-artifacts)
- [Using bun install with Artifactory](https://bun.sh/guides/install/jfrog-artifactory)
- Process
- [Get the process uptime in nanoseconds](https://bun.sh/guides/process/nanoseconds)
- [Listen for CTRL+C](https://bun.sh/guides/process/ctrl-c)
- [Listen to OS signals](https://bun.sh/guides/process/os-signals)
- [Parse command-line arguments](https://bun.sh/guides/process/argv)
- [Read from stdin](https://bun.sh/guides/process/stdin)
- [Read stderr from a child process](https://bun.sh/guides/process/spawn-stderr)
- [Read stdout from a child process](https://bun.sh/guides/process/spawn-stdout)
- [Spawn a child process](https://bun.sh/guides/process/spawn)
- [Spawn a child process and communicate using IPC](https://bun.sh/guides/process/ipc)
- Read file
- [Check if a file exists](https://bun.sh/guides/read-file/exists)
- [Get the MIME type of a file](https://bun.sh/guides/read-file/mime)
- [Read a file as a ReadableStream](https://bun.sh/guides/read-file/stream)
- [Read a file as a string](https://bun.sh/guides/read-file/string)
- [Read a file to a Buffer](https://bun.sh/guides/read-file/buffer)
- [Read a file to a Uint8Array](https://bun.sh/guides/read-file/uint8array)
- [Read a file to an ArrayBuffer](https://bun.sh/guides/read-file/arraybuffer)
- [Read a JSON file](https://bun.sh/guides/read-file/json)
- [Watch a directory for changes](https://bun.sh/guides/read-file/watch)
- Runtime
- [Debugging Bun with the VS Code extension](https://bun.sh/guides/runtime/vscode-debugger)
- [Debugging Bun with the web debugger](https://bun.sh/guides/runtime/web-debugger)
- [Define and replace static globals & constants](https://bun.sh/guides/runtime/define-constant)
- [Import a JSON file](https://bun.sh/guides/runtime/import-json)
- [Import a TOML file](https://bun.sh/guides/runtime/import-toml)
- [Import HTML file as text](https://bun.sh/guides/runtime/import-html)
- [Install and run Bun in GitHub Actions](https://bun.sh/guides/runtime/cicd)
- [Install TypeScript declarations for Bun](https://bun.sh/guides/runtime/typescript)
- [Re-map import paths](https://bun.sh/guides/runtime/tsconfig-paths)
- [Read environment variables](https://bun.sh/guides/runtime/read-env)
- [Run a Shell Command](https://bun.sh/guides/runtime/shell)
- [Set a time zone in Bun](https://bun.sh/guides/runtime/timezone)
- [Set environment variables](https://bun.sh/guides/runtime/set-env)
- Streams
- [Convert a Node.js Readable to a Blob](https://bun.sh/guides/streams/node-readable-to-blob)
- [Convert a Node.js Readable to a string](https://bun.sh/guides/streams/node-readable-to-string)
- [Convert a Node.js Readable to an ArrayBuffer](https://bun.sh/guides/streams/node-readable-to-arraybuffer)
- [Convert a Node.js Readable to JSON](https://bun.sh/guides/streams/node-readable-to-json)
- [Convert a ReadableStream to a Blob](https://bun.sh/guides/streams/to-blob)
- [Convert a ReadableStream to a Buffer](https://bun.sh/guides/streams/to-buffer)
- [Convert a ReadableStream to a string](https://bun.sh/guides/streams/to-string)
- [Convert a ReadableStream to a Uint8Array](https://bun.sh/guides/streams/to-typedarray)
- [Convert a ReadableStream to an array of chunks](https://bun.sh/guides/streams/to-array)
- [Convert a ReadableStream to an ArrayBuffer](https://bun.sh/guides/streams/to-arraybuffer)
- [Convert a ReadableStream to JSON](https://bun.sh/guides/streams/to-json)
- Test
- [Bail early with the Bun test runner](https://bun.sh/guides/test/bail)
- [Generate code coverage reports with the Bun test runner](https://bun.sh/guides/test/coverage)
- [Mark a test as a "todo" with the Bun test runner](https://bun.sh/guides/test/todo-tests)
- [Migrate from Jest to Bun's test runner](https://bun.sh/guides/test/migrate-from-jest)
- [Mock functions in `bun test`](https://bun.sh/guides/test/mock-functions)
- [Re-run tests multiple times with the Bun test runner](https://bun.sh/guides/test/rerun-each)
- [Run tests in watch mode with Bun](https://bun.sh/guides/test/watch-mode)
- [Run your tests with the Bun test runner](https://bun.sh/guides/test/run-tests)
- [Set a code coverage threshold with the Bun test runner](https://bun.sh/guides/test/coverage-threshold)
- [Set a per-test timeout with the Bun test runner](https://bun.sh/guides/test/timeout)
- [Set the system time in Bun's test runner](https://bun.sh/guides/test/mock-clock)
- [Skip tests with the Bun test runner](https://bun.sh/guides/test/skip-tests)
- [Spy on methods in `bun test`](https://bun.sh/guides/test/spy-on)
- [Update snapshots in `bun test`](https://bun.sh/guides/test/update-snapshots)
- [Use snapshot testing in `bun test`](https://bun.sh/guides/test/snapshot)
- [Write browser DOM tests with Bun and happy-dom](https://bun.sh/guides/test/happy-dom)
- Util
- [Check if the current file is the entrypoint](https://bun.sh/guides/util/entrypoint)
- [Check if two objects are deeply equal](https://bun.sh/guides/util/deep-equals)
- [Compress and decompress data with DEFLATE](https://bun.sh/guides/util/deflate)
- [Compress and decompress data with gzip](https://bun.sh/guides/util/gzip)
- [Convert a file URL to an absolute path](https://bun.sh/guides/util/file-url-to-path)
- [Convert an absolute path to a file URL](https://bun.sh/guides/util/path-to-file-url)
- [Detect when code is executed with Bun](https://bun.sh/guides/util/detect-bun)
- [Encode and decode base64 strings](https://bun.sh/guides/util/base64)
- [Escape an HTML string](https://bun.sh/guides/util/escape-html)
- [Get the absolute path of the current file](https://bun.sh/guides/util/import-meta-path)
- [Get the absolute path to the current entrypoint](https://bun.sh/guides/util/main)
- [Get the current Bun version](https://bun.sh/guides/util/version)
- [Get the directory of the current file](https://bun.sh/guides/util/import-meta-dir)
- [Get the file name of the current file](https://bun.sh/guides/util/import-meta-file)
- [Get the path to an executable bin file](https://bun.sh/guides/util/which-path-to-executable-bin)
- [Hash a password](https://bun.sh/guides/util/hash-a-password)
- [Sleep for a fixed number of milliseconds](https://bun.sh/guides/util/sleep)
- WebSocket
- [Build a publish-subscribe WebSocket server](https://bun.sh/guides/websocket/pubsub)
- [Build a simple WebSocket server](https://bun.sh/guides/websocket/simple)
- [Enable compression for WebSocket messages](https://bun.sh/guides/websocket/compression)
- [Set per-socket contextual data on a WebSocket](https://bun.sh/guides/websocket/context)
- Write file
- [Append content to a file](https://bun.sh/guides/write-file/append)
- [Copy a file to another location](https://bun.sh/guides/write-file/file-cp)
- [Delete a file](https://bun.sh/guides/write-file/unlink)
- [Write a Blob to a file](https://bun.sh/guides/write-file/blob)
- [Write a file incrementally](https://bun.sh/guides/write-file/filesink)
- [Write a file to stdout](https://bun.sh/guides/write-file/cat)
- [Write a ReadableStream to a file](https://bun.sh/guides/write-file/stream)
- [Write a Response to a file](https://bun.sh/guides/write-file/response)
- [Write a string to a file](https://bun.sh/guides/write-file/basic)
- [Write to stdout](https://bun.sh/guides/write-file/stdout)
## Contributing
Refer to the [Project > Contributing](https://bun.sh/docs/project/contributing) guide to start contributing to Bun.
Refer to the [Project > Development](https://bun.sh/docs/project/development) guide to start contributing to Bun.
## License


@@ -1,12 +0,0 @@
# Security Policy
## Supported Versions
| Version | Supported |
| ------- | ------------------ |
| 1.x.x | :white_check_mark: |
## Reporting a Vulnerability
Report any discovered vulnerabilities to the Bun team by emailing `security@bun.sh`. Your report will be acknowledged within 5 days, and a team member will be assigned as the primary handler. To the greatest extent possible, the security team will endeavor to keep you informed of the progress being made towards a fix and full announcement, and may ask for additional information or guidance surrounding the reported issue.


@@ -1,30 +0,0 @@
// https://github.com/nodejs/node/issues/34493
import { AsyncLocalStorage } from "async_hooks";
const asyncLocalStorage = new AsyncLocalStorage();
// let fn = () => Promise.resolve(2).then(() => new Promise(resolve => queueMicrotask(resolve)));
let fn = () => /test/.test("test");
let runWithExpiry = async (expiry, fn) => {
let iterations = 0;
while (Date.now() < expiry) {
await fn();
iterations++;
}
return iterations;
};
console.log(`Performed ${await runWithExpiry(Date.now() + 1000, fn)} iterations to warmup`);
let withAls;
await asyncLocalStorage.run(123, async () => {
withAls = await runWithExpiry(Date.now() + 45000, fn);
console.log(`Performed ${withAls} iterations (with ALS enabled)`);
});
asyncLocalStorage.disable();
let withoutAls = await runWithExpiry(Date.now() + 45000, fn);
console.log(`Performed ${withoutAls} iterations (with ALS disabled)`);
console.log("ALS penalty: " + Math.round((1 - withAls / withoutAls) * 10000) / 100 + "%");


@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { run, bench } from "mitata";
bench("sync", () => {});
bench("async", async () => {});


@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { run, bench } from "../node_modules/mitata/src/cli.mjs";
bench("sync", () => {});
bench("async", async () => {});


@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { run, bench } from "mitata";
bench("sync", () => {});
bench("async", async () => {});


@@ -3,9 +3,9 @@
"scripts": {
"deps": "exit 0",
"build": "exit 0",
"bench:bun": "bun bun.js",
"bench:node": "node node.mjs",
"bench:deno": "deno run -A --unstable deno.js",
"bench:bun": "$BUN bun.js",
"bench:node": "$NODE node.mjs",
"bench:deno": "$DENO run -A --unstable deno.js",
"bench": "bun run bench:bun && bun run bench:node && bun run bench:deno"
}
}


@@ -1,416 +0,0 @@
{
"lockfileVersion": 1,
"workspaces": {
"": {
"name": "bench",
"dependencies": {
"@babel/core": "^7.16.10",
"@babel/preset-react": "^7.16.7",
"@babel/standalone": "^7.24.7",
"@swc/core": "^1.2.133",
"benchmark": "^2.1.4",
"braces": "^3.0.2",
"color": "^4.2.3",
"esbuild": "^0.14.12",
"eventemitter3": "^5.0.0",
"execa": "^8.0.1",
"fast-glob": "3.3.1",
"fdir": "^6.1.0",
"mitata": "^1.0.25",
"react": "^18.3.1",
"react-dom": "^18.3.1",
"string-width": "7.1.0",
"tinycolor2": "^1.6.0",
"zx": "^7.2.3",
},
"devDependencies": {
"fast-deep-equal": "^3.1.3",
},
},
},
"packages": {
"@ampproject/remapping": ["@ampproject/remapping@2.2.0", "", { "dependencies": { "@jridgewell/gen-mapping": "^0.1.0", "@jridgewell/trace-mapping": "^0.3.9" } }, "sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w=="],
"@babel/code-frame": ["@babel/code-frame@7.18.6", "", { "dependencies": { "@babel/highlight": "^7.18.6" } }, "sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q=="],
"@babel/compat-data": ["@babel/compat-data@7.20.14", "", {}, "sha512-0YpKHD6ImkWMEINCyDAD0HLLUH/lPCefG8ld9it8DJB2wnApraKuhgYTvTY1z7UFIfBTGy5LwncZ+5HWWGbhFw=="],
"@babel/core": ["@babel/core@7.20.12", "", { "dependencies": { "@ampproject/remapping": "^2.1.0", "@babel/code-frame": "^7.18.6", "@babel/generator": "^7.20.7", "@babel/helper-compilation-targets": "^7.20.7", "@babel/helper-module-transforms": "^7.20.11", "@babel/helpers": "^7.20.7", "@babel/parser": "^7.20.7", "@babel/template": "^7.20.7", "@babel/traverse": "^7.20.12", "@babel/types": "^7.20.7", "convert-source-map": "^1.7.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", "json5": "^2.2.2", "semver": "^6.3.0" } }, "sha512-XsMfHovsUYHFMdrIHkZphTN/2Hzzi78R08NuHfDBehym2VsPDL6Zn/JAD/JQdnRvbSsbQc4mVaU1m6JgtTEElg=="],
"@babel/generator": ["@babel/generator@7.20.14", "", { "dependencies": { "@babel/types": "^7.20.7", "@jridgewell/gen-mapping": "^0.3.2", "jsesc": "^2.5.1" } }, "sha512-AEmuXHdcD3A52HHXxaTmYlb8q/xMEhoRP67B3T4Oq7lbmSoqroMZzjnGj3+i1io3pdnF8iBYVu4Ilj+c4hBxYg=="],
"@babel/helper-annotate-as-pure": ["@babel/helper-annotate-as-pure@7.18.6", "", { "dependencies": { "@babel/types": "^7.18.6" } }, "sha512-duORpUiYrEpzKIop6iNbjnwKLAKnJ47csTyRACyEmWj0QdUrm5aqNJGHSSEQSUAvNW0ojX0dOmK9dZduvkfeXA=="],
"@babel/helper-compilation-targets": ["@babel/helper-compilation-targets@7.20.7", "", { "dependencies": { "@babel/compat-data": "^7.20.5", "@babel/helper-validator-option": "^7.18.6", "browserslist": "^4.21.3", "lru-cache": "^5.1.1", "semver": "^6.3.0" }, "peerDependencies": { "@babel/core": "^7.0.0" } }, "sha512-4tGORmfQcrc+bvrjb5y3dG9Mx1IOZjsHqQVUz7XCNHO+iTmqxWnVg3KRygjGmpRLJGdQSKuvFinbIb0CnZwHAQ=="],
"@babel/helper-environment-visitor": ["@babel/helper-environment-visitor@7.18.9", "", {}, "sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg=="],
"@babel/helper-function-name": ["@babel/helper-function-name@7.19.0", "", { "dependencies": { "@babel/template": "^7.18.10", "@babel/types": "^7.19.0" } }, "sha512-WAwHBINyrpqywkUH0nTnNgI5ina5TFn85HKS0pbPDfxFfhyR/aNQEn4hGi1P1JyT//I0t4OgXUlofzWILRvS5w=="],
"@babel/helper-hoist-variables": ["@babel/helper-hoist-variables@7.18.6", "", { "dependencies": { "@babel/types": "^7.18.6" } }, "sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q=="],
"@babel/helper-module-imports": ["@babel/helper-module-imports@7.18.6", "", { "dependencies": { "@babel/types": "^7.18.6" } }, "sha512-0NFvs3VkuSYbFi1x2Vd6tKrywq+z/cLeYC/RJNFrIX/30Bf5aiGYbtvGXolEktzJH8o5E5KJ3tT+nkxuuZFVlA=="],
"@babel/helper-module-transforms": ["@babel/helper-module-transforms@7.20.11", "", { "dependencies": { "@babel/helper-environment-visitor": "^7.18.9", "@babel/helper-module-imports": "^7.18.6", "@babel/helper-simple-access": "^7.20.2", "@babel/helper-split-export-declaration": "^7.18.6", "@babel/helper-validator-identifier": "^7.19.1", "@babel/template": "^7.20.7", "@babel/traverse": "^7.20.10", "@babel/types": "^7.20.7" } }, "sha512-uRy78kN4psmji1s2QtbtcCSaj/LILFDp0f/ymhpQH5QY3nljUZCaNWz9X1dEj/8MBdBEFECs7yRhKn8i7NjZgg=="],
"@babel/helper-plugin-utils": ["@babel/helper-plugin-utils@7.20.2", "", {}, "sha512-8RvlJG2mj4huQ4pZ+rU9lqKi9ZKiRmuvGuM2HlWmkmgOhbs6zEAw6IEiJ5cQqGbDzGZOhwuOQNtZMi/ENLjZoQ=="],
"@babel/helper-simple-access": ["@babel/helper-simple-access@7.20.2", "", { "dependencies": { "@babel/types": "^7.20.2" } }, "sha512-+0woI/WPq59IrqDYbVGfshjT5Dmk/nnbdpcF8SnMhhXObpTq2KNBdLFRFrkVdbDOyUmHBCxzm5FHV1rACIkIbA=="],
"@babel/helper-split-export-declaration": ["@babel/helper-split-export-declaration@7.18.6", "", { "dependencies": { "@babel/types": "^7.18.6" } }, "sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA=="],
"@babel/helper-string-parser": ["@babel/helper-string-parser@7.19.4", "", {}, "sha512-nHtDoQcuqFmwYNYPz3Rah5ph2p8PFeFCsZk9A/48dPc/rGocJ5J3hAAZ7pb76VWX3fZKu+uEr/FhH5jLx7umrw=="],
"@babel/helper-validator-identifier": ["@babel/helper-validator-identifier@7.19.1", "", {}, "sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w=="],
"@babel/helper-validator-option": ["@babel/helper-validator-option@7.18.6", "", {}, "sha512-XO7gESt5ouv/LRJdrVjkShckw6STTaB7l9BrpBaAHDeF5YZT+01PCwmR0SJHnkW6i8OwW/EVWRShfi4j2x+KQw=="],
"@babel/helpers": ["@babel/helpers@7.20.13", "", { "dependencies": { "@babel/template": "^7.20.7", "@babel/traverse": "^7.20.13", "@babel/types": "^7.20.7" } }, "sha512-nzJ0DWCL3gB5RCXbUO3KIMMsBY2Eqbx8mBpKGE/02PgyRQFcPQLbkQ1vyy596mZLaP+dAfD+R4ckASzNVmW3jg=="],
"@babel/highlight": ["@babel/highlight@7.18.6", "", { "dependencies": { "@babel/helper-validator-identifier": "^7.18.6", "chalk": "^2.0.0", "js-tokens": "^4.0.0" } }, "sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g=="],
"@babel/parser": ["@babel/parser@7.20.15", "", { "bin": "./bin/babel-parser.js" }, "sha512-DI4a1oZuf8wC+oAJA9RW6ga3Zbe8RZFt7kD9i4qAspz3I/yHet1VvC3DiSy/fsUvv5pvJuNPh0LPOdCcqinDPg=="],
"@babel/plugin-syntax-jsx": ["@babel/plugin-syntax-jsx@7.18.6", "", { "dependencies": { "@babel/helper-plugin-utils": "^7.18.6" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "sha512-6mmljtAedFGTWu2p/8WIORGwy+61PLgOMPOdazc7YoJ9ZCWUyFy3A6CpPkRKLKD1ToAesxX8KGEViAiLo9N+7Q=="],
"@babel/plugin-transform-react-display-name": ["@babel/plugin-transform-react-display-name@7.18.6", "", { "dependencies": { "@babel/helper-plugin-utils": "^7.18.6" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "sha512-TV4sQ+T013n61uMoygyMRm+xf04Bd5oqFpv2jAEQwSZ8NwQA7zeRPg1LMVg2PWi3zWBz+CLKD+v5bcpZ/BS0aA=="],
"@babel/plugin-transform-react-jsx": ["@babel/plugin-transform-react-jsx@7.20.13", "", { "dependencies": { "@babel/helper-annotate-as-pure": "^7.18.6", "@babel/helper-module-imports": "^7.18.6", "@babel/helper-plugin-utils": "^7.20.2", "@babel/plugin-syntax-jsx": "^7.18.6", "@babel/types": "^7.20.7" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "sha512-MmTZx/bkUrfJhhYAYt3Urjm+h8DQGrPrnKQ94jLo7NLuOU+T89a7IByhKmrb8SKhrIYIQ0FN0CHMbnFRen4qNw=="],
"@babel/plugin-transform-react-jsx-development": ["@babel/plugin-transform-react-jsx-development@7.18.6", "", { "dependencies": { "@babel/plugin-transform-react-jsx": "^7.18.6" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "sha512-SA6HEjwYFKF7WDjWcMcMGUimmw/nhNRDWxr+KaLSCrkD/LMDBvWRmHAYgE1HDeF8KUuI8OAu+RT6EOtKxSW2qA=="],
"@babel/plugin-transform-react-pure-annotations": ["@babel/plugin-transform-react-pure-annotations@7.18.6", "", { "dependencies": { "@babel/helper-annotate-as-pure": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "sha512-I8VfEPg9r2TRDdvnHgPepTKvuRomzA8+u+nhY7qSI1fR2hRNebasZEETLyM5mAUr0Ku56OkXJ0I7NHJnO6cJiQ=="],
"@babel/preset-react": ["@babel/preset-react@7.18.6", "", { "dependencies": { "@babel/helper-plugin-utils": "^7.18.6", "@babel/helper-validator-option": "^7.18.6", "@babel/plugin-transform-react-display-name": "^7.18.6", "@babel/plugin-transform-react-jsx": "^7.18.6", "@babel/plugin-transform-react-jsx-development": "^7.18.6", "@babel/plugin-transform-react-pure-annotations": "^7.18.6" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "sha512-zXr6atUmyYdiWRVLOZahakYmOBHtWc2WGCkP8PYTgZi0iJXDY2CN180TdrIW4OGOAdLc7TifzDIvtx6izaRIzg=="],
"@babel/standalone": ["@babel/standalone@7.24.7", "", {}, "sha512-QRIRMJ2KTeN+vt4l9OjYlxDVXEpcor1Z6V7OeYzeBOw6Q8ew9oMTHjzTx8s6ClsZO7wVf6JgTRutihatN6K0yA=="],
"@babel/template": ["@babel/template@7.20.7", "", { "dependencies": { "@babel/code-frame": "^7.18.6", "@babel/parser": "^7.20.7", "@babel/types": "^7.20.7" } }, "sha512-8SegXApWe6VoNw0r9JHpSteLKTpTiLZ4rMlGIm9JQ18KiCtyQiAMEazujAHrUS5flrcqYZa75ukev3P6QmUwUw=="],
"@babel/traverse": ["@babel/traverse@7.20.13", "", { "dependencies": { "@babel/code-frame": "^7.18.6", "@babel/generator": "^7.20.7", "@babel/helper-environment-visitor": "^7.18.9", "@babel/helper-function-name": "^7.19.0", "@babel/helper-hoist-variables": "^7.18.6", "@babel/helper-split-export-declaration": "^7.18.6", "@babel/parser": "^7.20.13", "@babel/types": "^7.20.7", "debug": "^4.1.0", "globals": "^11.1.0" } }, "sha512-kMJXfF0T6DIS9E8cgdLCSAL+cuCK+YEZHWiLK0SXpTo8YRj5lpJu3CDNKiIBCne4m9hhTIqUg6SYTAI39tAiVQ=="],
"@babel/types": ["@babel/types@7.20.7", "", { "dependencies": { "@babel/helper-string-parser": "^7.19.4", "@babel/helper-validator-identifier": "^7.19.1", "to-fast-properties": "^2.0.0" } }, "sha512-69OnhBxSSgK0OzTJai4kyPDiKTIe3j+ctaHdIGVbRahTLAT7L3R9oeXHC2aVSuGYt3cVnoAMDmOCgJ2yaiLMvg=="],
"@esbuild/linux-loong64": ["@esbuild/linux-loong64@0.14.54", "", { "os": "linux", "cpu": "none" }, "sha512-bZBrLAIX1kpWelV0XemxBZllyRmM6vgFQQG2GdNb+r3Fkp0FOh1NJSvekXDs7jq70k4euu1cryLMfU+mTXlEpw=="],
"@jridgewell/gen-mapping": ["@jridgewell/gen-mapping@0.1.1", "", { "dependencies": { "@jridgewell/set-array": "^1.0.0", "@jridgewell/sourcemap-codec": "^1.4.10" } }, "sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w=="],
"@jridgewell/resolve-uri": ["@jridgewell/resolve-uri@3.1.0", "", {}, "sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w=="],
"@jridgewell/set-array": ["@jridgewell/set-array@1.1.2", "", {}, "sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw=="],
"@jridgewell/sourcemap-codec": ["@jridgewell/sourcemap-codec@1.4.14", "", {}, "sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw=="],
"@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.17", "", { "dependencies": { "@jridgewell/resolve-uri": "3.1.0", "@jridgewell/sourcemap-codec": "1.4.14" } }, "sha512-MCNzAp77qzKca9+W/+I0+sEpaUnZoeasnghNeVc41VZCEKaCH73Vq3BZZ/SzWIgrqE4H4ceI+p+b6C0mHf9T4g=="],
"@nodelib/fs.scandir": ["@nodelib/fs.scandir@2.1.5", "", { "dependencies": { "@nodelib/fs.stat": "2.0.5", "run-parallel": "^1.1.9" } }, "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g=="],
"@nodelib/fs.stat": ["@nodelib/fs.stat@2.0.5", "", {}, "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A=="],
"@nodelib/fs.walk": ["@nodelib/fs.walk@1.2.8", "", { "dependencies": { "@nodelib/fs.scandir": "2.1.5", "fastq": "^1.6.0" } }, "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg=="],
"@swc/core": ["@swc/core@1.3.35", "", { "dependencies": { "@swc/core-darwin-arm64": "1.3.35", "@swc/core-darwin-x64": "1.3.35", "@swc/core-linux-arm-gnueabihf": "1.3.35", "@swc/core-linux-arm64-gnu": "1.3.35", "@swc/core-linux-arm64-musl": "1.3.35", "@swc/core-linux-x64-gnu": "1.3.35", "@swc/core-linux-x64-musl": "1.3.35", "@swc/core-win32-arm64-msvc": "1.3.35", "@swc/core-win32-ia32-msvc": "1.3.35", "@swc/core-win32-x64-msvc": "1.3.35" } }, "sha512-KmiBin0XSVzJhzX19zTiCqmLslZ40Cl7zqskJcTDeIrRhfgKdiAsxzYUanJgMJIRjYtl9Kcg1V/Ip2o2wL8v3w=="],
"@swc/core-darwin-arm64": ["@swc/core-darwin-arm64@1.3.35", "", { "os": "darwin", "cpu": "arm64" }, "sha512-zQUFkHx4gZpu0uo2IspvPnKsz8bsdXd5bC33xwjtoAI1cpLerDyqo4v2zIahEp+FdKZjyVsLHtfJiQiA1Qka3A=="],
"@swc/core-darwin-x64": ["@swc/core-darwin-x64@1.3.35", "", { "os": "darwin", "cpu": "x64" }, "sha512-oOSkSGWtALovaw22lNevKD434OQTPf8X+dVPvPMrJXJpJ34dWDlFWpLntoc+arvKLNZ7LQmTuk8rR1hkrAY7cw=="],
"@swc/core-linux-arm-gnueabihf": ["@swc/core-linux-arm-gnueabihf@1.3.35", "", { "os": "linux", "cpu": "arm" }, "sha512-Yie8k00O6O8BCATS/xeKStquV4OYSskUGRDXBQVDw1FrE23PHaSeHCgg4q6iNZjJzXCOJbaTCKnYoIDn9DMf7A=="],
"@swc/core-linux-arm64-gnu": ["@swc/core-linux-arm64-gnu@1.3.35", "", { "os": "linux", "cpu": "arm64" }, "sha512-Zlv3WHa/4x2p51HSvjUWXHfSe1Gl2prqImUZJc8NZOlj75BFzVuR0auhQ+LbwvIQ3gaA1LODX9lyS9wXL3yjxA=="],
"@swc/core-linux-arm64-musl": ["@swc/core-linux-arm64-musl@1.3.35", "", { "os": "linux", "cpu": "arm64" }, "sha512-u6tCYsrSyZ8U+4jLMA/O82veBfLy2aUpn51WxQaeH7wqZGy9TGSJXoO8vWxARQ6b72vjsnKDJHP4MD8hFwcctg=="],
"@swc/core-linux-x64-gnu": ["@swc/core-linux-x64-gnu@1.3.35", "", { "os": "linux", "cpu": "x64" }, "sha512-Dtxf2IbeH7XlNhP1Qt2/MvUPkpEbn7hhGfpSRs4ot8D3Vf5QEX4S/QtC1OsFWuciiYgHAT1Ybjt4xZic9DSkmA=="],
"@swc/core-linux-x64-musl": ["@swc/core-linux-x64-musl@1.3.35", "", { "os": "linux", "cpu": "x64" }, "sha512-4XavNJ60GprjpTiESCu5daJUnmErixPAqDitJSMu4TV32LNIE8G00S9pDLXinDTW1rgcGtQdq1NLkNRmwwovtg=="],
"@swc/core-win32-arm64-msvc": ["@swc/core-win32-arm64-msvc@1.3.35", "", { "os": "win32", "cpu": "arm64" }, "sha512-dNGfKCUSX2M4qVyaS80Lyos0FkXyHRCvrdQ2Y4Hrg3FVokiuw3yY6fLohpUfQ5ws3n2A39dh7jGDeh34+l0sGA=="],
"@swc/core-win32-ia32-msvc": ["@swc/core-win32-ia32-msvc@1.3.35", "", { "os": "win32", "cpu": "ia32" }, "sha512-ChuPSrDR+JBf7S7dEKPicnG8A3bM0uWPsW2vG+V2wH4iNfNxKVemESHosmYVeEZXqMpomNMvLyeHep1rjRsc0Q=="],
"@swc/core-win32-x64-msvc": ["@swc/core-win32-x64-msvc@1.3.35", "", { "os": "win32", "cpu": "x64" }, "sha512-/RvphT4WfuGfIK84Ha0dovdPrKB1bW/mc+dtdmhv2E3EGkNc5FoueNwYmXWRimxnU7X0X7IkcRhyKB4G5DeAmg=="],
"@types/fs-extra": ["@types/fs-extra@11.0.4", "", { "dependencies": { "@types/jsonfile": "*", "@types/node": "*" } }, "sha512-yTbItCNreRooED33qjunPthRcSjERP1r4MqCZc7wv0u2sUkzTFp45tgUfS5+r7FrZPdmCCNflLhVSP/o+SemsQ=="],
"@types/jsonfile": ["@types/jsonfile@6.1.4", "", { "dependencies": { "@types/node": "*" } }, "sha512-D5qGUYwjvnNNextdU59/+fI+spnwtTFmyQP0h+PfIOSkNfpU6AOICUOkm4i0OnSk+NyjdPJrxCDro0sJsWlRpQ=="],
"@types/minimist": ["@types/minimist@1.2.5", "", {}, "sha512-hov8bUuiLiyFPGyFPE1lwWhmzYbirOXQNNo40+y3zow8aFVTeyn3VWL0VFFfdNddA8S4Vf0Tc062rzyNr7Paag=="],
"@types/node": ["@types/node@18.19.8", "", { "dependencies": { "undici-types": "~5.26.4" } }, "sha512-g1pZtPhsvGVTwmeVoexWZLTQaOvXwoSq//pTL0DHeNzUDrFnir4fgETdhjhIxjVnN+hKOuh98+E1eMLnUXstFg=="],
"@types/ps-tree": ["@types/ps-tree@1.1.6", "", {}, "sha512-PtrlVaOaI44/3pl3cvnlK+GxOM3re2526TJvPvh7W+keHIXdV4TE0ylpPBAcvFQCbGitaTXwL9u+RF7qtVeazQ=="],
"@types/which": ["@types/which@3.0.3", "", {}, "sha512-2C1+XoY0huExTbs8MQv1DuS5FS86+SEjdM9F/+GS61gg5Hqbtj8ZiDSx8MfWcyei907fIPbfPGCOrNUTnVHY1g=="],
"ansi-regex": ["ansi-regex@6.0.1", "", {}, "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA=="],
"ansi-styles": ["ansi-styles@3.2.1", "", { "dependencies": { "color-convert": "^1.9.0" } }, "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA=="],
"benchmark": ["benchmark@2.1.4", "", { "dependencies": { "lodash": "^4.17.4", "platform": "^1.3.3" } }, "sha512-l9MlfN4M1K/H2fbhfMy3B7vJd6AGKJVQn2h6Sg/Yx+KckoUA7ewS5Vv6TjSq18ooE1kS9hhAlQRH3AkXIh/aOQ=="],
"braces": ["braces@3.0.2", "", { "dependencies": { "fill-range": "^7.0.1" } }, "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A=="],
"browserslist": ["browserslist@4.21.5", "", { "dependencies": { "caniuse-lite": "^1.0.30001449", "electron-to-chromium": "^1.4.284", "node-releases": "^2.0.8", "update-browserslist-db": "^1.0.10" }, "bin": { "browserslist": "cli.js" } }, "sha512-tUkiguQGW7S3IhB7N+c2MV/HZPSCPAAiYBZXLsBhFB/PCy6ZKKsZrmBayHV9fdGV/ARIfJ14NkxKzRDjvp7L6w=="],
"caniuse-lite": ["caniuse-lite@1.0.30001456", "", {}, "sha512-XFHJY5dUgmpMV25UqaD4kVq2LsiaU5rS8fb0f17pCoXQiQslzmFgnfOxfvo1bTpTqf7dwG/N/05CnLCnOEKmzA=="],
"chalk": ["chalk@5.3.0", "", {}, "sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w=="],
"color": ["color@4.2.3", "", { "dependencies": { "color-convert": "^2.0.1", "color-string": "^1.9.0" } }, "sha512-1rXeuUUiGGrykh+CeBdu5Ie7OJwinCgQY0bc7GCRxy5xVHy+moaqkpL/jqQq0MtQOeYcrqEz4abc5f0KtU7W4A=="],
"color-convert": ["color-convert@2.0.1", "", { "dependencies": { "color-name": "~1.1.4" } }, "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ=="],
"color-name": ["color-name@1.1.4", "", {}, "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="],
"color-string": ["color-string@1.9.1", "", { "dependencies": { "color-name": "^1.0.0", "simple-swizzle": "^0.2.2" } }, "sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg=="],
"convert-source-map": ["convert-source-map@1.9.0", "", {}, "sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A=="],
"cross-spawn": ["cross-spawn@7.0.3", "", { "dependencies": { "path-key": "^3.1.0", "shebang-command": "^2.0.0", "which": "^2.0.1" } }, "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w=="],
"data-uri-to-buffer": ["data-uri-to-buffer@4.0.1", "", {}, "sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A=="],
"debug": ["debug@4.3.4", "", { "dependencies": { "ms": "2.1.2" } }, "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ=="],
"dir-glob": ["dir-glob@3.0.1", "", { "dependencies": { "path-type": "^4.0.0" } }, "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA=="],
"duplexer": ["duplexer@0.1.2", "", {}, "sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg=="],
"electron-to-chromium": ["electron-to-chromium@1.4.302", "", {}, "sha512-Uk7C+7aPBryUR1Fwvk9VmipBcN9fVsqBO57jV2ZjTm+IZ6BMNqu7EDVEg2HxCNufk6QcWlFsBkhQyQroB2VWKw=="],
"emoji-regex": ["emoji-regex@10.3.0", "", {}, "sha512-QpLs9D9v9kArv4lfDEgg1X/gN5XLnf/A6l9cs8SPZLRZR3ZkY9+kwIQTxm+fsSej5UMYGE8fdoaZVIBlqG0XTw=="],
"esbuild": ["esbuild@0.14.54", "", { "dependencies": { "@esbuild/linux-loong64": "0.14.54", "esbuild-android-64": "0.14.54", "esbuild-android-arm64": "0.14.54", "esbuild-darwin-64": "0.14.54", "esbuild-darwin-arm64": "0.14.54", "esbuild-freebsd-64": "0.14.54", "esbuild-freebsd-arm64": "0.14.54", "esbuild-linux-32": "0.14.54", "esbuild-linux-64": "0.14.54", "esbuild-linux-arm": "0.14.54", "esbuild-linux-arm64": "0.14.54", "esbuild-linux-mips64le": "0.14.54", "esbuild-linux-ppc64le": "0.14.54", "esbuild-linux-riscv64": "0.14.54", "esbuild-linux-s390x": "0.14.54", "esbuild-netbsd-64": "0.14.54", "esbuild-openbsd-64": "0.14.54", "esbuild-sunos-64": "0.14.54", "esbuild-windows-32": "0.14.54", "esbuild-windows-64": "0.14.54", "esbuild-windows-arm64": "0.14.54" }, "bin": { "esbuild": "bin/esbuild" } }, "sha512-Cy9llcy8DvET5uznocPyqL3BFRrFXSVqbgpMJ9Wz8oVjZlh/zUSNbPRbov0VX7VxN2JH1Oa0uNxZ7eLRb62pJA=="],
"esbuild-android-64": ["esbuild-android-64@0.14.54", "", { "os": "android", "cpu": "x64" }, "sha512-Tz2++Aqqz0rJ7kYBfz+iqyE3QMycD4vk7LBRyWaAVFgFtQ/O8EJOnVmTOiDWYZ/uYzB4kvP+bqejYdVKzE5lAQ=="],
"esbuild-android-arm64": ["esbuild-android-arm64@0.14.54", "", { "os": "android", "cpu": "arm64" }, "sha512-F9E+/QDi9sSkLaClO8SOV6etqPd+5DgJje1F9lOWoNncDdOBL2YF59IhsWATSt0TLZbYCf3pNlTHvVV5VfHdvg=="],
"esbuild-darwin-64": ["esbuild-darwin-64@0.14.54", "", { "os": "darwin", "cpu": "x64" }, "sha512-jtdKWV3nBviOd5v4hOpkVmpxsBy90CGzebpbO9beiqUYVMBtSc0AL9zGftFuBon7PNDcdvNCEuQqw2x0wP9yug=="],
"esbuild-darwin-arm64": ["esbuild-darwin-arm64@0.14.54", "", { "os": "darwin", "cpu": "arm64" }, "sha512-OPafJHD2oUPyvJMrsCvDGkRrVCar5aVyHfWGQzY1dWnzErjrDuSETxwA2HSsyg2jORLY8yBfzc1MIpUkXlctmw=="],
"esbuild-freebsd-64": ["esbuild-freebsd-64@0.14.54", "", { "os": "freebsd", "cpu": "x64" }, "sha512-OKwd4gmwHqOTp4mOGZKe/XUlbDJ4Q9TjX0hMPIDBUWWu/kwhBAudJdBoxnjNf9ocIB6GN6CPowYpR/hRCbSYAg=="],
"esbuild-freebsd-arm64": ["esbuild-freebsd-arm64@0.14.54", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-sFwueGr7OvIFiQT6WeG0jRLjkjdqWWSrfbVwZp8iMP+8UHEHRBvlaxL6IuKNDwAozNUmbb8nIMXa7oAOARGs1Q=="],
"esbuild-linux-32": ["esbuild-linux-32@0.14.54", "", { "os": "linux", "cpu": "ia32" }, "sha512-1ZuY+JDI//WmklKlBgJnglpUL1owm2OX+8E1syCD6UAxcMM/XoWd76OHSjl/0MR0LisSAXDqgjT3uJqT67O3qw=="],
"esbuild-linux-64": ["esbuild-linux-64@0.14.54", "", { "os": "linux", "cpu": "x64" }, "sha512-EgjAgH5HwTbtNsTqQOXWApBaPVdDn7XcK+/PtJwZLT1UmpLoznPd8c5CxqsH2dQK3j05YsB3L17T8vE7cp4cCg=="],
"esbuild-linux-arm": ["esbuild-linux-arm@0.14.54", "", { "os": "linux", "cpu": "arm" }, "sha512-qqz/SjemQhVMTnvcLGoLOdFpCYbz4v4fUo+TfsWG+1aOu70/80RV6bgNpR2JCrppV2moUQkww+6bWxXRL9YMGw=="],
"esbuild-linux-arm64": ["esbuild-linux-arm64@0.14.54", "", { "os": "linux", "cpu": "arm64" }, "sha512-WL71L+0Rwv+Gv/HTmxTEmpv0UgmxYa5ftZILVi2QmZBgX3q7+tDeOQNqGtdXSdsL8TQi1vIaVFHUPDe0O0kdig=="],
"esbuild-linux-mips64le": ["esbuild-linux-mips64le@0.14.54", "", { "os": "linux", "cpu": "none" }, "sha512-qTHGQB8D1etd0u1+sB6p0ikLKRVuCWhYQhAHRPkO+OF3I/iSlTKNNS0Lh2Oc0g0UFGguaFZZiPJdJey3AGpAlw=="],
"esbuild-linux-ppc64le": ["esbuild-linux-ppc64le@0.14.54", "", { "os": "linux", "cpu": "ppc64" }, "sha512-j3OMlzHiqwZBDPRCDFKcx595XVfOfOnv68Ax3U4UKZ3MTYQB5Yz3X1mn5GnodEVYzhtZgxEBidLWeIs8FDSfrQ=="],
"esbuild-linux-riscv64": ["esbuild-linux-riscv64@0.14.54", "", { "os": "linux", "cpu": "none" }, "sha512-y7Vt7Wl9dkOGZjxQZnDAqqn+XOqFD7IMWiewY5SPlNlzMX39ocPQlOaoxvT4FllA5viyV26/QzHtvTjVNOxHZg=="],
"esbuild-linux-s390x": ["esbuild-linux-s390x@0.14.54", "", { "os": "linux", "cpu": "s390x" }, "sha512-zaHpW9dziAsi7lRcyV4r8dhfG1qBidQWUXweUjnw+lliChJqQr+6XD71K41oEIC3Mx1KStovEmlzm+MkGZHnHA=="],
"esbuild-netbsd-64": ["esbuild-netbsd-64@0.14.54", "", { "os": "none", "cpu": "x64" }, "sha512-PR01lmIMnfJTgeU9VJTDY9ZerDWVFIUzAtJuDHwwceppW7cQWjBBqP48NdeRtoP04/AtO9a7w3viI+PIDr6d+w=="],
"esbuild-openbsd-64": ["esbuild-openbsd-64@0.14.54", "", { "os": "openbsd", "cpu": "x64" }, "sha512-Qyk7ikT2o7Wu76UsvvDS5q0amJvmRzDyVlL0qf5VLsLchjCa1+IAvd8kTBgUxD7VBUUVgItLkk609ZHUc1oCaw=="],
"esbuild-sunos-64": ["esbuild-sunos-64@0.14.54", "", { "os": "sunos", "cpu": "x64" }, "sha512-28GZ24KmMSeKi5ueWzMcco6EBHStL3B6ubM7M51RmPwXQGLe0teBGJocmWhgwccA1GeFXqxzILIxXpHbl9Q/Kw=="],
"esbuild-windows-32": ["esbuild-windows-32@0.14.54", "", { "os": "win32", "cpu": "ia32" }, "sha512-T+rdZW19ql9MjS7pixmZYVObd9G7kcaZo+sETqNH4RCkuuYSuv9AGHUVnPoP9hhuE1WM1ZimHz1CIBHBboLU7w=="],
"esbuild-windows-64": ["esbuild-windows-64@0.14.54", "", { "os": "win32", "cpu": "x64" }, "sha512-AoHTRBUuYwXtZhjXZbA1pGfTo8cJo3vZIcWGLiUcTNgHpJJMC1rVA44ZereBHMJtotyN71S8Qw0npiCIkW96cQ=="],
"esbuild-windows-arm64": ["esbuild-windows-arm64@0.14.54", "", { "os": "win32", "cpu": "arm64" }, "sha512-M0kuUvXhot1zOISQGXwWn6YtS+Y/1RT9WrVIOywZnJHo3jCDyewAc79aKNQWFCQm+xNHVTq9h8dZKvygoXQQRg=="],
"escalade": ["escalade@3.1.1", "", {}, "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw=="],
"escape-string-regexp": ["escape-string-regexp@1.0.5", "", {}, "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg=="],
"event-stream": ["event-stream@3.3.4", "", { "dependencies": { "duplexer": "~0.1.1", "from": "~0", "map-stream": "~0.1.0", "pause-stream": "0.0.11", "split": "0.3", "stream-combiner": "~0.0.4", "through": "~2.3.1" } }, "sha512-QHpkERcGsR0T7Qm3HNJSyXKEEj8AHNxkY3PK8TS2KJvQ7NiSHe3DDpwVKKtoYprL/AreyzFBeIkBIWChAqn60g=="],
"eventemitter3": ["eventemitter3@5.0.0", "", {}, "sha512-riuVbElZZNXLeLEoprfNYoDSwTBRR44X3mnhdI1YcnENpWTCsTTVZ2zFuqQcpoyqPQIUXdiPEU0ECAq0KQRaHg=="],
"execa": ["execa@8.0.1", "", { "dependencies": { "cross-spawn": "^7.0.3", "get-stream": "^8.0.1", "human-signals": "^5.0.0", "is-stream": "^3.0.0", "merge-stream": "^2.0.0", "npm-run-path": "^5.1.0", "onetime": "^6.0.0", "signal-exit": "^4.1.0", "strip-final-newline": "^3.0.0" } }, "sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg=="],
"fast-deep-equal": ["fast-deep-equal@3.1.3", "", {}, "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="],
"fast-glob": ["fast-glob@3.3.1", "", { "dependencies": { "@nodelib/fs.stat": "^2.0.2", "@nodelib/fs.walk": "^1.2.3", "glob-parent": "^5.1.2", "merge2": "^1.3.0", "micromatch": "^4.0.4" } }, "sha512-kNFPyjhh5cKjrUltxs+wFx+ZkbRaxxmZ+X0ZU31SOsxCEtP9VPgtq2teZw1DebupL5GmDaNQ6yKMMVcM41iqDg=="],
"fastq": ["fastq@1.15.0", "", { "dependencies": { "reusify": "^1.0.4" } }, "sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw=="],
"fdir": ["fdir@6.1.0", "", { "peerDependencies": { "picomatch": "2.x" } }, "sha512-274qhz5PxNnA/fybOu6apTCUnM0GnO3QazB6VH+oag/7DQskdYq8lm07ZSm90kEQuWYH5GvjAxGruuHrEr0bcg=="],
"fetch-blob": ["fetch-blob@3.2.0", "", { "dependencies": { "node-domexception": "^1.0.0", "web-streams-polyfill": "^3.0.3" } }, "sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ=="],
"fill-range": ["fill-range@7.0.1", "", { "dependencies": { "to-regex-range": "^5.0.1" } }, "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ=="],
"formdata-polyfill": ["formdata-polyfill@4.0.10", "", { "dependencies": { "fetch-blob": "^3.1.2" } }, "sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g=="],
"from": ["from@0.1.7", "", {}, "sha512-twe20eF1OxVxp/ML/kq2p1uc6KvFK/+vs8WjEbeKmV2He22MKm7YF2ANIt+EOqhJ5L3K/SuuPhk0hWQDjOM23g=="],
"fs-extra": ["fs-extra@11.2.0", "", { "dependencies": { "graceful-fs": "^4.2.0", "jsonfile": "^6.0.1", "universalify": "^2.0.0" } }, "sha512-PmDi3uwK5nFuXh7XDTlVnS17xJS7vW36is2+w3xcv8SVxiB4NyATf4ctkVY5bkSjX0Y4nbvZCq1/EjtEyr9ktw=="],
"fx": ["fx@31.0.0", "", { "bin": { "fx": "index.js" } }, "sha512-OoeYSPKqNKmfnH4s+rGYI0c8OZmqqOOXsUtqy0YyHqQQoQSDiDs3m3M9uXKx5OQR+jDx7/FhYqpO3kl/As/xgg=="],
"gensync": ["gensync@1.0.0-beta.2", "", {}, "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg=="],
"get-east-asian-width": ["get-east-asian-width@1.2.0", "", {}, "sha512-2nk+7SIVb14QrgXFHcm84tD4bKQz0RxPuMT8Ag5KPOq7J5fEmAg0UbXdTOSHqNuHSU28k55qnceesxXRZGzKWA=="],
"get-stream": ["get-stream@8.0.1", "", {}, "sha512-VaUJspBffn/LMCJVoMvSAdmscJyS1auj5Zulnn5UoYcY531UWmdwhRWkcGKnGU93m5HSXP9LP2usOryrBtQowA=="],
"glob-parent": ["glob-parent@5.1.2", "", { "dependencies": { "is-glob": "^4.0.1" } }, "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow=="],
"globals": ["globals@11.12.0", "", {}, "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA=="],
"globby": ["globby@13.2.2", "", { "dependencies": { "dir-glob": "^3.0.1", "fast-glob": "^3.3.0", "ignore": "^5.2.4", "merge2": "^1.4.1", "slash": "^4.0.0" } }, "sha512-Y1zNGV+pzQdh7H39l9zgB4PJqjRNqydvdYCDG4HFXM4XuvSaQQlEc91IU1yALL8gUTDomgBAfz3XJdmUS+oo0w=="],
"graceful-fs": ["graceful-fs@4.2.11", "", {}, "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ=="],
"has-flag": ["has-flag@3.0.0", "", {}, "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw=="],
"human-signals": ["human-signals@5.0.0", "", {}, "sha512-AXcZb6vzzrFAUE61HnN4mpLqd/cSIwNQjtNWR0euPm6y0iqx3G4gOXaIDdtdDwZmhwe82LA6+zinmW4UBWVePQ=="],
"ignore": ["ignore@5.3.0", "", {}, "sha512-g7dmpshy+gD7mh88OC9NwSGTKoc3kyLAZQRU1mt53Aw/vnvfXnbC+F/7F7QoYVKbV+KNvJx8wArewKy1vXMtlg=="],
"is-arrayish": ["is-arrayish@0.3.2", "", {}, "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ=="],
"is-extglob": ["is-extglob@2.1.1", "", {}, "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ=="],
"is-glob": ["is-glob@4.0.3", "", { "dependencies": { "is-extglob": "^2.1.1" } }, "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg=="],
"is-number": ["is-number@7.0.0", "", {}, "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng=="],
"is-stream": ["is-stream@3.0.0", "", {}, "sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA=="],
"isexe": ["isexe@2.0.0", "", {}, "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw=="],
"js-tokens": ["js-tokens@4.0.0", "", {}, "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ=="],
"jsesc": ["jsesc@2.5.2", "", { "bin": { "jsesc": "bin/jsesc" } }, "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA=="],
"json5": ["json5@2.2.3", "", { "bin": { "json5": "lib/cli.js" } }, "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg=="],
"jsonfile": ["jsonfile@6.1.0", "", { "dependencies": { "universalify": "^2.0.0" }, "optionalDependencies": { "graceful-fs": "^4.1.6" } }, "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ=="],
"lodash": ["lodash@4.17.21", "", {}, "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="],
"loose-envify": ["loose-envify@1.4.0", "", { "dependencies": { "js-tokens": "^3.0.0 || ^4.0.0" }, "bin": { "loose-envify": "cli.js" } }, "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q=="],
"lru-cache": ["lru-cache@5.1.1", "", { "dependencies": { "yallist": "^3.0.2" } }, "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w=="],
"map-stream": ["map-stream@0.1.0", "", {}, "sha512-CkYQrPYZfWnu/DAmVCpTSX/xHpKZ80eKh2lAkyA6AJTef6bW+6JpbQZN5rofum7da+SyN1bi5ctTm+lTfcCW3g=="],
"merge-stream": ["merge-stream@2.0.0", "", {}, "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w=="],
"merge2": ["merge2@1.4.1", "", {}, "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg=="],
"micromatch": ["micromatch@4.0.5", "", { "dependencies": { "braces": "^3.0.2", "picomatch": "^2.3.1" } }, "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA=="],
"mimic-fn": ["mimic-fn@4.0.0", "", {}, "sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw=="],
"minimist": ["minimist@1.2.8", "", {}, "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA=="],
"mitata": ["mitata@1.0.25", "", {}, "sha512-0v5qZtVW5vwj9FDvYfraR31BMDcRLkhSFWPTLaxx/Z3/EvScfVtAAWtMI2ArIbBcwh7P86dXh0lQWKiXQPlwYA=="],
"ms": ["ms@2.1.2", "", {}, "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="],
"node-domexception": ["node-domexception@1.0.0", "", {}, "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ=="],
"node-fetch": ["node-fetch@3.3.1", "", { "dependencies": { "data-uri-to-buffer": "^4.0.0", "fetch-blob": "^3.1.4", "formdata-polyfill": "^4.0.10" } }, "sha512-cRVc/kyto/7E5shrWca1Wsea4y6tL9iYJE5FBCius3JQfb/4P4I295PfhgbJQBLTx6lATE4z+wK0rPM4VS2uow=="],
"node-releases": ["node-releases@2.0.10", "", {}, "sha512-5GFldHPXVG/YZmFzJvKK2zDSzPKhEp0+ZR5SVaoSag9fsL5YgHbUHDfnG5494ISANDcK4KwPXAx2xqVEydmd7w=="],
"npm-run-path": ["npm-run-path@5.2.0", "", { "dependencies": { "path-key": "^4.0.0" } }, "sha512-W4/tgAXFqFA0iL7fk0+uQ3g7wkL8xJmx3XdK0VGb4cHW//eZTtKGvFBBoRKVTpY7n6ze4NL9ly7rgXcHufqXKg=="],
"onetime": ["onetime@6.0.0", "", { "dependencies": { "mimic-fn": "^4.0.0" } }, "sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ=="],
"path-key": ["path-key@3.1.1", "", {}, "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q=="],
"path-type": ["path-type@4.0.0", "", {}, "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw=="],
"pause-stream": ["pause-stream@0.0.11", "", { "dependencies": { "through": "~2.3" } }, "sha512-e3FBlXLmN/D1S+zHzanP4E/4Z60oFAa3O051qt1pxa7DEJWKAyil6upYVXCWadEnuoqa4Pkc9oUx9zsxYeRv8A=="],
"picocolors": ["picocolors@1.0.0", "", {}, "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ=="],
"picomatch": ["picomatch@2.3.1", "", {}, "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA=="],
"platform": ["platform@1.3.6", "", {}, "sha512-fnWVljUchTro6RiCFvCXBbNhJc2NijN7oIQxbwsyL0buWJPG85v81ehlHI9fXrJsMNgTofEoWIQeClKpgxFLrg=="],
"ps-tree": ["ps-tree@1.2.0", "", { "dependencies": { "event-stream": "=3.3.4" }, "bin": { "ps-tree": "./bin/ps-tree.js" } }, "sha512-0VnamPPYHl4uaU/nSFeZZpR21QAWRz+sRv4iW9+v/GS/J5U5iZB5BNN6J0RMoOvdx2gWM2+ZFMIm58q24e4UYA=="],
"queue-microtask": ["queue-microtask@1.2.3", "", {}, "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A=="],
"react": ["react@18.3.1", "", { "dependencies": { "loose-envify": "^1.1.0" } }, "sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ=="],
"react-dom": ["react-dom@18.3.1", "", { "dependencies": { "loose-envify": "^1.1.0", "scheduler": "^0.23.2" }, "peerDependencies": { "react": "^18.3.1" } }, "sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw=="],
"reusify": ["reusify@1.0.4", "", {}, "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw=="],
"run-parallel": ["run-parallel@1.2.0", "", { "dependencies": { "queue-microtask": "^1.2.2" } }, "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA=="],
"scheduler": ["scheduler@0.23.2", "", { "dependencies": { "loose-envify": "^1.1.0" } }, "sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ=="],
"semver": ["semver@6.3.0", "", { "bin": { "semver": "./bin/semver.js" } }, "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw=="],
"shebang-command": ["shebang-command@2.0.0", "", { "dependencies": { "shebang-regex": "^3.0.0" } }, "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA=="],
"shebang-regex": ["shebang-regex@3.0.0", "", {}, "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A=="],
"signal-exit": ["signal-exit@4.1.0", "", {}, "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw=="],
"simple-swizzle": ["simple-swizzle@0.2.2", "", { "dependencies": { "is-arrayish": "^0.3.1" } }, "sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg=="],
"slash": ["slash@4.0.0", "", {}, "sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew=="],
"split": ["split@0.3.3", "", { "dependencies": { "through": "2" } }, "sha512-wD2AeVmxXRBoX44wAycgjVpMhvbwdI2aZjCkvfNcH1YqHQvJVa1duWc73OyVGJUc05fhFaTZeQ/PYsrmyH0JVA=="],
"stream-combiner": ["stream-combiner@0.0.4", "", { "dependencies": { "duplexer": "~0.1.1" } }, "sha512-rT00SPnTVyRsaSz5zgSPma/aHSOic5U1prhYdRy5HS2kTZviFpmDgzilbtsJsxiroqACmayynDN/9VzIbX5DOw=="],
"string-width": ["string-width@7.1.0", "", { "dependencies": { "emoji-regex": "^10.3.0", "get-east-asian-width": "^1.0.0", "strip-ansi": "^7.1.0" } }, "sha512-SEIJCWiX7Kg4c129n48aDRwLbFb2LJmXXFrWBG4NGaRtMQ3myKPKbwrD1BKqQn74oCoNMBVrfDEr5M9YxCsrkw=="],
"strip-ansi": ["strip-ansi@7.1.0", "", { "dependencies": { "ansi-regex": "^6.0.1" } }, "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ=="],
"strip-final-newline": ["strip-final-newline@3.0.0", "", {}, "sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw=="],
"supports-color": ["supports-color@5.5.0", "", { "dependencies": { "has-flag": "^3.0.0" } }, "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow=="],
"through": ["through@2.3.8", "", {}, "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg=="],
"tinycolor2": ["tinycolor2@1.6.0", "", {}, "sha512-XPaBkWQJdsf3pLKJV9p4qN/S+fm2Oj8AIPo1BTUhg5oxkvm9+SVEGFdhyOz7tTdUTfvxMiAs4sp6/eZO2Ew+pw=="],
"to-fast-properties": ["to-fast-properties@2.0.0", "", {}, "sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog=="],
"to-regex-range": ["to-regex-range@5.0.1", "", { "dependencies": { "is-number": "^7.0.0" } }, "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ=="],
"undici-types": ["undici-types@5.26.5", "", {}, "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA=="],
"universalify": ["universalify@2.0.1", "", {}, "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw=="],
"update-browserslist-db": ["update-browserslist-db@1.0.10", "", { "dependencies": { "escalade": "^3.1.1", "picocolors": "^1.0.0" }, "peerDependencies": { "browserslist": ">= 4.21.0" }, "bin": { "browserslist-lint": "cli.js" } }, "sha512-OztqDenkfFkbSG+tRxBeAnCVPckDBcvibKd35yDONx6OU8N7sqgwc7rCbkJ/WcYtVRZ4ba68d6byhC21GFh7sQ=="],
"web-streams-polyfill": ["web-streams-polyfill@3.3.2", "", {}, "sha512-3pRGuxRF5gpuZc0W+EpwQRmCD7gRqcDOMt688KmdlDAgAyaB1XlN0zq2njfDNm44XVdIouE7pZ6GzbdyH47uIQ=="],
"webpod": ["webpod@0.0.2", "", { "bin": { "webpod": "dist/index.js" } }, "sha512-cSwwQIeg8v4i3p4ajHhwgR7N6VyxAf+KYSSsY6Pd3aETE+xEU4vbitz7qQkB0I321xnhDdgtxuiSfk5r/FVtjg=="],
"which": ["which@3.0.1", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "bin/which.js" } }, "sha512-XA1b62dzQzLfaEOSQFTCOd5KFf/1VSzZo7/7TUjnya6u0vGGKzU96UQBZTAThCb2j4/xjBAyii1OhRLJEivHvg=="],
"yallist": ["yallist@3.1.1", "", {}, "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g=="],
"yaml": ["yaml@2.3.4", "", {}, "sha512-8aAvwVUSHpfEqTQ4w/KMlf3HcRdt50E5ODIQJBw1fQ5RL34xabzxtUlzTXVqc4rkZsPbvrXKWnABCD7kWSmocA=="],
"zx": ["zx@7.2.3", "", { "dependencies": { "@types/fs-extra": "^11.0.1", "@types/minimist": "^1.2.2", "@types/node": "^18.16.3", "@types/ps-tree": "^1.1.2", "@types/which": "^3.0.0", "chalk": "^5.2.0", "fs-extra": "^11.1.1", "fx": "*", "globby": "^13.1.4", "minimist": "^1.2.8", "node-fetch": "3.3.1", "ps-tree": "^1.2.0", "webpod": "^0", "which": "^3.0.0", "yaml": "^2.2.2" }, "bin": { "zx": "build/cli.js" } }, "sha512-QODu38nLlYXg/B/Gw7ZKiZrvPkEsjPN3LQ5JFXM7h0JvwhEdPNNl+4Ao1y4+o3CLNiDUNcwzQYZ4/Ko7kKzCMA=="],
"@babel/generator/@jridgewell/gen-mapping": ["@jridgewell/gen-mapping@0.3.2", "", { "dependencies": { "@jridgewell/set-array": "^1.0.1", "@jridgewell/sourcemap-codec": "^1.4.10", "@jridgewell/trace-mapping": "^0.3.9" } }, "sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A=="],
"@babel/highlight/chalk": ["chalk@2.4.2", "", { "dependencies": { "ansi-styles": "^3.2.1", "escape-string-regexp": "^1.0.5", "supports-color": "^5.3.0" } }, "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ=="],
"ansi-styles/color-convert": ["color-convert@1.9.3", "", { "dependencies": { "color-name": "1.1.3" } }, "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg=="],
"cross-spawn/which": ["which@2.0.2", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "./bin/node-which" } }, "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA=="],
"npm-run-path/path-key": ["path-key@4.0.0", "", {}, "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ=="],
"ansi-styles/color-convert/color-name": ["color-name@1.1.3", "", {}, "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw=="],
}
}

Some files were not shown because too many files have changed in this diff