Compare commits


2 Commits

Author          SHA1        Message                                      Date
Ashcon Partovi  147826d93c  Add todoIf, fixme, and fixmeIf to bun:test   2024-02-21 17:16:04 -08:00
Ashcon Partovi  c0a2073dd5  Read .env file in .vscode/launch.json        2024-02-21 16:17:48 -08:00
866 changed files with 35,776 additions and 110,611 deletions

View File

@@ -1,50 +0,0 @@
name: Setup Bun
description: An internal version of the 'oven-sh/setup-bun' action.
inputs:
bun-version:
type: string
description: "The version of bun to install: 'latest', 'canary', 'bun-v1.0.0', etc."
default: latest
required: false
baseline:
type: boolean
description: "Whether to use the baseline version of bun."
default: false
required: false
download-url:
type: string
description: "The base URL to download bun from."
default: "https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases"
required: false
runs:
using: composite
steps:
- name: Setup Bun
shell: bash
run: |
case "$(uname -s)" in
Linux*) os=linux;;
Darwin*) os=darwin;;
*) os=windows;;
esac
case "$(uname -m)" in
arm64 | aarch64) arch=arm64;;
*) arch=x64;;
esac
case "${{ inputs.baseline }}" in
true | 1) target="bun-${os}-${arch}-baseline";;
*) target="bun-${os}-${arch}";;
esac
case "${{ inputs.bun-version }}" in
latest) release="latest";;
canary) release="canary";;
*) release="bun-v${{ inputs.bun-version }}";;
esac
curl -LO "${{ inputs.download-url }}/${release}/${target}.zip"
unzip ${target}.zip
mkdir -p ${{ runner.temp }}/.bun/bin
mv ${target}/bun* ${{ runner.temp }}/.bun/bin/
chmod +x ${{ runner.temp }}/.bun/bin/*
echo "${{ runner.temp }}/.bun/bin" >> ${GITHUB_PATH}

View File

@@ -9,7 +9,6 @@ on:
branches:
- main
paths:
- ".github/workflows/bun-linux-aarch64.yml"
- "src/**/*"
- "test/**/*"
- "packages/bun-usockets/src/**/*"
@@ -18,11 +17,11 @@ on:
- "build.zig"
- "Makefile"
- "Dockerfile"
- ".github/workflows/bun-linux-aarch64.yml"
pull_request:
branches:
- main
paths:
- ".github/workflows/bun-linux-aarch64.yml"
- "src/**/*"
- "test/**/*"
- "packages/bun-usockets/src/**/*"
@@ -31,6 +30,7 @@ on:
- "build.zig"
- "Makefile"
- "Dockerfile"
- ".github/workflows/bun-linux-aarch64.yml"
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
@@ -51,14 +51,14 @@ jobs:
runner: linux-arm64
build_machine_arch: aarch64
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
with:
submodules: false
ref: ${{github.sha}}
clean: true
- run: |
bash ./scripts/update-submodules.sh
- uses: docker/setup-buildx-action@v3
- uses: docker/setup-buildx-action@v2
id: buildx
with:
install: true
@@ -66,7 +66,7 @@ jobs:
run: |
rm -rf ${{runner.temp}}/release
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
@@ -74,7 +74,7 @@ jobs:
- run: |
mkdir -p /tmp/.buildx-cache-${{matrix.tag}}
- name: Build and push
uses: docker/build-push-action@v5
uses: docker/build-push-action@v3
with:
context: .
push: false
@@ -113,16 +113,14 @@ jobs:
zip -r bun-${{matrix.tag}}-profile.zip bun-${{matrix.tag}}-profile
zip -r bun-${{matrix.tag}}.zip bun-${{matrix.tag}}
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
with:
name: bun-${{matrix.tag}}-profile
path: ${{runner.temp}}/release/bun-${{matrix.tag}}-profile.zip
if-no-files-found: "error"
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
with:
name: bun-${{matrix.tag}}
path: ${{runner.temp}}/release/bun-${{matrix.tag}}.zip
if-no-files-found: "error"
- name: Release
id: release
uses: ncipollo/release-action@v1

View File

@@ -9,7 +9,6 @@ on:
branches:
- main
paths:
- ".github/workflows/bun-linux-build.yml"
- "src/**/*"
- "test/**/*"
- "packages/bun-usockets/src/**/*"
@@ -22,7 +21,6 @@ on:
branches:
- main
paths:
- ".github/workflows/bun-linux-build.yml"
- "src/**/*"
- "test/**/*"
- "packages/bun-usockets/src/**/*"
@@ -49,7 +47,7 @@ jobs:
tag: linux-x64
arch: x86_64
build_arch: amd64
runner: namespace-profile-bun-linux-x64
runner: big-ubuntu
build_machine_arch: x86_64
assertions: "OFF"
zig_optimize: "ReleaseFast"
@@ -58,7 +56,7 @@ jobs:
tag: linux-x64-baseline
arch: x86_64
build_arch: amd64
runner: namespace-profile-bun-linux-x64
runner: big-ubuntu
build_machine_arch: x86_64
assertions: "OFF"
zig_optimize: "ReleaseFast"
@@ -88,20 +86,28 @@ jobs:
submodules: recursive
ref: ${{github.sha}}
clean: true
- uses: docker/setup-buildx-action@v2
id: buildx
with:
install: true
- name: Run
run: |
rm -rf ${{runner.temp}}/release
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- run: |
mkdir -p /tmp/.buildx-cache-${{matrix.tag}}
- name: Build and push
uses: docker/build-push-action@v5
uses: docker/build-push-action@v3
with:
context: .
push: false
cache-from: type=local,src=/tmp/.buildx-cache-${{matrix.tag}}
cache-to: type=local,dest=/tmp/.buildx-cache-${{matrix.tag}}
build-args: |
ARCH=${{matrix.arch}}
BUILDARCH=${{matrix.build_arch}}
@@ -110,6 +116,12 @@ jobs:
GIT_SHA=${{github.sha}}
ASSERTIONS=${{matrix.assertions}}
ZIG_OPTIMIZE=${{matrix.zig_optimize}}
SCCACHE_BUCKET=bun
SCCACHE_REGION=auto
SCCACHE_S3_USE_SSL=true
SCCACHE_ENDPOINT=${{ secrets.CACHE_S3_ENDPOINT }}
AWS_ACCESS_KEY_ID=${{ secrets.CACHE_S3_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY=${{ secrets.CACHE_S3_SECRET_ACCESS_KEY }}
platforms: linux/${{matrix.build_arch}}
target: ${{matrix.target}}
outputs: type=local,dest=${{runner.temp}}/release
@@ -142,16 +154,22 @@ jobs:
zip -r bun-${{matrix.tag}}-profile.zip bun-${{matrix.tag}}-profile
zip -r bun-${{matrix.tag}}.zip bun-${{matrix.tag}}
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
with:
name: bun-${{matrix.tag}}-profile
path: ${{runner.temp}}/release/bun-${{matrix.tag}}-profile.zip
if-no-files-found: "error"
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
with:
name: bun-${{matrix.tag}}
path: ${{runner.temp}}/release/bun-${{matrix.tag}}.zip
if-no-files-found: "error"
- uses: actions/upload-artifact@v3
with:
name: bun-obj-${{matrix.tag}}
path: ${{runner.temp}}/release/bun-obj
- uses: actions/upload-artifact@v3
with:
name: ${{matrix.tag}}-dependencies
path: ${{runner.temp}}/release/bun-dependencies
- name: Release
id: release
uses: ncipollo/release-action@v1
@@ -190,7 +208,7 @@ jobs:
[Commit ${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})
linux-test:
name: Tests ${{matrix.tag}}
runs-on: namespace-profile-bun-linux-x64
runs-on: ubuntu-latest
needs: [linux]
if: github.event_name == 'pull_request'
timeout-minutes: 20
@@ -216,7 +234,7 @@ jobs:
clean: true
- id: download
name: Download
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: bun-${{matrix.tag}}
path: ${{runner.temp}}/release
@@ -233,18 +251,16 @@ jobs:
run: |
# If this hangs, it means something is seriously wrong with the build
bun --version
- id: install-dependnecies
name: Install dependencies
run: |
sudo apt-get update && sudo apt-get install -y openssl
bun install --verbose
bun install --cwd=test --verbose
bun install --cwd=packages/bun-internal-test --verbose
bun install --cwd=test/js/third_party/prisma --verbose
# Split these into multiple steps to make it clear which one fails
- name: Install dependencies (apt-get)
run: sudo apt-get update && sudo apt-get install -y openssl
- name: Install dependencies (root)
run: bun install --verbose
- name: Install dependencies (test)
run: bun install --cwd=test --verbose
- name: Install dependencies (runner)
run: bun install --cwd=packages/bun-internal-test --verbose
- name: Install dependencies (prisma)
run: bun install --cwd=test/js/third_party/prisma --verbose
# This is disabled because the cores are ~5.5gb each
# so it is easy to hit 50gb coredump downloads. Only enable if you need to retrive one
@@ -259,19 +275,19 @@ jobs:
name: Test (node runner)
env:
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
TMPDIR: ${{runner.temp}}
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING: "true"
# if: ${{github.event.inputs.use_bun == 'false'}}
run: |
ulimit -c unlimited
ulimit -c
node packages/bun-internal-test/src/runner.node.mjs || true
# - uses: actions/upload-artifact@v4
# if: steps.test.outputs.failing_tests != ''
# with:
# name: cores
# path: /cores
# if-no-files-found: "error"
- uses: actions/upload-artifact@v3
if: steps.test.outputs.failing_tests != ''
with:
name: cores
path: /cores
- uses: sarisia/actions-status-discord@v1
if: always() && steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
with:

View File

@@ -37,7 +37,7 @@ on:
jobs:
macOS-zig:
name: macOS Zig Object
runs-on: namespace-profile-zig-build
runs-on: med-ubuntu
if: github.repository_owner == 'oven-sh'
strategy:
matrix:
@@ -47,18 +47,25 @@ jobs:
tag: bun-obj-darwin-aarch64
steps:
- uses: actions/checkout@v4
- name: Checkout submodules
run: git submodule update --init --recursive --depth=1 --progress --force
# - name: Checkout submodules
# run: git submodule update --init --recursive --depth=1 --progress --force
- name: Setup Docker Buildx
uses: docker/setup-buildx-action@v2
id: buildx
with:
install: true
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Compile Zig Object
uses: docker/build-push-action@v5
uses: docker/build-push-action@v3
if: runner.arch == 'X64'
with:
context: .
push: false
@@ -77,11 +84,10 @@ jobs:
outputs: type=local,dest=${{runner.temp}}/release
- name: Upload Zig Object
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.tag }}
path: ${{runner.temp}}/release/bun-zig.o
if-no-files-found: "error"
macOS-dependencies:
name: macOS Dependencies
@@ -135,7 +141,7 @@ jobs:
- name: Cache submodule dependencies
id: cache-deps-restore
uses: actions/cache/restore@v4
uses: actions/cache/restore@v3
with:
path: ${{runner.temp}}/bun-deps
key: bun-deps-${{ matrix.tag }}-${{ steps.submodule-versions.outputs.sha }}
@@ -153,17 +159,16 @@ jobs:
- name: Cache submodule dependencies
if: ${{ !steps.cache-deps-restore.outputs.cache-hit }}
id: cache-deps-save
uses: actions/cache/save@v4
uses: actions/cache/save@v3
with:
path: ${{runner.temp}}/bun-deps
key: ${{ steps.cache-deps-restore.outputs.cache-primary-key }}
- name: Upload submodule dependencies
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.tag }}-deps
path: ${{runner.temp}}/bun-deps
if-no-files-found: "error"
macOS-cpp:
name: macOS C++
@@ -223,7 +228,6 @@ jobs:
cmake -S $SOURCE_DIR -B $OBJ_DIR \
-G Ninja \
-DUSE_LTO=ON \
-DCMAKE_BUILD_TYPE=Release \
-DBUN_CPP_ONLY=1 \
-DNO_CONFIGURE_DEPENDS=1
@@ -231,11 +235,10 @@ jobs:
bash compile-cpp-only.sh -v
- name: Upload C++
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.tag }}-cpp
path: ${{ runner.temp }}/bun-cpp-obj/bun-cpp-objects.a
if-no-files-found: "error"
macOS-link:
name: macOS Link
runs-on: ${{ matrix.runner }}
@@ -282,19 +285,19 @@ jobs:
echo "${{ runner.temp }}/.bun/bin" >> $GITHUB_PATH
- name: Download C++
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{ matrix.tag }}-cpp
path: ${{ runner.temp }}/bun-cpp-obj
- name: Download Zig Object
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{ matrix.obj }}
path: ${{ runner.temp }}/release
- name: Downloaded submodule dependencies
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{ matrix.tag }}-deps
path: ${{runner.temp}}/bun-deps
@@ -309,7 +312,6 @@ jobs:
cmake $SRC_DIR \
-G Ninja \
-DCMAKE_BUILD_TYPE=Release \
-DUSE_LTO=ON \
-DBUN_LINK_ONLY=1 \
-DBUN_ZIG_OBJ="${{ runner.temp }}/release/bun-zig.o" \
-DBUN_CPP_ARCHIVE="${{ runner.temp }}/bun-cpp-obj/bun-cpp-objects.a" \
@@ -328,16 +330,14 @@ jobs:
zip -r ${{matrix.tag}}-profile.zip ${{matrix.tag}}-profile
zip -r ${{matrix.tag}}.zip ${{matrix.tag}}
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
with:
name: ${{matrix.tag}}-profile
path: ${{runner.temp}}/link-build/${{matrix.tag}}-profile.zip
if-no-files-found: "error"
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
with:
name: ${{matrix.tag}}
path: ${{runner.temp}}/link-build/${{matrix.tag}}.zip
if-no-files-found: "error"
- name: Release
id: release
uses: ncipollo/release-action@v1
@@ -394,12 +394,12 @@ jobs:
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
submodules: false
- id: download
name: Download
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{matrix.tag}}
path: ${{runner.temp}}/release
@@ -416,21 +416,18 @@ jobs:
run: |
# If this hangs, it means something is seriously wrong with the build
bun --version
# Split these into multiple steps to make it clear which one fails
- name: "Install dependencies (root)"
run: bun install --verbose
- name: "Install dependencies (test)"
run: bun install --cwd=test --verbose
- name: "Install dependencies (runner)"
run: bun install --cwd=packages/bun-internal-test --verbose
- id: install
name: Install dependencies
run: |
bun install --verbose
bun install --cwd=test --verbose
bun install --cwd=packages/bun-internal-test --verbose
- id: test
name: Test (node runner)
env:
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
TMPDIR: ${{runner.temp}}
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING: "true"
# if: ${{github.event.inputs.use_bun == 'false'}}
run: |
node packages/bun-internal-test/src/runner.node.mjs || true

View File

@@ -37,7 +37,7 @@ on:
jobs:
macos-object-files:
name: macOS Object
runs-on: namespace-profile-zig-build
runs-on: med-ubuntu
if: github.repository_owner == 'oven-sh'
strategy:
matrix:
@@ -53,19 +53,31 @@ jobs:
# tag: bun-obj-darwin-aarch64
steps:
- uses: actions/checkout@v4
- name: Checkout submodules
run: git submodule update --init --recursive --depth=1 --progress --force
- name: Setup Docker Buildx
uses: docker/setup-buildx-action@v2
id: buildx
with:
install: true
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Compile Zig Object
uses: docker/build-push-action@v5
uses: docker/build-push-action@v3
with:
context: .
push: false
# This doesnt seem to work
# cache-from: type=s3,endpoint_url=${{ secrets.CACHE_S3_ENDPOINT }},blobs_prefix=docker_blobs/,manifests_prefix=docker_manifests/,access_key_id=${{ secrets.CACHE_S3_ACCESS_KEY_ID }},secret_access_key=${{ secrets.CACHE_S3_SECRET_ACCESS_KEY }},bucket=bun,region=auto
# cache-to: type=s3,endpoint_url=${{ secrets.CACHE_S3_ENDPOINT }},blobs_prefix=docker_blobs/,manifests_prefix=docker_manifests/,access_key_id=${{ secrets.CACHE_S3_ACCESS_KEY_ID }},secret_access_key=${{ secrets.CACHE_S3_SECRET_ACCESS_KEY }},bucket=bun,region=auto
# This was used before, but also does not really work
cache-from: type=local,src=/tmp/.buildx-cache-${{matrix.tag}}
cache-to: type=local,dest=/tmp/.buildx-cache-${{matrix.tag}}
build-args: |
BUILDARCH=${{ runner.arch == 'X64' && 'amd64' || 'arm64' }}
BUILD_MACHINE_ARCH=${{ runner.arch == 'X64' && 'x86_64' || 'aarch64' }}
@@ -73,15 +85,22 @@ jobs:
CPU_TARGET=${{ matrix.cpu }}
TRIPLET=${{ matrix.arch }}-macos-none
GIT_SHA=${{ github.sha }}
SCCACHE_BUCKET=bun
SCCACHE_REGION=auto
SCCACHE_S3_USE_SSL=true
SCCACHE_ENDPOINT=${{ secrets.CACHE_S3_ENDPOINT }}
AWS_ACCESS_KEY_ID=${{ secrets.CACHE_S3_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY=${{ secrets.CACHE_S3_SECRET_ACCESS_KEY }}
platforms: linux/${{ runner.arch == 'X64' && 'amd64' || 'arm64' }}
target: build_release_obj
outputs: type=local,dest=${{runner.temp}}/release
- name: Upload Zig Object
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.tag }}
path: ${{runner.temp}}/release/bun-zig.o
if-no-files-found: "error"
macOS-dependencies:
name: macOS Dependencies
runs-on: ${{ matrix.runner }}
@@ -127,7 +146,7 @@ jobs:
- name: Cache submodule dependencies
id: cache-deps-restore
uses: actions/cache/restore@v4
uses: actions/cache/restore@v3
with:
path: ${{runner.temp}}/bun-deps
key: bun-deps-${{ matrix.tag }}-${{ steps.submodule-versions.outputs.sha }}
@@ -145,17 +164,16 @@ jobs:
- name: Cache submodule dependencies
if: ${{ !steps.cache-deps-restore.outputs.cache-hit }}
id: cache-deps-save
uses: actions/cache/save@v4
uses: actions/cache/save@v3
with:
path: ${{runner.temp}}/bun-deps
key: ${{ steps.cache-deps-restore.outputs.cache-primary-key }}
- name: Upload submodule dependencies
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.tag }}-deps
path: ${{runner.temp}}/bun-deps
if-no-files-found: "error"
macOS-cpp:
name: macOS C++
@@ -215,7 +233,6 @@ jobs:
cmake -S $SOURCE_DIR -B $OBJ_DIR \
-G Ninja \
-DUSE_LTO=ON \
-DCMAKE_BUILD_TYPE=Release \
-DBUN_CPP_ONLY=1 \
-DNO_CONFIGURE_DEPENDS=1
@@ -223,11 +240,10 @@ jobs:
bash compile-cpp-only.sh -v
- name: Upload C++
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.tag }}-cpp
path: ${{ runner.temp }}/bun-cpp-obj/bun-cpp-objects.a
if-no-files-found: "error"
macOS:
name: macOS Link
runs-on: ${{ matrix.runner }}
@@ -246,7 +262,7 @@ jobs:
runner: macos-12-large
artifact: bun-obj-darwin-x64-baseline
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Checkout submodules
run: git submodule update --init --recursive --depth=1 --progress --force
@@ -270,19 +286,19 @@ jobs:
echo "${{ runner.temp }}/.bun/bin" >> $GITHUB_PATH
- name: Download C++
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{ matrix.tag }}-cpp
path: ${{ runner.temp }}/bun-cpp-obj
- name: Download Zig Object
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{ matrix.obj }}
path: ${{ runner.temp }}/release
- name: Downloaded submodule dependencies
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{ matrix.tag }}-deps
path: ${{runner.temp}}/bun-deps
@@ -296,7 +312,6 @@ jobs:
cd ${{runner.temp}}/link-build
cmake $SRC_DIR \
-G Ninja \
-DUSE_LTO=ON \
-DCMAKE_BUILD_TYPE=Release \
-DBUN_LINK_ONLY=1 \
-DBUN_ZIG_OBJ="${{ runner.temp }}/release/bun-zig.o" \
@@ -316,16 +331,14 @@ jobs:
zip -r ${{matrix.tag}}-profile.zip ${{matrix.tag}}-profile
zip -r ${{matrix.tag}}.zip ${{matrix.tag}}
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
with:
name: ${{matrix.tag}}-profile
path: ${{runner.temp}}/link-build/${{matrix.tag}}-profile.zip
if-no-files-found: "error"
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
with:
name: ${{matrix.tag}}
path: ${{runner.temp}}/link-build/${{matrix.tag}}.zip
if-no-files-found: "error"
- name: Release
id: release
uses: ncipollo/release-action@v1
@@ -383,12 +396,12 @@ jobs:
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
submodules: false
- id: download
name: Download
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{matrix.tag}}
path: ${{runner.temp}}/release
@@ -415,10 +428,8 @@ jobs:
name: Test (node runner)
env:
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
TMPDIR: ${{runner.temp}}
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING: "true"
# if: ${{github.event.inputs.use_bun == 'false'}}
run: |
node packages/bun-internal-test/src/runner.node.mjs || true

View File

@@ -37,7 +37,7 @@ on:
jobs:
macOS-zig:
name: macOS Zig Object
runs-on: namespace-profile-zig-build
runs-on: med-ubuntu
if: github.repository_owner == 'oven-sh'
strategy:
matrix:
@@ -50,19 +50,31 @@ jobs:
tag: bun-obj-darwin-x64
steps:
- uses: actions/checkout@v4
- name: Checkout submodules
run: git submodule update --init --recursive --depth=1 --progress --force
- name: Setup Docker Buildx
uses: docker/setup-buildx-action@v2
id: buildx
with:
install: true
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Compile Zig Object
uses: docker/build-push-action@v5
uses: docker/build-push-action@v3
with:
context: .
push: false
# This doesnt seem to work
# cache-from: type=s3,endpoint_url=${{ secrets.CACHE_S3_ENDPOINT }},blobs_prefix=docker_blobs/,manifests_prefix=docker_manifests/,access_key_id=${{ secrets.CACHE_S3_ACCESS_KEY_ID }},secret_access_key=${{ secrets.CACHE_S3_SECRET_ACCESS_KEY }},bucket=bun,region=auto
# cache-to: type=s3,endpoint_url=${{ secrets.CACHE_S3_ENDPOINT }},blobs_prefix=docker_blobs/,manifests_prefix=docker_manifests/,access_key_id=${{ secrets.CACHE_S3_ACCESS_KEY_ID }},secret_access_key=${{ secrets.CACHE_S3_SECRET_ACCESS_KEY }},bucket=bun,region=auto
# This was used before, but also does not really work
cache-from: type=local,src=/tmp/.buildx-cache-${{matrix.tag}}
cache-to: type=local,dest=/tmp/.buildx-cache-${{matrix.tag}}
build-args: |
BUILDARCH=${{ runner.arch == 'X64' && 'amd64' || 'arm64' }}
BUILD_MACHINE_ARCH=${{ runner.arch == 'X64' && 'x86_64' || 'aarch64' }}
@@ -70,15 +82,22 @@ jobs:
CPU_TARGET=${{ matrix.cpu }}
TRIPLET=${{ matrix.arch }}-macos-none
GIT_SHA=${{ github.sha }}
SCCACHE_BUCKET=bun
SCCACHE_REGION=auto
SCCACHE_S3_USE_SSL=true
SCCACHE_ENDPOINT=${{ secrets.CACHE_S3_ENDPOINT }}
AWS_ACCESS_KEY_ID=${{ secrets.CACHE_S3_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY=${{ secrets.CACHE_S3_SECRET_ACCESS_KEY }}
platforms: linux/${{ runner.arch == 'X64' && 'amd64' || 'arm64' }}
target: build_release_obj
outputs: type=local,dest=${{runner.temp}}/release
- name: Upload Zig Object
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.tag }}
path: ${{runner.temp}}/release/bun-zig.o
if-no-files-found: "error"
macOS-dependencies:
name: macOS Dependencies
@@ -125,7 +144,7 @@ jobs:
- name: Cache submodule dependencies
id: cache-deps-restore
uses: actions/cache/restore@v4
uses: actions/cache/restore@v3
with:
path: ${{runner.temp}}/bun-deps
key: bun-deps-${{ matrix.tag }}-${{ steps.submodule-versions.outputs.sha }}
@@ -143,17 +162,16 @@ jobs:
- name: Cache submodule dependencies
if: ${{ !steps.cache-deps-restore.outputs.cache-hit }}
id: cache-deps-save
uses: actions/cache/save@v4
uses: actions/cache/save@v3
with:
path: ${{runner.temp}}/bun-deps
key: ${{ steps.cache-deps-restore.outputs.cache-primary-key }}
- name: Upload submodule dependencies
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.tag }}-deps
path: ${{runner.temp}}/bun-deps
if-no-files-found: "error"
macOS-cpp:
name: macOS C++
@@ -214,18 +232,16 @@ jobs:
cmake -S $SOURCE_DIR -B $OBJ_DIR \
-G Ninja \
-DCMAKE_BUILD_TYPE=Release \
-DUSE_LTO=ON \
-DBUN_CPP_ONLY=1 \
-DNO_CONFIGURE_DEPENDS=1
bash compile-cpp-only.sh -v
- name: Upload C++
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.tag }}-cpp
path: ${{ runner.temp }}/bun-cpp-obj/bun-cpp-objects.a
if-no-files-found: "error"
macOS:
name: macOS Link
runs-on: ${{ matrix.runner }}
@@ -244,7 +260,7 @@ jobs:
runner: macos-12-large
artifact: bun-obj-darwin-x64
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Checkout submodules
run: git submodule update --init --recursive --depth=1 --progress --force
@@ -268,19 +284,19 @@ jobs:
echo "${{ runner.temp }}/.bun/bin" >> $GITHUB_PATH
- name: Download C++
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{ matrix.tag }}-cpp
path: ${{ runner.temp }}/bun-cpp-obj
- name: Download Zig Object
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{ matrix.obj }}
path: ${{ runner.temp }}/release
- name: Downloaded submodule dependencies
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{ matrix.tag }}-deps
path: ${{runner.temp}}/bun-deps
@@ -295,7 +311,6 @@ jobs:
cmake $SRC_DIR \
-G Ninja \
-DCMAKE_BUILD_TYPE=Release \
-DUSE_LTO=ON \
-DBUN_LINK_ONLY=1 \
-DBUN_ZIG_OBJ="${{ runner.temp }}/release/bun-zig.o" \
-DBUN_CPP_ARCHIVE="${{ runner.temp }}/bun-cpp-obj/bun-cpp-objects.a" \
@@ -314,16 +329,14 @@ jobs:
zip -r ${{matrix.tag}}-profile.zip ${{matrix.tag}}-profile
zip -r ${{matrix.tag}}.zip ${{matrix.tag}}
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
with:
name: ${{matrix.tag}}-profile
path: ${{runner.temp}}/link-build/${{matrix.tag}}-profile.zip
if-no-files-found: "error"
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
with:
name: ${{matrix.tag}}
path: ${{runner.temp}}/link-build/${{matrix.tag}}.zip
if-no-files-found: "error"
- name: Release
id: release
uses: ncipollo/release-action@v1
@@ -380,12 +393,12 @@ jobs:
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
submodules: false
- id: download
name: Download
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{matrix.tag}}
path: ${{runner.temp}}/release
@@ -402,21 +415,18 @@ jobs:
run: |
# If this hangs, it means something is seriously wrong with the build
bun --version
# Split these into multiple steps to make it clear which one fails
- name: "Install dependencies (root)"
run: bun install --verbose
- name: "Install dependencies (test)"
run: bun install --cwd=test --verbose
- name: "Install dependencies (runner)"
run: bun install --cwd=packages/bun-internal-test --verbose
- id: install
name: Install dependencies
run: |
bun install --verbose
bun install --cwd=test --verbose
bun install --cwd=packages/bun-internal-test --verbose
- id: test
name: Test (node runner)
env:
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
TMPDIR: ${{runner.temp}}
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING: "true"
# if: ${{github.event.inputs.use_bun == 'false'}}
run: |
node packages/bun-internal-test/src/runner.node.mjs || true

View File

@@ -1,44 +0,0 @@
# This workflow tests bun-release's code and the packages to ensure that npm,
# yarn, and pnpm can install bun on all platforms. This does not test that bun
# itself works as it hardcodes 1.1.0 as the version to package.
name: bun-release-test
concurrency: release-test
on:
pull_request:
paths:
- "packages/bun-release/**"
- ".github/workflows/bun-release-test.yml"
jobs:
test-release-script:
name: Test Release Script
strategy:
matrix:
machine: [namespace-profile-bun-linux-x64, linux-arm64, macos-arm64, macos-12-large, windows-latest]
fail-fast: false
runs-on: ${{ matrix.machine }}
permissions:
contents: read
defaults:
run:
working-directory: packages/bun-release
timeout-minutes: 5
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: "1.1.0"
- name: Setup Node
uses: actions/setup-node@v4
with:
node-version: 20
- name: Install Dependencies
run: bun install && npm i -g pnpm yarn npm
- name: Release
run: bun upload-npm -- 1.1.0 test
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -51,7 +51,7 @@ jobs:
working-directory: packages/bun-release
steps:
- name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@v3
- name: Setup GPG
uses: crazy-max/ghaction-import-gpg@v5
with:
@@ -81,7 +81,7 @@ jobs:
working-directory: packages/bun-release
steps:
- name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@v3
- name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
@@ -105,7 +105,7 @@ jobs:
working-directory: packages/bun-types
steps:
- name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@v3
- name: Setup Node.js
uses: actions/setup-node@v3
with:
@@ -170,12 +170,12 @@ jobs:
suffix: -distroless
steps:
- name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@v3
- name: Setup Docker emulator
uses: docker/setup-qemu-action@v2
- id: buildx
name: Setup Docker buildx
uses: docker/setup-buildx-action@v3
uses: docker/setup-buildx-action@v2
with:
platforms: linux/amd64,linux/arm64
- id: metadata
@@ -192,12 +192,12 @@ jobs:
type=match,pattern=(bun-v)?(canary|\d+.\d+),group=2,value=${{ env.BUN_VERSION }},suffix=${{ matrix.suffix }}
type=match,pattern=(bun-v)?(canary|\d+),group=2,value=${{ env.BUN_VERSION }},suffix=${{ matrix.suffix }}
- name: Login to Docker
uses: docker/login-action@v3
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Push to Docker
uses: docker/build-push-action@v5
uses: docker/build-push-action@v3
with:
context: ./dockerhub/${{ matrix.dir || matrix.variant }}
platforms: linux/amd64,linux/arm64
@@ -216,7 +216,7 @@ jobs:
if: ${{ github.event_name == 'release' || github.event.inputs.use-homebrew == 'true' }}
steps:
- name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
repository: oven-sh/homebrew-bun
token: ${{ secrets.ROBOBUN_TOKEN }}
@@ -252,7 +252,7 @@ jobs:
working-directory: packages/bun-release
steps:
- name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@v3
- name: Setup Bun
uses: oven-sh/setup-bun@v1
with:

View File

@@ -18,7 +18,7 @@ jobs:
steps:
- name: Checkout repo
uses: actions/checkout@v4
uses: actions/checkout@v3
- name: Install bun
uses: oven-sh/setup-bun@v1

View File

@@ -18,7 +18,6 @@ on:
push:
branches: [main]
paths:
- ".github/workflows/*.yml"
- "src/**/*"
- "test/**/*"
- "packages/bun-usockets/src/**/*"
@@ -30,7 +29,6 @@ on:
pull_request:
branches: [main]
paths:
- ".github/workflows/*.yml"
- "src/**/*"
- "test/**/*"
- "packages/bun-usockets/src/**/*"
@@ -55,17 +53,20 @@ jobs:
cpu: [haswell, nehalem]
arch: [x86_64]
name: Zig Build
runs-on: namespace-profile-zig-build
runs-on: med-ubuntu
timeout-minutes: 60
if: github.repository_owner == 'oven-sh'
steps:
- run: git config --global core.autocrlf false && git config --global core.eol lf
- uses: actions/checkout@v4
- name: Checkout submodules
run: git submodule update --init --recursive --depth=1 --progress --force
- name: Setup Docker Buildx
uses: docker/setup-buildx-action@v2
id: buildx
with:
install: true
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
@@ -78,7 +79,8 @@ jobs:
echo "canary_revision=$(GITHUB_TOKEN="${{ secrets.GITHUB_TOKEN }}" bash ./scripts/calculate-canary-revision.sh --raw)" >> $GITHUB_OUTPUT
- name: Compile Zig Object
uses: docker/build-push-action@v5
uses: docker/build-push-action@v3
if: runner.arch == 'X64'
with:
context: .
push: false
@@ -92,7 +94,7 @@ jobs:
CPU_TARGET=${{ matrix.cpu }}
TRIPLET=${{ matrix.arch }}-windows-msvc
GIT_SHA=${{ github.sha }}
CANARY=0
CANARY=${{ env.canary == 'true' && steps.canary.outputs.canary_revision || '0' }}
ZIG_OPTIMIZE=ReleaseSafe
# TODO(@paperdave): enable ASSERTIONS=1
platforms: linux/${{ runner.arch == 'X64' && 'amd64' || 'arm64' }}
@@ -100,11 +102,10 @@ jobs:
outputs: type=local,dest=${{runner.temp}}/release
- name: Upload Zig Object
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-zig${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}
path: ${{runner.temp}}/release/bun-zig.o
if-no-files-found: "error"
windows-dependencies:
name: Dependencies
@@ -137,7 +138,7 @@ jobs:
- name: Try fetch dependencies
id: cache-deps-restore
uses: actions/cache/restore@v4
uses: actions/cache/restore@v3
with:
path: bun-deps
key: bun-deps-${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}-${{ steps.submodule-versions.outputs.sha }}
@@ -164,16 +165,15 @@ jobs:
.\scripts\all-dependencies.ps1
- name: Upload Dependencies
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-deps${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}
path: bun-deps/
if-no-files-found: "error"
- name: Cache Dependencies
if: ${{ !steps.cache-deps-restore.outputs.cache-hit }}
id: cache-deps-save
uses: actions/cache/save@v4
uses: actions/cache/save@v3
with:
path: bun-deps
key: ${{ steps.cache-deps-restore.outputs.cache-primary-key }}
@@ -204,11 +204,10 @@ jobs:
if: ${{ env.canary == 'true' }}
run: |
echo "canary_revision=$(GITHUB_TOKEN="${{ secrets.GITHUB_TOKEN }}" bash ./scripts/calculate-canary-revision.sh --raw)" > build-codegen-win32-x64/.canary_revision
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
with:
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-codegen
path: build-codegen-win32-x64/
if-no-files-found: "error"
windows-cpp:
name: C++ Build
@@ -229,7 +228,7 @@ jobs:
version: ${{ env.LLVM_VERSION }}
- run: choco install -y ninja
- name: Download Codegen
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-codegen
path: build
@@ -249,8 +248,7 @@ jobs:
# $env:AWS_SECRET_ACCESS_KEY="${{ secrets.CACHE_S3_SECRET_ACCESS_KEY }}"
# $SCCACHE="$PWD/${sczip}/${sczip}/sccache.exe"
# $CANARY_REVISION = if (Test-Path build/.canary_revision) { Get-Content build/.canary_revision } else { "0" }
$CANARY_REVISION = 0
$CANARY_REVISION = if (Test-Path build/.canary_revision) { Get-Content build/.canary_revision } else { "0" }
.\scripts\env.ps1 ${{ matrix.cpu == 'nehalem' && '-Baseline' || '' }}
.\scripts\update-submodules.ps1
@@ -265,11 +263,10 @@ jobs:
if ($LASTEXITCODE -ne 0) { throw "CMake configuration failed" }
.\compile-cpp-only.ps1 -v
if ($LASTEXITCODE -ne 0) { throw "C++ compilation failed" }
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
with:
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-cpp${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}
path: build/bun-cpp-objects.a
if-no-files-found: "error"
windows-link:
strategy:
@@ -291,22 +288,22 @@ jobs:
version: ${{ env.LLVM_VERSION }}
- run: choco install -y ninja
- name: Download Codegen
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-codegen
path: build
- name: Download Dependencies
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-deps${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}
path: bun-deps
- name: Download Zig Object
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-zig${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}
path: bun-zig
- name: Download C++ Objects
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-cpp${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}
path: bun-cpp
@@ -315,8 +312,7 @@ jobs:
.\scripts\update-submodules.ps1
.\scripts\env.ps1 ${{ matrix.cpu == 'nehalem' && '-Baseline' || '' }}
Set-Location build
# $CANARY_REVISION = if (Test-Path build/.canary_revision) { Get-Content build/.canary_revision } else { "0" }
$CANARY_REVISION = 0
$CANARY_REVISION = if (Test-Path build/.canary_revision) { Get-Content build/.canary_revision } else { "0" }
cmake .. -G Ninja -DCMAKE_BUILD_TYPE=Release `
-DNO_CODEGEN=1 `
-DNO_CONFIGURE_DEPENDS=1 `
@@ -340,16 +336,14 @@ jobs:
cp -r build\bun.pdb "$Dist\bun.pdb"
Compress-Archive "$Dist" "$Dist.zip"
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
with:
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}
path: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}.zip
if-no-files-found: "error"
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
with:
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}-profile
path: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}-profile.zip
if-no-files-found: "error"
- name: Release
id: release
uses: ncipollo/release-action@v1
@@ -404,12 +398,12 @@ jobs:
- run: git config --global core.autocrlf false && git config --global core.eol lf
- id: checkout
name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
submodules: false
- id: download
name: Download Release
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}-profile
path: ${{runner.temp}}/release
@@ -423,30 +417,28 @@ jobs:
uses: actions/setup-node@v4
with:
node-version: 20
- uses: secondlife/setup-cygwin@v3
with:
packages: bash
# Split these into multiple steps to make it clear which one fails
- name: Install dependencies (root)
run: bun install --verbose
- name: Install dependencies (test)
run: bun install --cwd=test --verbose
- name: Install dependencies (runner)
run: bun install --cwd=packages/bun-internal-test --verbose
- name: Install dependencies
run: |
# bun install --verbose
# bun install --cwd=test --verbose
# bun install --cwd=packages/bun-internal-test --verbose
npm install
cd test && npm install
cd ../packages/bun-internal-test && npm install
cd ../..
- id: test
name: Run tests
env:
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
TMPDIR: ${{runner.temp}}
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING: "true"
SHELLOPTS: igncr
BUN_PATH_BASE: ${{runner.temp}}
BUN_PATH: release/${{env.tag}}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}-profile/bun.exe
run: |
node packages/bun-internal-test/src/runner.node.mjs || true
shell: bash
try {
$ErrorActionPreference = "SilentlyContinue"
$null = node packages/bun-internal-test/src/runner.node.mjs ${{runner.temp}}/release/${{env.tag}}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}-profile/bun.exe || $true
} catch {}
$ErrorActionPreference = "Stop"
- uses: sarisia/actions-status-discord@v1
if: always() && steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
with:
@@ -464,7 +456,7 @@ jobs:
[Full Test Output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})
- uses: sarisia/actions-status-discord@v1
if: always() && steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
if: always() && steps.test.outputs.regressing_tests != '' && github.event_name == 'pull_request'
with:
title: ""
webhook: ${{ secrets.DISCORD_WEBHOOK }}
@@ -472,29 +464,26 @@ jobs:
noprefix: true
nocontext: true
description: |
Pull Request
### ❌ [${{github.event.pull_request.title}}](https://github.com/oven-sh/bun/pull/${{github.event.number}})
### ❌🪟 [${{github.event.pull_request.title}}](https://github.com/oven-sh/bun/pull/${{github.event.number}})
@${{ github.actor }}, there are ${{ steps.test.outputs.failing_tests_count }} files with test failures on bun-windows-${{ matrix.arch }}-${{ matrix.cpu }}
@${{ github.actor }}, there are **${{ steps.test.outputs.regressing_test_count }} test regressions** on Windows ${{ matrix.arch }}${{ matrix.cpu == 'nehalem' && ' Baseline' || '' }}
${{ steps.test.outputs.failing_tests }}
${{ steps.test.outputs.regressing_tests }}
**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
[Full Test Output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})
- name: Comment on PR
if: always() && steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
if: always() && steps.test.outputs.regressing_tests != '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: test-windows-${{ matrix.arch }}-${{ matrix.cpu }}
message: |
@${{ github.actor }} ${{ steps.test.outputs.failing_tests_count }} files with test failures on bun-windows-${{ matrix.arch }}-${{ matrix.cpu }}
### ❌🪟 @${{ github.actor }}, there are **${{ steps.test.outputs.regressing_test_count }} test regressions** on Windows ${{ matrix.arch }}${{ matrix.cpu == 'nehalem' && ' Baseline' || '' }}
${{ steps.test.outputs.failing_tests }}
${{ steps.test.outputs.regressing_tests }}
**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
[Full Test Output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})
- name: Uncomment on PR
if: steps.test.outputs.failing_tests == '' && github.event_name == 'pull_request'
if: steps.test.outputs.regressing_tests == '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: test-windows-${{ matrix.arch }}-${{ matrix.cpu }}
@@ -504,5 +493,5 @@ jobs:
✅🪟 Test regressions on Windows ${{ matrix.arch }}${{ matrix.cpu == 'nehalem' && ' Baseline' || '' }} have been resolved.
- id: fail
name: Fail the build
if: steps.test.outputs.failing_tests != ''
if: steps.test.outputs.regressing_tests != '' && github.event_name == 'pull_request'
run: exit 1

View File

@@ -1,8 +1,5 @@
name: autofix.ci # Must be named this for autofix.ci to work
permissions:
contents: read
on:
workflow_dispatch:
pull_request:
@@ -13,22 +10,24 @@ on:
env:
ZIG_VERSION: 0.12.0-dev.1828+225fe6ddb
permissions:
contents: read
jobs:
format:
name: format
runs-on: ${{ vars.RUNNER_LINUX_X64 || 'ubuntu-latest' }}
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
with:
sparse-checkout: |
.github
src
packages
test
bench
- name: Setup Bun
uses: ./.github/actions/setup-bun
uses: oven-sh/setup-bun@v1
with:
bun-version: "1.0.21"
- name: Setup Zig

.gitignore (vendored, 311 changed lines)
View File

@@ -1,143 +1,168 @@
.DS_Store
.env
.envrc
.eslintcache
.idea
.next
.ninja_deps
.ninja_log
.npm
.npm.gz
.parcel-cache
.swcrc
.trace
.uuid
.vs
.vscode/clang*
.vscode/cpp*
*.a
*.bc
*.big
*.blob
*.bun
*.crash
*.database
*.db
*.dmg
*.dSYM
*.jsb
*.lib
*.log
*.o
*.out.js
*.out.refresh.js
*.pdb
*.sqlite
*.tmp
*.trace
*.wat
*.zip
**/.verdaccio-db.json
**/*.dir
**/*.pdb
**/*.sln*
**/*.vcxproj*
**/package-lock.json
/.cache
/.webkit-cache
/build-*/
/bun-webkit
/kcov-out
/src/deps/libuv
/test-report.json
/test-report.md
/test.js
/test.ts
/testdir
build
build.ninja
bun-binary
bun-mimalloc
bun-nomimalloc
bun-singlehtreaded
bun-test-scratch
bun-zigld
cmake_install.cmake
CMakeCache.txt
CMakeFiles
cold-jsc-start
cold-jsc-start.d
compile_commands.json
cover
coverage
coverv
dist
esbuilddir
examples/lotta-modules/bun-nofscache
examples/lotta-modules/bun-old
examples/lotta-modules/bun-yday
failing-tests.txt
github
make-dev-stats.csv
misctools/fetch
misctools/machbench
misctools/sha
myscript.sh
node_modules
node_modules_*
out
out.*
outcss
outdir
outdir/
packages/*/*.wasm
packages/bun-*/*.o
packages/bun-*/bun
packages/bun-*/bun-profile
packages/bun-*/debug-bun
packages/bun-cli/bin/*
packages/bun-cli/postinstall.js
packages/bun-wasm/*.cjs
packages/bun-wasm/*.d.cts
packages/bun-wasm/*.d.mts
packages/bun-wasm/*.d.ts
packages/bun-wasm/*.js
packages/bun-wasm/*.map
packages/bun-wasm/*.mjs
packages/debug-*
parceldist
pnpm-lock.yaml
profile.json
README.md.template
release/
sign.*.json
sign.json
src/bun.js/bindings-obj
src/bun.js/bindings/GeneratedJS2Native.zig
src/bun.js/debug-bindings-obj
src/deps/c-ares/build
src/deps/libiconv
src/deps/openssl
src/deps/PLCrashReporter/
src/deps/s2n-tls
src/deps/zig-clap/.gitattributes
src/deps/zig-clap/.github
src/deps/zig-clap/example
src/deps/zig-clap/README.md
src/fallback.version
src/js/out/DebugPath.h
src/js/out/functions*
src/js/out/modules*
src/js/out/tmp
src/node-fallbacks/node_modules
src/node-fallbacks/out/*
src/runtime.version
src/tests.zig
test.txt
test/js/bun/glob/fixtures
tsconfig.tsbuildinfo
txt.js
x64
yarn.lock
zig-cache
zig-out
.DS_Store
zig-cache
packages/*/*.wasm
*.o
*.a
profile.json
.env
node_modules
.envrc
.swcrc
yarn.lock
dist
*.tmp
*.log
*.out.js
*.out.refresh.js
**/package-lock.json
build
*.wat
zig-out
pnpm-lock.yaml
README.md.template
src/deps/zig-clap/example
src/deps/zig-clap/README.md
src/deps/zig-clap/.github
src/deps/zig-clap/.gitattributes
out
outdir
.trace
cover
coverage
coverv
*.trace
github
out.*
out
.parcel-cache
esbuilddir
*.bun
parceldist
esbuilddir
outdir/
outcss
.next
txt.js
.idea
.vscode/cpp*
.vscode/clang*
node_modules_*
*.jsb
*.zip
bun-zigld
bun-singlehtreaded
bun-nomimalloc
bun-mimalloc
examples/lotta-modules/bun-yday
examples/lotta-modules/bun-old
examples/lotta-modules/bun-nofscache
src/node-fallbacks/out/*
src/node-fallbacks/node_modules
sign.json
release/
*.dmg
sign.*.json
packages/debug-*
packages/bun-cli/postinstall.js
packages/bun-*/bun
packages/bun-*/bun-profile
packages/bun-*/debug-bun
packages/bun-*/*.o
packages/bun-cli/postinstall.js
packages/bun-cli/bin/*
bun-test-scratch
misctools/fetch
src/deps/libiconv
src/deps/openssl
src/tests.zig
*.blob
src/deps/s2n-tls
.npm
.npm.gz
bun-binary
src/deps/PLCrashReporter/
*.dSYM
*.crash
misctools/sha
packages/bun-wasm/*.mjs
packages/bun-wasm/*.cjs
packages/bun-wasm/*.map
packages/bun-wasm/*.js
packages/bun-wasm/*.d.ts
packages/bun-wasm/*.d.cts
packages/bun-wasm/*.d.mts
*.bc
src/fallback.version
src/runtime.version
*.sqlite
*.database
*.db
misctools/machbench
*.big
.eslintcache
/bun-webkit
src/deps/c-ares/build
src/bun.js/bindings-obj
src/bun.js/debug-bindings-obj
failing-tests.txt
test.txt
myscript.sh
cold-jsc-start
cold-jsc-start.d
/testdir
/test.ts
/test.js
src/js/out/modules*
src/js/out/functions*
src/js/out/tmp
src/js/out/DebugPath.h
make-dev-stats.csv
.uuid
tsconfig.tsbuildinfo
test/js/bun/glob/fixtures
*.lib
*.pdb
CMakeFiles
build.ninja
.ninja_deps
.ninja_log
CMakeCache.txt
cmake_install.cmake
compile_commands.json
*.lib
x64
**/*.vcxproj*
**/*.sln*
**/*.dir
**/*.pdb
/.webkit-cache
/.cache
/src/deps/libuv
/build-*/
.vs
**/.verdaccio-db.json
/test-report.md
/test-report.json

.gitmodules (vendored, 9 changed lines)
View File

@@ -82,11 +82,4 @@ url = https://github.com/litespeedtech/ls-hpack.git
ignore = dirty
depth = 1
shallow = true
fetchRecurseSubmodules = false
[submodule "zig"]
path = src/deps/zig
url = https://github.com/oven-sh/zig
branch = bun
depth = 1
shallow = true
fetchRecurseSubmodules = false
fetchRecurseSubmodules = false

View File

@@ -1,6 +0,0 @@
src/bun.js/WebKit
src/deps
test/snapshots
test/js/deno
src/react-refresh.js
*.min.js

View File

@@ -1,15 +0,0 @@
{
"arrowParens": "avoid",
"printWidth": 120,
"trailingComma": "all",
"useTabs": false,
"quoteProps": "preserve",
"overrides": [
{
"files": ["*.md"],
"options": {
"printWidth": 80
}
}
]
}

View File

@@ -55,7 +55,6 @@
"name": "BunWithJSCDebug",
"forcedInclude": ["${workspaceFolder}/src/bun.js/bindings/root.h"],
"includePath": [
"${workspaceFolder}/build/codegen",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/ICU/Headers/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/JavaScriptCore/PrivateHeaders/",

View File

@@ -11,7 +11,7 @@
// JavaScript
"oven.bun-vscode",
"esbenp.prettier-vscode",
"biomejs.biome",
// TypeScript
"better-ts-errors.better-ts-errors",
@@ -28,6 +28,6 @@
"tamasfe.even-better-toml",
// Other
"bierner.comment-tagged-templates"
"bierner.comment-tagged-templates",
]
}

.vscode/launch.json (generated, vendored, 353 changed lines)
View File

@@ -13,27 +13,12 @@
"request": "launch",
"name": "bun test [file]",
"program": "${workspaceFolder}/build/bun-debug",
"args": ["test", "${file}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "${file}"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [file] --only",
"program": "${workspaceFolder}/build/bun-debug",
"args": ["test", "--only", "${file}"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "1",
"BUN_DEBUG_FileReader": "1"
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole"
},
@@ -42,12 +27,12 @@
"request": "launch",
"name": "bun test [file] (fast)",
"program": "${workspaceFolder}/build/bun-debug",
"args": ["test", "${file}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "${file}"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "0"
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
},
"console": "internalConsole"
},
@@ -56,12 +41,26 @@
"request": "launch",
"name": "bun test [file] (verbose)",
"program": "${workspaceFolder}/build/bun-debug",
"args": ["test", "${file}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "${file}"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "0",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [file] --only",
"program": "${workspaceFolder}/build/bun-debug",
"args": ["--env-file=${workspaceFolder}/.env", "test", "--only", "${file}"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole"
},
@@ -70,12 +69,12 @@
"request": "launch",
"name": "bun test [file] --watch",
"program": "${workspaceFolder}/build/bun-debug",
"args": ["test", "--watch", "${file}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "--watch", "${file}"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole"
},
@@ -84,12 +83,12 @@
"request": "launch",
"name": "bun test [file] --hot",
"program": "${workspaceFolder}/build/bun-debug",
"args": ["test", "--hot", "${file}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "--hot", "${file}"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole"
},
@@ -98,7 +97,7 @@
"request": "launch",
"name": "bun test [file] --inspect",
"program": "${workspaceFolder}/build/bun-debug",
"args": ["test", "${file}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "${file}"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
@@ -108,7 +107,7 @@
},
"console": "internalConsole",
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"pattern": "https:\/\/debug.bun.sh\/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
"action": "openExternally"
}
@@ -118,7 +117,7 @@
"request": "launch",
"name": "bun test [file] --inspect-brk",
"program": "${workspaceFolder}/build/bun-debug",
"args": ["test", "${file}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "${file}"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
@@ -128,7 +127,7 @@
},
"console": "internalConsole",
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"pattern": "https:\/\/debug.bun.sh\/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
"action": "openExternally"
}
@@ -144,9 +143,9 @@
"env": {
"FORCE_COLOR": "0",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole"
"console": "internalConsole",
},
{
"type": "lldb",
@@ -158,7 +157,7 @@
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "0"
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
},
"console": "internalConsole"
},
@@ -172,7 +171,7 @@
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "0",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole"
},
@@ -186,7 +185,7 @@
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole"
},
@@ -200,7 +199,7 @@
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole"
},
@@ -219,10 +218,10 @@
},
"console": "internalConsole",
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"pattern": "https:\/\/debug.bun.sh\/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
"action": "openExternally"
}
},
},
{
"type": "lldb",
@@ -239,10 +238,10 @@
},
"console": "internalConsole",
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"pattern": "https:\/\/debug.bun.sh\/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
"action": "openExternally"
}
},
},
// bun test [...]
{
@@ -250,12 +249,12 @@
"request": "launch",
"name": "bun test [...]",
"program": "${workspaceFolder}/build/bun-debug",
"args": ["test", "${input:testName}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole"
},
@@ -264,12 +263,12 @@
"request": "launch",
"name": "bun test [...] (fast)",
"program": "${workspaceFolder}/build/bun-debug",
"args": ["test", "${input:testName}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "0"
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
},
"console": "internalConsole"
},
@@ -278,12 +277,26 @@
"request": "launch",
"name": "bun test [...] (verbose)",
"program": "${workspaceFolder}/build/bun-debug",
"args": ["test", "${input:testName}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "0",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [...] --only",
"program": "${workspaceFolder}/build/bun-debug",
"args": ["--env-file=${workspaceFolder}/.env", "test", "--only", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole"
},
@@ -292,12 +305,12 @@
"request": "launch",
"name": "bun test [...] --watch",
"program": "${workspaceFolder}/build/bun-debug",
"args": ["test", "--watch", "${input:testName}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "--watch", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole"
},
@@ -306,12 +319,12 @@
"request": "launch",
"name": "bun test [...] --hot",
"program": "${workspaceFolder}/build/bun-debug",
"args": ["test", "--hot", "${input:testName}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "--hot", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole"
},
@@ -320,7 +333,7 @@
"request": "launch",
"name": "bun test [...] --inspect",
"program": "${workspaceFolder}/build/bun-debug",
"args": ["test", "${input:testName}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
@@ -330,7 +343,7 @@
},
"console": "internalConsole",
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"pattern": "https:\/\/debug.bun.sh\/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
"action": "openExternally"
}
@@ -340,7 +353,7 @@
"request": "launch",
"name": "bun test [...] --inspect-brk",
"program": "${workspaceFolder}/build/bun-debug",
"args": ["test", "${input:testName}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
@@ -350,26 +363,11 @@
},
"console": "internalConsole",
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"pattern": "https:\/\/debug.bun.sh\/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
"action": "openExternally"
}
},
// bun exec [...]
{
"type": "lldb",
"request": "launch",
"name": "bun exec [...]",
"program": "${workspaceFolder}/build/bun-debug",
"args": ["exec", "${input:testName}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
},
"console": "internalConsole"
},
// bun test [*]
{
"type": "lldb",
@@ -395,7 +393,7 @@
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "0"
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
},
"console": "internalConsole"
},
@@ -414,25 +412,11 @@
},
"console": "internalConsole",
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"pattern": "https:\/\/debug.bun.sh\/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
"action": "openExternally"
}
},
{
"type": "lldb",
"request": "launch",
"name": "bun install [folder]",
"program": "${workspaceFolder}/build/bun-debug",
"args": ["install"],
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
@@ -448,7 +432,7 @@
"request": "launch",
"name": "Windows: bun test [file]",
"program": "${workspaceFolder}/build/bun-debug.exe",
"args": ["test", "${file}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "${file}"],
"cwd": "${workspaceFolder}/test",
"environment": [
{
@@ -459,61 +443,18 @@
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1"
},
{
"name": "BUN_DEBUG_jest",
"value": "1"
},
{
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
"value": "1"
}
]
},
{
"type": "cppvsdbg",
"request": "launch",
"name": "Windows: bun test --only [file]",
"program": "${workspaceFolder}/build/bun-debug.exe",
"args": ["test", "--only", "${file}"],
"cwd": "${workspaceFolder}/test",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1"
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1"
},
{
"name": "BUN_DEBUG_EventLoop",
"value": "1"
},
{
"name": "BUN_DEBUG_uv",
"value": "1"
},
{
"name": "BUN_DEBUG_SYS",
"value": "1"
},
{
"name": "BUN_DEBUG_PipeWriter",
"value": "1"
},
{
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
"value": "2"
}
]
],
},
{
"type": "cppvsdbg",
"request": "launch",
"name": "Windows: bun test [file] (fast)",
"program": "${workspaceFolder}/build/bun-debug.exe",
"args": ["test", "${file}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "${file}"],
"cwd": "${workspaceFolder}/test",
"environment": [
{
@@ -528,14 +469,14 @@
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
"value": "0"
}
]
],
},
{
"type": "cppvsdbg",
"request": "launch",
"name": "Windows: bun test [file] (verbose)",
"program": "${workspaceFolder}/build/bun-debug.exe",
"args": ["test", "${file}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "${file}"],
"cwd": "${workspaceFolder}/test",
"environment": [
{
@@ -550,14 +491,36 @@
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
"value": "2"
}
]
],
},
{
"type": "cppvsdbg",
"request": "launch",
"name": "Windows: bun test [file] --only",
"program": "${workspaceFolder}/build/bun-debug.exe",
"args": ["--env-file=${workspaceFolder}/.env", "test", "--only", "${file}"],
"cwd": "${workspaceFolder}/test",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1"
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1"
},
{
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
"value": "2"
},
],
},
{
"type": "cppvsdbg",
"request": "launch",
"name": "Windows: bun test [file] --inspect",
"program": "${workspaceFolder}/build/bun-debug.exe",
"args": ["test", "${file}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "${file}"],
"cwd": "${workspaceFolder}/test",
"environment": [
{
@@ -578,17 +541,17 @@
}
],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"pattern": "https:\/\/debug.bun.sh\/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
"action": "openExternally"
}
},
},
{
"type": "cppvsdbg",
"request": "launch",
"name": "Windows: bun test [file] --inspect-brk",
"program": "${workspaceFolder}/build/bun-debug.exe",
"args": ["test", "${file}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "${file}"],
"cwd": "${workspaceFolder}/test",
"environment": [
{
@@ -609,10 +572,10 @@
}
],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"pattern": "https:\/\/debug.bun.sh\/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
"action": "openExternally"
}
},
},
// Windows: bun run [file]
{
@@ -635,26 +598,29 @@
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
"value": "2"
}
]
],
},
{
"type": "cppvsdbg",
"request": "launch",
"name": "Windows: bun install",
"name": "Windows: bun run [file] (fast)",
"program": "${workspaceFolder}/build/bun-debug.exe",
"args": ["install"],
"args": ["run", "${fileBasename}"],
"cwd": "${fileDirname}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1"
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1"
},
{
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
"value": "0"
}
]
],
},
{
"type": "cppvsdbg",
@@ -676,7 +642,7 @@
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
"value": "2"
}
]
],
},
{
"type": "cppvsdbg",
@@ -704,10 +670,10 @@
}
],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"pattern": "https:\/\/debug.bun.sh\/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
"action": "openExternally"
}
},
},
{
"type": "cppvsdbg",
@@ -735,10 +701,10 @@
}
],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"pattern": "https:\/\/debug.bun.sh\/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
"action": "openExternally"
}
},
},
// Windows: bun test [...]
{
@@ -746,7 +712,7 @@
"request": "launch",
"name": "Windows: bun test [...]",
"program": "${workspaceFolder}/build/bun-debug.exe",
"args": ["test", "${input:testName}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"environment": [
{
@@ -761,14 +727,14 @@
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
"value": "2"
}
]
],
},
{
"type": "cppvsdbg",
"request": "launch",
"name": "Windows: bun test [...] (fast)",
"program": "${workspaceFolder}/build/bun-debug.exe",
"args": ["test", "${input:testName}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"environment": [
{
@@ -783,14 +749,14 @@
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
"value": "0"
}
]
],
},
{
"type": "cppvsdbg",
"request": "launch",
"name": "Windows: bun test [...] (verbose)",
"program": "${workspaceFolder}/build/bun-debug.exe",
"args": ["test", "${input:testName}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"environment": [
{
@@ -805,14 +771,36 @@
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
"value": "2"
}
]
],
},
{
"type": "cppvsdbg",
"request": "launch",
"name": "Windows: bun test [...] --only",
"program": "${workspaceFolder}/build/bun-debug.exe",
"args": ["--env-file=${workspaceFolder}/.env", "test", "--only", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1"
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1"
},
{
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
"value": "2"
}
],
},
{
"type": "cppvsdbg",
"request": "launch",
"name": "Windows: bun test [...] --watch",
"program": "${workspaceFolder}/build/bun-debug.exe",
"args": ["test", "--watch", "${input:testName}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "--watch", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"environment": [
{
@@ -827,14 +815,14 @@
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
"value": "2"
}
]
],
},
{
"type": "cppvsdbg",
"request": "launch",
"name": "Windows: bun test [...] --hot",
"program": "${workspaceFolder}/build/bun-debug.exe",
"args": ["test", "--hot", "${input:testName}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "--hot", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"environment": [
{
@@ -849,14 +837,14 @@
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
"value": "2"
}
]
],
},
{
"type": "cppvsdbg",
"request": "launch",
"name": "Windows: bun test [...] --inspect",
"program": "${workspaceFolder}/build/bun-debug.exe",
"args": ["test", "${input:testName}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"environment": [
{
@@ -877,17 +865,17 @@
}
],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"pattern": "https:\/\/debug.bun.sh\/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
"action": "openExternally"
}
},
},
{
"type": "cppvsdbg",
"request": "launch",
"name": "Windows: bun test [...] --inspect-brk",
"program": "${workspaceFolder}/build/bun-debug.exe",
"args": ["test", "${input:testName}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"environment": [
{
@@ -908,33 +896,10 @@
}
],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"pattern": "https:\/\/debug.bun.sh\/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
"action": "openExternally"
}
},
// Windows: bun exec [...]
{
"type": "cppvsdbg",
"request": "launch",
"name": "Windows: bun exec [...]",
"program": "${workspaceFolder}/build/bun-debug.exe",
"args": ["exec", "${input:testName}"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1"
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1"
},
{
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
"value": "2"
}
]
},
},
// Windows: bun test [*]
{
@@ -957,7 +922,7 @@
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
"value": "2"
}
]
],
},
{
"type": "cppvsdbg",
@@ -979,7 +944,7 @@
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
"value": "0"
}
]
],
},
{
"type": "cppvsdbg",
@@ -1007,10 +972,10 @@
}
],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"pattern": "https:\/\/debug.bun.sh\/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
"action": "openExternally"
}
},
},
{
"type": "cppvsdbg",
@@ -1020,7 +985,7 @@
"args": ["src/runner.node.mjs"],
"cwd": "${workspaceFolder}/packages/bun-internal-test",
"console": "internalConsole"
}
},
],
"inputs": [
{
@@ -1032,6 +997,6 @@
"id": "testName",
"type": "promptString",
"description": "Usage: bun test [...]"
}
},
]
}

35
.vscode/settings.json vendored
View File

@@ -40,7 +40,7 @@
// C++
"lldb.verboseLogging": false,
"cmake.configureOnOpen": false,
"C_Cpp.errorSquiggles": "enabled",
"C_Cpp.errorSquiggles": "enabled",
"[cpp]": {
"editor.defaultFormatter": "xaver.clang-format"
},
@@ -52,37 +52,37 @@
},
// JavaScript
"prettier.enable": true,
"prettier.enable": false,
"eslint.workingDirectories": ["${workspaceFolder}/packages/bun-types"],
"[javascript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
"editor.defaultFormatter": "biomejs.biome",
},
"[javascriptreact]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
"editor.defaultFormatter": "biomejs.biome"
},
// TypeScript
"typescript.tsdk": "${workspaceFolder}/node_modules/typescript/lib",
"[typescript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
"editor.defaultFormatter": "biomejs.biome"
},
"[typescriptreact]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
"editor.defaultFormatter": "biomejs.biome"
},
// JSON
"[json]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
"editor.defaultFormatter": "biomejs.biome",
},
"[jsonc]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
"editor.defaultFormatter": "biomejs.biome",
},
// Markdown
"[markdown]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.unicodeHighlight.ambiguousCharacters": true,
"editor.unicodeHighlight.invisibleCharacters": true,
"editor.defaultFormatter": "biomejs.biome",
"editor.unicodeHighlight.ambiguousCharacters": false,
"editor.unicodeHighlight.invisibleCharacters": false,
"diffEditor.ignoreTrimWhitespace": false,
"editor.wordWrap": "on",
"editor.quickSuggestions": {
@@ -94,17 +94,12 @@
// TOML
"[toml]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
"editor.defaultFormatter": "biomejs.biome",
},
// YAML
"[yaml]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
// Docker
"[dockerfile]": {
"editor.formatOnSave": false
"editor.defaultFormatter": "biomejs.biome",
},
// Files
@@ -153,5 +148,5 @@
"WebKit/WebDriver": true,
"WebKit/WebKitBuild": true,
"WebKit/WebInspectorUI": true
}
},
}

View File

@@ -2,8 +2,8 @@ cmake_minimum_required(VERSION 3.22)
cmake_policy(SET CMP0091 NEW)
cmake_policy(SET CMP0067 NEW)
set(Bun_VERSION "1.1.3")
set(WEBKIT_TAG e3a2d89a0b1644cc8d5c245bd2ffee4d4bd6c1d5)
set(Bun_VERSION "1.0.28")
set(WEBKIT_TAG c3712c13dcdc091cfe4c7cb8f2c1fd16472e6f92)
set(BUN_WORKDIR "${CMAKE_CURRENT_BINARY_DIR}")
message(STATUS "Configuring Bun ${Bun_VERSION} in ${BUN_WORKDIR}")
@@ -41,53 +41,10 @@ elseif(CMAKE_BUILD_TYPE STREQUAL "Release")
# it is enabled for the time being to make sure to catch more bugs in the experimental windows builds
set(DEFAULT_ZIG_OPTIMIZE "ReleaseSafe")
else()
if(ZIG_OPTIMIZE STREQUAL "Debug")
set(bun "bun-debug")
else()
set(bun "bun-profile")
endif()
set(bun "bun-profile")
endif()
endif()
# --- MacOS SDK ---
if(APPLE AND DEFINED ENV{CI})
if(ARCH STREQUAL "x86_64")
set(CMAKE_OSX_DEPLOYMENT_TARGET "10.14")
else()
set(CMAKE_OSX_DEPLOYMENT_TARGET "11.0")
endif()
endif()
if(APPLE AND NOT CMAKE_OSX_DEPLOYMENT_TARGET)
execute_process(COMMAND xcrun --show-sdk-path OUTPUT_VARIABLE SDKROOT)
string(STRIP ${SDKROOT} SDKROOT)
message(STATUS "MacOS SDK path: ${SDKROOT}")
SET(CMAKE_OSX_SYSROOT ${SDKROOT})
execute_process(COMMAND xcrun --sdk macosx --show-sdk-version OUTPUT_VARIABLE MACOSX_DEPLOYMENT_TARGET)
string(STRIP ${MACOSX_DEPLOYMENT_TARGET} MACOSX_DEPLOYMENT_TARGET)
set(CMAKE_OSX_DEPLOYMENT_TARGET ${MACOSX_DEPLOYMENT_TARGET})
# Check if current version of macOS is less than the deployment target and if so, raise an error
execute_process(COMMAND sw_vers -productVersion OUTPUT_VARIABLE MACOS_VERSION)
string(STRIP ${MACOS_VERSION} MACOS_VERSION)
if(MACOS_VERSION VERSION_LESS ${MACOSX_DEPLOYMENT_TARGET})
message(WARNING
"The current version of macOS (${MACOS_VERSION}) is less than the deployment target (${MACOSX_DEPLOYMENT_TARGET}).\n"
"The build will be incompatible with your current device due to mismatches in `icucore` versions.\n"
"To fix this, please either:\n"
" - Upgrade to at least macOS ${MACOSX_DEPLOYMENT_TARGET}\n"
" - Use `xcode-select` to switch to an SDK version <= ${MACOS_VERSION}\n"
" - Set CMAKE_OSX_DEPLOYMENT_TARGET=${MACOS_VERSION} (make sure to build all dependencies with this variable set too)"
)
endif()
endif()
if(APPLE)
message(STATUS "Building for macOS v${CMAKE_OSX_DEPLOYMENT_TARGET}")
endif()
# --- LLVM ---
# This detection is a little overkill, but it ensures that the set LLVM_VERSION matches under
# any case possible. Sorry for the complexity...
@@ -270,13 +227,6 @@ set(DEFAULT_USE_DEBUG_JSC, OFF)
if(CMAKE_BUILD_TYPE STREQUAL "Debug")
set(DEFAULT_USE_DEBUG_JSC ON)
set(DEFAULT_LTO OFF)
elseif(CMAKE_BUILD_TYPE STREQUAL "Release")
if(CI)
set(DEFAULT_LTO ON)
else()
set(DEFAULT_LTO OFF)
endif()
endif()
if(WIN32)
@@ -284,9 +234,9 @@ if(WIN32)
endif()
if(UNIX AND NOT APPLE)
execute_process(COMMAND grep -w "NAME" /etc/os-release OUTPUT_VARIABLE LINUX_DISTRO)
execute_process(COMMAND cat /etc/os-release COMMAND head -n1 OUTPUT_VARIABLE LINUX_DISTRO)
if(${LINUX_DISTRO} MATCHES "NAME=\"(Arch|Manjaro|Artix) Linux\"|NAME=\"openSUSE Tumbleweed\"\n")
if(${LINUX_DISTRO} MATCHES "NAME=\"(Arch|Manjaro|Artix) Linux\"\n")
set(DEFAULT_USE_STATIC_LIBATOMIC OFF)
endif()
endif()
@@ -313,12 +263,6 @@ option(USE_DEBUG_JSC "Enable assertions and use a debug build of JavaScriptCore"
option(USE_UNIFIED_SOURCES "Use unified sources to speed up the build" OFF)
option(USE_STATIC_LIBATOMIC "Statically link libatomic, requires the presence of libatomic.a" ${DEFAULT_USE_STATIC_LIBATOMIC})
option(USE_LTO "Enable Link-Time Optimization" ${DEFAULT_LTO})
if(NOT ZIG_LIB_DIR)
cmake_path(SET ZIG_LIB_DIR NORMALIZE "${CMAKE_CURRENT_SOURCE_DIR}/src/deps/zig/lib")
endif()
if(USE_VALGRIND)
# Disable SIMD
set(USE_BASELINE_BUILD ON)
@@ -341,11 +285,9 @@ endif()
set(ERROR_LIMIT 100 CACHE STRING "Maximum number of errors to show when compiling C++ code")
set(ARCH x86_64)
set(HOMEBREW_PREFIX "/usr/local")
if(CMAKE_SYSTEM_PROCESSOR MATCHES "aarch64|arm64|arm")
set(ARCH aarch64)
set(HOMEBREW_PREFIX "/opt/homebrew")
endif()
if(NOT CPU_TARGET)
@@ -486,13 +428,7 @@ if(NOT WEBKIT_DIR)
set(BUN_WEBKIT_PACKAGE_NAME_SUFFIX "-debug")
set(ASSERT_ENABLED "1")
elseif(NOT DEBUG AND NOT WIN32)
# Avoid waiting for LTO in local release builds outside of CI
if(USE_LTO)
set(BUN_WEBKIT_PACKAGE_NAME_SUFFIX "-lto")
else()
set(BUN_WEBKIT_PACKAGE_NAME_SUFFIX "")
endif()
set(BUN_WEBKIT_PACKAGE_NAME_SUFFIX "-lto")
set(ASSERT_ENABLED "0")
endif()
@@ -527,13 +463,6 @@ if(NOT WEBKIT_DIR)
endif()
set(WEBKIT_INCLUDE_DIR "${BUN_WORKDIR}/bun-webkit/include")
if(APPLE)
set(ICU_INCLUDE_DIR "")
else()
set(ICU_INCLUDE_DIR "${BUN_WORKDIR}/bun-webkit/include/wtf/unicode")
endif()
set(WEBKIT_LIB_DIR "${BUN_WORKDIR}/bun-webkit/lib")
elseif(WEBKIT_DIR STREQUAL "omit")
message(STATUS "Not using WebKit. This is only valid if you are only trying to build Zig code")
@@ -555,7 +484,6 @@ else()
add_compile_definitions("BUN_DEBUG=1")
set(ASSERT_ENABLED "1")
endif()
message(STATUS "Using WebKit from ${WEBKIT_DIR}")
else()
if(NOT EXISTS "${WEBKIT_DIR}/lib/${libWTF}.${STATIC_LIB_EXT}" OR NOT EXISTS "${WEBKIT_DIR}/lib/${libJavaScriptCore}.${STATIC_LIB_EXT}")
if(WEBKIT_DIR MATCHES "src/bun.js/WebKit$")
@@ -750,26 +678,19 @@ if(NOT NO_CODEGEN)
file(GLOB BUN_TS_MODULES ${CONFIGURE_DEPENDS}
"${BUN_SRC}/js/node/*.ts"
"${BUN_SRC}/js/node/*.js"
"${BUN_SRC}/js/bun/*.ts"
"${BUN_SRC}/js/bun/*.js"
"${BUN_SRC}/js/builtins/*.ts"
"${BUN_SRC}/js/builtins/*.js"
"${BUN_SRC}/js/bun/*.ts"
"${BUN_SRC}/js/thirdparty/*.js"
"${BUN_SRC}/js/thirdparty/*.ts"
"${BUN_SRC}/js/internal/*.js"
"${BUN_SRC}/js/internal/*.ts"
"${BUN_SRC}/js/node/*.js"
"${BUN_SRC}/js/node/*.ts"
"${BUN_SRC}/js/thirdparty/*.js"
"${BUN_SRC}/js/thirdparty/*.ts"
)
file(GLOB BUN_TS_FUNCTIONS ${CONFIGURE_DEPENDS} "${BUN_SRC}/js/builtins/*.ts")
file(GLOB CODEGEN_FILES ${CONFIGURE_DEPENDS} "${BUN_CODEGEN_SRC}/*.ts")
add_custom_command(
OUTPUT
"${BUN_WORKDIR}/codegen/WebCoreJSBuiltins.cpp"
"${BUN_WORKDIR}/codegen/WebCoreJSBuiltins.h"
"${BUN_WORKDIR}/codegen/InternalModuleRegistryConstants.h"
"${BUN_WORKDIR}/codegen/InternalModuleRegistry+createInternalModuleById.h"
"${BUN_WORKDIR}/codegen/InternalModuleRegistry+enum.h"
@@ -777,12 +698,10 @@ if(NOT NO_CODEGEN)
"${BUN_WORKDIR}/codegen/NativeModuleImpl.h"
"${BUN_WORKDIR}/codegen/ResolvedSourceTag.zig"
"${BUN_WORKDIR}/codegen/SyntheticModuleType.h"
"${BUN_WORKDIR}/codegen/GeneratedJS2Native.h"
"${BUN_SRC}/bun.js/bindings/GeneratedJS2Native.zig"
COMMAND ${BUN_EXECUTABLE} run "${BUN_SRC}/codegen/bundle-modules.ts" "--debug=${DEBUG}" "${BUN_WORKDIR}"
DEPENDS ${BUN_TS_MODULES} ${CODEGEN_FILES}
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
COMMENT "Bundling JS"
COMMENT "Bundling JS modules"
)
endif()
@@ -790,6 +709,15 @@ WEBKIT_ADD_SOURCE_DEPENDENCIES(
"${BUN_SRC}/bun.js/bindings/InternalModuleRegistry.cpp"
"${BUN_WORKDIR}/codegen/InternalModuleRegistryConstants.h"
)
add_custom_command(
OUTPUT "${BUN_WORKDIR}/codegen/WebCoreJSBuiltins.cpp"
"${BUN_WORKDIR}/codegen/WebCoreJSBuiltins.h"
COMMAND ${BUN_EXECUTABLE} run "${BUN_SRC}/codegen/bundle-functions.ts" "--debug=${DEBUG}" "${BUN_WORKDIR}"
DEPENDS ${BUN_TS_FUNCTIONS} ${CODEGEN_FILES}
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
COMMENT "Bundling JS builtin functions"
)
list(APPEND BUN_RAW_SOURCES "${BUN_WORKDIR}/codegen/WebCoreJSBuiltins.cpp")
# --- Peechy API ---
@@ -857,10 +785,8 @@ if(NOT BUN_LINK_ONLY AND NOT BUN_CPP_ONLY)
OUTPUT "${BUN_ZIG_OBJ}"
COMMAND
"${ZIG_COMPILER}" "build" "obj"
"--zig-lib-dir" "${ZIG_LIB_DIR}"
"-Doutput-file=${BUN_ZIG_OBJ}"
"-Dgenerated-code=${BUN_WORKDIR}/codegen"
"-freference-trace=10"
"-Dversion=${Bun_VERSION}"
"-Dcanary=${CANARY}"
"-Doptimize=${ZIG_OPTIMIZE}"
@@ -873,7 +799,6 @@ if(NOT BUN_LINK_ONLY AND NOT BUN_CPP_ONLY)
"${BUN_WORKDIR}/codegen/ResolvedSourceTag.zig"
"${BUN_IDENTIFIER_CACHE_OUT}"
"${BUN_SRC}/api/schema.zig"
"${BUN_SRC}/bun.js/bindings/GeneratedJS2Native.zig"
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
COMMENT "Building zig code"
VERBATIM
@@ -954,10 +879,6 @@ else()
add_compile_definitions("ASSERT_ENABLED=1")
endif()
if(ICU_INCLUDE_DIR)
include_directories(${ICU_INCLUDE_DIR})
endif()
include_directories(
${CMAKE_CURRENT_SOURCE_DIR}/packages/
${CMAKE_CURRENT_SOURCE_DIR}/packages/bun-usockets
@@ -1017,14 +938,6 @@ if(CMAKE_BUILD_TYPE STREQUAL "Debug")
-Werror=return-type
-Werror=return-stack-address
-Werror=implicit-function-declaration
-Werror=uninitialized
-Werror=conditional-uninitialized
-Werror=suspicious-memaccess
-Werror=move
-Werror=sometimes-uninitialized
-Werror=unused
-Wno-unused-function
-Werror
)
else()
target_compile_options(${bun} PUBLIC /Od /Z7)
@@ -1032,36 +945,15 @@ if(CMAKE_BUILD_TYPE STREQUAL "Debug")
add_compile_definitions("BUN_DEBUG=1")
elseif(CMAKE_BUILD_TYPE STREQUAL "Release")
set(LTO_FLAG "")
if(NOT WIN32)
if(USE_LTO)
list(APPEND LTO_FLAG "-flto=full" "-emit-llvm")
endif()
# Leave -Werror=unused off in release builds so we avoid errors from variables that are only used in ASSERT
target_compile_options(${bun} PUBLIC -O3 ${LTO_FLAG} -g1
target_compile_options(${bun} PUBLIC -O3 -flto=full -emit-llvm -g1
-Werror=return-type
-Werror=return-stack-address
-Werror=implicit-function-declaration
-Werror=uninitialized
-Werror=conditional-uninitialized
-Werror=suspicious-memaccess
-Werror=move
-Werror=sometimes-uninitialized
-Werror
)
else()
set(LTO_LINK_FLAG "")
if(USE_LTO)
# -emit-llvm seems to not be supported or under a different name on Windows.
list(APPEND LTO_FLAG "-flto=full")
list(APPEND LTO_LINK_FLAG "/LTCG")
endif()
target_compile_options(${bun} PUBLIC /O2 ${LTO_FLAG} /DEBUG /Z7)
target_link_options(${bun} PUBLIC ${LTO_LINK_FLAG} /DEBUG)
target_compile_options(${bun} PUBLIC /O2 -flto=full /DEBUG /Z7)
target_link_options(${bun} PUBLIC /LTCG /DEBUG)
endif()
endif()
@@ -1113,6 +1005,12 @@ else()
endif()
if(APPLE)
if(ARCH STREQUAL "x86_64")
set(CMAKE_OSX_DEPLOYMENT_TARGET "10.14")
else()
set(CMAKE_OSX_DEPLOYMENT_TARGET "11.0")
endif()
target_link_options(${bun} PUBLIC "-dead_strip")
target_link_options(${bun} PUBLIC "-dead_strip_dylibs")
target_link_options(${bun} PUBLIC "-Wl,-stack_size,0x1200000")
@@ -1178,7 +1076,21 @@ endif()
# --- ICU ---
if(APPLE)
# TODO: a much better check can be done to find this path
find_path(
ICU4C_DIR NAMES lib/libicudata.a
PATHS ENV PATH /usr/local/opt/icu4c /opt/homebrew/opt/icu4c
)
find_path(
ICONV_DIR NAMES lib/libiconv.a
PATHS ENV PATH /usr/local/opt/libiconv /opt/homebrew/opt/libiconv
)
target_link_libraries(${bun} PRIVATE "icucore")
target_link_libraries(${bun} PRIVATE "${ICONV_DIR}/lib/libiconv.a")
target_link_libraries(${bun} PRIVATE "${ICU4C_DIR}/lib/libicudata.a")
target_link_libraries(${bun} PRIVATE "${ICU4C_DIR}/lib/libicui18n.a")
target_link_libraries(${bun} PRIVATE "${ICU4C_DIR}/lib/libicuuc.a")
endif()
# --- Stripped Binary "bun"

View File

@@ -16,7 +16,7 @@ ARG BUILD_MACHINE_ARCH=x86_64
ARG BUILDARCH=amd64
ARG TRIPLET=${ARCH}-linux-gnu
ARG GIT_SHA=""
ARG BUN_VERSION="bun-v1.0.30"
ARG BUN_VERSION="bun-v1.0.7"
ARG BUN_DOWNLOAD_URL_BASE="https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/${BUN_VERSION}"
ARG CANARY=0
ARG ASSERTIONS=OFF
@@ -116,7 +116,7 @@ RUN apt-get update -y \
&& case "${arch##*-}" in \
amd64) variant="x64";; \
arm64) variant="aarch64";; \
*) echo "unsupported architecture: $arch"; exit 1 ;; \
*) echo "error: unsupported architecture: $arch"; exit 1 ;; \
esac \
&& wget "${BUN_DOWNLOAD_URL_BASE}/bun-linux-${variant}.zip" \
&& unzip bun-linux-${variant}.zip \
@@ -372,7 +372,7 @@ ENV CCACHE_DIR=/ccache
RUN --mount=type=cache,target=/ccache mkdir ${BUN_DIR}/build \
&& cd ${BUN_DIR}/build \
&& mkdir -p tmp_modules tmp_functions js codegen \
&& cmake .. -GNinja -DCMAKE_BUILD_TYPE=Release -DUSE_LTO=ON -DUSE_DEBUG_JSC=${ASSERTIONS} -DBUN_CPP_ONLY=1 -DWEBKIT_DIR=/build/bun/bun-webkit -DCANARY=${CANARY} -DZIG_COMPILER=system \
&& cmake .. -GNinja -DCMAKE_BUILD_TYPE=Release -DUSE_DEBUG_JSC=${ASSERTIONS} -DBUN_CPP_ONLY=1 -DWEBKIT_DIR=/build/bun/bun-webkit -DCANARY=${CANARY} -DZIG_COMPILER=system \
&& bash compile-cpp-only.sh -v
FROM bun-base-with-zig as bun-codegen-for-zig
@@ -414,12 +414,11 @@ COPY --from=bun-codegen-for-zig ${BUN_DIR}/packages/bun-error/dist ${BUN_DIR}/pa
WORKDIR $BUN_DIR
RUN mkdir -p build \
&& bun run $BUN_DIR/src/codegen/bundle-modules.ts --debug=OFF $BUN_DIR/build \
&& bun run $BUN_DIR/src/codegen/bundle-modules-fast.ts $BUN_DIR/build \
&& cd build \
&& cmake .. \
-G Ninja \
-DCMAKE_BUILD_TYPE=Release \
-DUSE_LTO=ON \
-DZIG_OPTIMIZE="${ZIG_OPTIMIZE}" \
-DCPU_TARGET="${CPU_TARGET}" \
-DZIG_TARGET="${TRIPLET}" \
@@ -429,7 +428,6 @@ RUN mkdir -p build \
-DBUN_ZIG_OBJ="/tmp/bun-zig.o" \
-DCANARY="${CANARY}" \
-DZIG_COMPILER=system \
-DZIG_LIB_DIR=$BUN_DIR/src/deps/zig/lib \
&& ONLY_ZIG=1 ninja "/tmp/bun-zig.o" -v
FROM scratch as build_release_obj
@@ -478,7 +476,6 @@ RUN cmake .. \
-DCMAKE_BUILD_TYPE=Release \
-DBUN_LINK_ONLY=1 \
-DBUN_ZIG_OBJ="${BUN_DIR}/build/bun-zig.o" \
-DUSE_LTO=ON \
-DUSE_DEBUG_JSC=${ASSERTIONS} \
-DBUN_CPP_ARCHIVE="${BUN_DIR}/build/bun-cpp-objects.a" \
-DWEBKIT_DIR="${BUN_DIR}/bun-webkit" \
@@ -543,7 +540,6 @@ RUN cmake .. \
-DNO_CONFIGURE_DEPENDS=1 \
-DCANARY="${CANARY}" \
-DZIG_COMPILER=system \
-DUSE_LTO=ON \
&& ninja -v \
&& ./bun --revision \
&& mkdir -p /build/out \

View File

@@ -24,9 +24,9 @@
## What is Bun?
> **Bun is under active development.** Use it to speed up your development workflows or run simpler production code in resource-constrained environments like serverless functions. We're working on more complete Node.js compatibility and integration with existing frameworks. Join the [Discord](https://bun.sh/discord) and watch the [GitHub repository](https://github.com/oven-sh/bun) to keep tabs on future releases.
> **Bun is under active development.** Use it to speed up your development workflows or run simpler production code in resource-constrained environments like serverless functions. We're working on more complete Node.js compatibility and integration with existing frameworks. Join the [Discord](https://bun.sh/discord) and watch the [GitHub repository](https://github.com/oven-sh/bun) to keep tabs on future releases.
Bun is an all-in-one toolkit for JavaScript and TypeScript apps. It ships as a single executable called `bun`.
Bun is an all-in-one toolkit for JavaScript and TypeScript apps. It ships as a single executable called `bun`.
At its core is the _Bun runtime_, a fast JavaScript runtime designed as a drop-in replacement for Node.js. It's written in Zig and powered by JavaScriptCore under the hood, dramatically reducing startup times and memory usage.
@@ -34,28 +34,27 @@ At its core is the _Bun runtime_, a fast JavaScript runtime designed as a drop-i
bun run index.tsx # TS and JSX supported out-of-the-box
```
The `bun` command-line tool also implements a test runner, script runner, and Node.js-compatible package manager. Instead of 1,000 node_modules for development, you only need `bun`. Bun's built-in tools are significantly faster than existing options and usable in existing Node.js projects with little to no changes.
The `bun` command-line tool also implements a test runner, script runner, and Node.js-compatible package manager. Instead of 1,000 node_modules for development, you only need `bun`. Bun's built-in tools are significantly faster than existing options and usable in existing Node.js projects with little to no changes.
```bash
bun test # run tests
bun run start # run the `start` script in `package.json`
bun install <pkg> # install a package
bun install <pkg> # install a package
bunx cowsay 'Hello, world!' # execute a package
```
## Install
Bun supports Linux (x64 & arm64), macOS (x64 & Apple Silicon) and Windows (x64).
Bun supports Linux (x64 & arm64) and macOS (x64 & Apple Silicon).
> **Linux users** — Kernel version 5.6 or higher is strongly recommended, but the minimum is 5.1.
>
> **Windows users** — Bun does not currently provide a native Windows build. We're working on this; progress can be tracked at [this issue](https://github.com/oven-sh/bun/issues/43). In the meantime, use one of the installation methods below for Windows Subsystem for Linux.
```sh
# with install script (recommended)
curl -fsSL https://bun.sh/install | bash
# on windows
powershell -c "irm bun.sh/install.ps1 | iex"
# with npm
npm install -g bun

Binary file not shown.

View File

@@ -12,7 +12,7 @@
"fast-glob": "3.3.1",
"fdir": "^6.1.0",
"mitata": "^0.1.6",
"string-width": "7.1.0",
"string-width": "^7.0.0",
"zx": "^7.2.3"
},
"scripts": {

File diff suppressed because one or more lines are too long

View File

@@ -1,10 +1,9 @@
import { mkdirSync, rmSync, writeFileSync } from "fs";
import { cp } from "fs/promises";
import { tmpdir } from "os";
import { join, resolve } from "path";
import { mkdirSync, writeFileSync } from "fs";
import { bench, run } from "./runner.mjs";
import { cp } from "fs/promises";
import { join } from "path";
import { tmpdir } from "os";
import { fileURLToPath } from "url";
const hugeDirectory = (() => {
const root = join(tmpdir(), "huge");
const base = join(root, "directory", "for", "benchmarks", "1", "2", "3", "4", "5", "6", "7", "8", "9", "10");
@@ -19,21 +18,14 @@ const hugeDirectory = (() => {
const hugeFilePath = join(tmpdir(), "huge-file-0.txt");
const hugeText = "Hello, world!".repeat(1000000);
writeFileSync(hugeFilePath, hugeText);
let base = process.argv.at(-1);
if (resolve(base) === fileURLToPath(import.meta.url)) {
base = tmpdir();
} else {
rmSync(base, { recursive: true, force: true });
mkdirSync(base, { recursive: true });
}
var hugeCopyI = 0;
bench("cp -r (1000 files)", async b => {
await cp(hugeDirectory, join(base, "huge-copy" + hugeCopyI++), { recursive: true });
await cp(hugeDirectory, join(tmpdir(), "huge-copy" + hugeCopyI++), { recursive: true });
});
bench("cp 1 " + ((hugeText.length / 1024) | 0) + " KB file", async b => {
await cp(hugeFilePath, join(base, "huge-file" + hugeCopyI++));
await cp(hugeFilePath, join(tmpdir(), "huge-file" + hugeCopyI++));
});
await run();

View File

@@ -5,11 +5,6 @@ const lazy = globalThis[Symbol.for("Bun.lazy")];
const noop = lazy("noop");
const fn = noop.function;
const regular = noop.functionRegular;
const callback = noop.callback;
bench("C++ callback into JS", () => {
callback(() => {});
});
bench("C++ fn regular", () => {
regular();

View File

@@ -1,73 +0,0 @@
import { tmpdir } from "node:os";
import { bench, group, run } from "./runner.mjs";
import { createReadStream, writeFileSync } from "node:fs";
import { sep } from "node:path";
if (!Promise.withResolvers) {
Promise.withResolvers = function () {
let resolve, reject;
const promise = new Promise((res, rej) => {
resolve = res;
reject = rej;
});
return { promise, resolve, reject };
};
}
const ALLOW_BUN = typeof Bun !== "undefined";
const ALLOW_NODE = true;
const dir = tmpdir() + sep;
var short = (function () {
const text = "Hello World!";
const path = dir + "bun-bench-short.text";
writeFileSync(path, text, "utf8");
return { path, length: text.length };
})();
var shortUTF16 = (function () {
const text = "Hello World 💕💕💕";
const path = dir + "bun-bench-shortUTF16.text";
writeFileSync(path, text, "utf8");
return { path, length: text.length };
})();
var long = (function () {
const text = "Hello World!".repeat(1024);
const path = dir + "bun-bench-long.text";
writeFileSync(path, text, "utf8");
return { path, length: text.length };
})();
var longUTF16 = (function () {
const text = "Hello World 💕💕💕".repeat(15 * 70192);
const path = dir + "bun-bench-longUTF16.text";
writeFileSync(path, text, "utf8");
return { path, length: text.length };
})();
async function bun(path) {
for await (const chunk of Bun.file(path).stream()) {
chunk;
}
}
async function node(path) {
const { promise, resolve } = Promise.withResolvers();
const stream = createReadStream(path);
stream.on("data", chunk => {});
stream.on("end", () => resolve());
await promise;
}
ALLOW_BUN && bench("short - bun", () => bun(short.path));
ALLOW_NODE && bench("short - node", () => node(short.path));
ALLOW_BUN && bench("shortUTF16 - bun", () => bun(shortUTF16.path));
ALLOW_NODE && bench("shortUTF16 - node", () => node(shortUTF16.path));
ALLOW_BUN && bench("long - bun", () => bun(long.path));
ALLOW_NODE && bench("long - node", () => node(long.path));
ALLOW_BUN && bench("longUTF16 - bun", () => bun(longUTF16.path));
ALLOW_NODE && bench("longUTF16 - node", () => node(longUTF16.path));
await run();

View File

@@ -3,38 +3,41 @@ import npmStringWidth from "string-width";
const bunStringWidth = globalThis?.Bun?.stringWidth;
const stringWidth = bunStringWidth || npmStringWidth;
const formatter = new Intl.NumberFormat();
const format = n => {
return formatter.format(n);
};
bench("npm/string-width (ansi + emoji + ascii)", () => {
npmStringWidth("hello there! 😀\u001b[31m😀😀");
});
const inputs = [
["hello", "ascii"],
["[31mhello", "ascii+ansi"],
["hello😀", "ascii+emoji"],
["[31m😀😀", "ansi+emoji"],
["😀hello😀[31m😀😀😀", "ansi+emoji+ascii"],
];
bench("npm/string-width (ansi + emoji)", () => {
npmStringWidth("😀\u001b[31m😀😀");
});
const repeatCounts = [1, 10, 100, 1000, 5000];
bench("npm/string-width (ansi + ascii)", () => {
npmStringWidth("\u001b[31mhello there!");
});
const maxInputLength = Math.max(...inputs.map(([input]) => input.repeat(Math.max(...repeatCounts)).length));
if (bunStringWidth) {
bench("Bun.stringWidth (ansi + emoji + ascii)", () => {
bunStringWidth("hello there! 😀\u001b[31m😀😀");
});
for (const [input, textLabel] of inputs) {
for (let repeatCount of repeatCounts) {
const label = bunStringWidth ? "Bun.stringWidth" : "npm/string-width";
bench("Bun.stringWidth (ansi + emoji)", () => {
bunStringWidth("😀\u001b[31m😀😀");
});
const str = input.repeat(repeatCount);
const name = `${label} ${format(str.length).padStart(format(maxInputLength).length, " ")} chars ${textLabel}`;
bench("Bun.stringWidth (ansi + ascii)", () => {
bunStringWidth("\u001b[31mhello there!");
});
bench(name, () => {
stringWidth(str);
});
if (npmStringWidth("😀\u001b[31m😀😀") !== bunStringWidth("😀\u001b[31m😀😀")) {
console.error("string-width mismatch");
}
if (bunStringWidth && bunStringWidth(str) !== npmStringWidth(str)) {
throw new Error("string-width mismatch");
}
if (npmStringWidth("hello there! 😀\u001b[31m😀😀") !== bunStringWidth("hello there! 😀\u001b[31m😀😀")) {
console.error("string-width mismatch");
}
if (npmStringWidth("\u001b[31mhello there!") !== bunStringWidth("\u001b[31mhello there!")) {
console.error("string-width mismatch");
}
}

View File

@@ -1,4 +1,4 @@
import { Database } from "https://deno.land/x/sqlite3@0.11.1/mod.ts";
import { Database } from "https://deno.land/x/sqlite3@0.9.1/mod.ts";
import { run, bench } from "../node_modules/mitata/src/cli.mjs";
const db = new Database("./src/northwind.sqlite");

View File

@@ -7,8 +7,8 @@
"build": "exit 0",
"bench:bun": "$BUN bun.js",
"bench:node": "$NODE node.mjs",
"deps": "npm install && bash src/download.sh",
"bench:deno": "$DENO run -A --unstable-ffi deno.js",
"deps": "npm install && sh src/download.sh",
"bench:deno": "$DENO run -A --unstable deno.js",
"bench": "bun run bench:bun && bun run bench:node && bun run bench:deno"
}
}

70
biome.json Normal file
View File

@@ -0,0 +1,70 @@
{
"$schema": "./node_modules/@biomejs/biome/configuration_schema.json",
"organizeImports": {
"enabled": true
},
"linter": {
"enabled": false
},
"javascript": {
"parser": {
"unsafeParameterDecoratorsEnabled": true
},
"formatter": {
"arrowParentheses": "asNeeded",
"quoteProperties": "preserve",
"semicolons": "always",
"trailingComma": "all",
"indentStyle": "space",
"quoteStyle": "double"
}
},
"json": {
"formatter": {
"indentStyle": "space"
},
"parser": {
"allowComments": true,
"allowTrailingCommas": true
}
},
"vcs": {
"clientKind": "git",
"enabled": false,
"root": "./"
},
"files": {
"maxSize": 9128312873
},
"formatter": {
"enabled": true,
"indentWidth": 2,
"lineEnding": "lf",
"formatWithErrors": true,
"lineWidth": 120,
"indentStyle": "space",
"ignore": [
"node_modules/**",
"test/snapshots",
"test/fixtures",
".next",
"test/js/deno",
"./src/deps",
"./src/bun.js/WebKit/**",
"packages/bun-polyfills",
"./build-*",
"./build",
".cache",
"out/",
"test/transpiler/property-non-ascii-fixture.js",
"test/transpiler/macro-test.test.ts",
"test/transpiler/decorator-metadata.test.ts",
"src/react-refresh.js",
"bindings-obj/*",
"src/deps/**",
"./bench/react-hello-world/react-hello-world.node.js",
"./test/cli/run/require-cache-bug-leak-fixture-large-ast.js",
"./test/cli/run/esm-leak-fixture-large-ast.mjs"
]
}
}

View File

@@ -1,7 +1,7 @@
const std = @import("std");
const pathRel = std.fs.path.relative;
const builtin = @import("builtin");
const Wyhash11 = @import("./src/wyhash.zig").Wyhash11;
const Wyhash = @import("./src/wyhash.zig").Wyhash;
const zig_version = builtin.zig_version;
@@ -84,7 +84,7 @@ const BunBuildOptions = struct {
pub fn updateRuntime(this: *BunBuildOptions) anyerror!void {
if (std.fs.cwd().openFile("src/runtime.out.js", .{ .mode = .read_only })) |file| {
defer file.close();
const runtime_hash = Wyhash11.hash(
const runtime_hash = Wyhash.hash(
0,
try file.readToEndAlloc(std.heap.page_allocator, try file.getEndPos()),
);
@@ -97,7 +97,7 @@ const BunBuildOptions = struct {
if (std.fs.cwd().openFile("src/fallback.out.js", .{ .mode = .read_only })) |file| {
defer file.close();
const fallback_hash = Wyhash11.hash(
const fallback_hash = Wyhash.hash(
0,
try file.readToEndAlloc(std.heap.page_allocator, try file.getEndPos()),
);
@@ -629,5 +629,3 @@ pub fn configureObjectStep(b: *std.build.Builder, obj: *CompileStep, obj_step: *
obj.link_function_sections = true;
}
}
// !

BIN
bun.lockb

Binary file not shown.

View File

@@ -6,4 +6,3 @@
#
# Instead, we can only scan the test directory for Bun's runtime tests
root = "test"
preload = "./test/preload.ts"

View File

@@ -2,7 +2,7 @@
name: bun
appspec: { version: "0.001" }
plugins: [-Meta]
title: A tool for installing and managing JavaScript packages
title: A tool for installing and managing Python packages
options:
- version|V --Show version and exit

View File

@@ -96,20 +96,18 @@ FROM alpine:3.18
ARG BUN_RUNTIME_TRANSPILER_CACHE_PATH=0
ENV BUN_RUNTIME_TRANSPILER_CACHE_PATH=${BUN_RUNTIME_TRANSPILER_CACHE_PATH}
# Ensure `bun install -g` works
ARG BUN_INSTALL_BIN=/usr/local/bin
ENV BUN_INSTALL_BIN=${BUN_INSTALL_BIN}
COPY --from=build /tmp/glibc.apk /tmp/
COPY --from=build /tmp/glibc-bin.apk /tmp/
COPY --from=build /usr/local/bin/bun /usr/local/bin/
COPY docker-entrypoint.sh /usr/local/bin/
# Temporarily use the `build`-stage /tmp folder to access the glibc APKs:
RUN --mount=type=bind,from=build,source=/tmp,target=/tmp \
addgroup -g 1000 bun \
RUN addgroup -g 1000 bun \
&& adduser -u 1000 -G bun -s /bin/sh -D bun \
&& apk --no-cache --force-overwrite --allow-untrusted add \
/tmp/glibc.apk \
/tmp/glibc-bin.apk \
&& rm /tmp/glibc.apk \
&& rm /tmp/glibc-bin.apk \
&& ln -s /usr/local/bin/bun /usr/local/bin/bunx \
&& which bun \
&& which bunx \

View File

@@ -62,10 +62,6 @@ FROM debian:bullseye-slim
ARG BUN_RUNTIME_TRANSPILER_CACHE_PATH=0
ENV BUN_RUNTIME_TRANSPILER_CACHE_PATH=${BUN_RUNTIME_TRANSPILER_CACHE_PATH}
# Ensure `bun install -g` works
ARG BUN_INSTALL_BIN=/usr/local/bin
ENV BUN_INSTALL_BIN=${BUN_INSTALL_BIN}
COPY docker-entrypoint.sh /usr/local/bin
COPY --from=build /usr/local/bin/bun /usr/local/bin/bun

View File

@@ -3,8 +3,6 @@ FROM debian:bullseye-slim AS build
# https://github.com/oven-sh/bun/releases
ARG BUN_VERSION=latest
# Node.js includes python3 for node-gyp, see https://github.com/oven-sh/bun/issues/9807
# Though, not on slim and alpine images.
RUN apt-get update -qq \
&& apt-get install -qq --no-install-recommends \
ca-certificates \
@@ -13,7 +11,6 @@ RUN apt-get update -qq \
gpg \
gpg-agent \
unzip \
python3 \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/* \
&& arch="$(dpkg --print-architecture)" \
@@ -66,10 +63,6 @@ COPY --from=build /usr/local/bin/bun /usr/local/bin/bun
ARG BUN_RUNTIME_TRANSPILER_CACHE_PATH=0
ENV BUN_RUNTIME_TRANSPILER_CACHE_PATH=${BUN_RUNTIME_TRANSPILER_CACHE_PATH}
# Ensure `bun install -g` works
ARG BUN_INSTALL_BIN=/usr/local/bin
ENV BUN_INSTALL_BIN=${BUN_INSTALL_BIN}
RUN groupadd bun \
--gid 1000 \
&& useradd bun \

View File

@@ -62,10 +62,6 @@ FROM gcr.io/distroless/base-nossl-debian11
ARG BUN_RUNTIME_TRANSPILER_CACHE_PATH=0
ENV BUN_RUNTIME_TRANSPILER_CACHE_PATH=${BUN_RUNTIME_TRANSPILER_CACHE_PATH}
# Ensure `bun install -g` works
ARG BUN_INSTALL_BIN=/usr/local/bin
ENV BUN_INSTALL_BIN=${BUN_INSTALL_BIN}
COPY --from=build /usr/local/bin/bun /usr/local/bin/
# Temporarily use the `build`-stage image binaries to create a symlink:

View File

@@ -58,18 +58,17 @@ Pass a path to the shared library and a map of symbols to import into `dlopen`:
```ts
import { dlopen, FFIType, suffix } from "bun:ffi";
const { i32 } = FFIType;
const path = `libadd.${suffix}`;
const lib = dlopen(path, {
add: {
args: [i32, i32],
returns: i32,
args: [FFIType.i32, FFIType.i32],
returns: FFIType.i32,
},
});
console.log(lib.symbols.add(1, 2));
lib.symbols.add(1, 2);
```
### Rust
@@ -77,7 +76,7 @@ console.log(lib.symbols.add(1, 2));
```rust
// add.rs
#[no_mangle]
pub extern "C" fn add(a: i32, b: i32) -> i32 {
pub extern "C" fn add(a: isize, b: isize) -> isize {
a + b
}
```
@@ -88,22 +87,6 @@ To compile:
$ rustc --crate-type cdylib add.rs
```
### C++
```c
#include <cstdint>
extern "C" int32_t add(int32_t a, int32_t b) {
return a + b;
}
```
To compile:
```bash
$ zig build-lib add.cpp -dynamic -lc -lc++
```
## FFI types
The following `FFIType` values are supported.

View File

@@ -6,12 +6,12 @@ Bun implements the following properties.
import.meta.dir; // => "/path/to/project"
import.meta.file; // => "file.ts"
import.meta.path; // => "/path/to/project/file.ts"
import.meta.url; // => "file:///path/to/project/file.ts"
import.meta.main; // `true` if this file is directly executed by `bun run`
// `false` otherwise
import.meta.resolve("zod"); // => "file:///path/to/project/node_modules/zod/index.js"
import.meta.resolveSync("zod")
// resolve an import specifier relative to the directory
```
{% table %}
@@ -28,18 +28,13 @@ import.meta.resolve("zod"); // => "file:///path/to/project/node_modules/zod/inde
---
- `import.meta.env`
- An alias to `process.env`.
---
- `import.meta.file`
- The name of the current file, e.g. `index.tsx`
---
- `import.meta.path`
- Absolute path to the current file, e.g. `/path/to/project/index.ts`. Equivalent to `__filename` in CommonJS modules (and Node.js)
- Absolute path to the current file, e.g. `/path/to/project/index.tx`. Equivalent to `__filename` in CommonJS modules (and Node.js)
---
@@ -48,22 +43,30 @@ import.meta.resolve("zod"); // => "file:///path/to/project/node_modules/zod/inde
---
- `import.meta.main`
- Indicates whether the current file is the entrypoint to the current `bun` process. Is the file being directly executed by `bun run` or is it being imported?
- `import.meta.url`
- A string url to the current file, e.g. `file:///path/to/project/index.tx`
---
- `import.meta.resolve`
- Resolve a module specifier (e.g. `"zod"` or `"./file.tsx"`) to a url. Equivalent to [`import.meta.resolve` in browsers](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/import.meta#resolve)
- `import.meta.main`
- `boolean` Indicates whether the current file is the entrypoint to the current `bun` process. Is the file being directly executed by `bun run` or is it being imported?
---
- `import.meta.env`
- An alias to `process.env`.
---
- `import.meta.resolve{Sync}`
- Resolve a module specifier (e.g. `"zod"` or `"./file.tsx"`) to an absolute path. While file would be imported if the specifier were imported from this file?
```ts
import.meta.resolve("zod");
// => "file:///path/to/project/node_modules/zod/index.ts"
import.meta.resolveSync("zod");
// => "/path/to/project/node_modules/zod/index.ts"
import.meta.resolveSync("./file.tsx");
// => "/path/to/project/file.tsx"
```
---
- `import.meta.url`
- A `string` url to the current file, e.g. `file:///path/to/project/index.ts`. Equivalent to [`import.meta.url` in browsers](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/import.meta#url)
{% /table %}

View File

@@ -186,7 +186,6 @@ proc.unref();
## Inter-process communication (IPC)
Bun supports a direct inter-process communication channel between two `bun` processes. To receive messages from a spawned Bun subprocess, specify an `ipc` handler.
{%callout%}
**Note** — This API is only compatible with other `bun` processes. Use `process.execPath` to get a path to the currently running `bun` executable.
{%/callout%}
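A minimal parent-side sketch (not part of the original doc hunk), assuming `Bun.spawn` accepts the `ipc` handler described above and that a `./child.ts` script exists; the filename is only illustrative:

```ts#parent.ts
// Spawn a bun child process and exchange messages over IPC.
const child = Bun.spawn([process.execPath, "child.ts"], {
  // Called whenever the child invokes process.send(...)
  ipc(message, childProc) {
    console.log("Received from child:", message);
  },
});

// Messages can also flow parent -> child; the child listens with process.on("message", ...)
child.send("Hello from parent");
```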
@@ -228,6 +227,8 @@ process.on("message", (message) => {
});
```
All messages are serialized using the JSC `serialize` API, which allows for the same set of [transferable types](https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Transferable_objects) supported by `postMessage` and `structuredClone`, including strings, typed arrays, streams, and objects.
```ts#child.ts
// send a string
process.send("Hello from child as string");
@@ -236,11 +237,6 @@ process.send("Hello from child as string");
process.send({ message: "Hello from child as object" });
```
The `ipcMode` option controls the underlying communication format between the two processes:
- `advanced`: (default) Messages are serialized using the JSC `serialize` API, which supports cloning [everything `structuredClone` supports](https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Structured_clone_algorithm). This does not support transferring ownership of objects.
- `json`: Messages are serialized using `JSON.stringify` and `JSON.parse`, which does not support as many object types as `advanced` does.
## Blocking API (`Bun.spawnSync()`)
Bun provides a synchronous equivalent of `Bun.spawn` called `Bun.spawnSync`. This is a blocking API that supports the same inputs and parameters as `Bun.spawn`. It returns a `SyncSubprocess` object, which differs from `Subprocess` in a few ways.

View File

@@ -115,7 +115,7 @@ Use `Bun.connect` to connect to a TCP server. Specify the server to connect to w
```ts
// The client
const socket = await Bun.connect({
const socket = Bun.connect({
hostname: "localhost",
port: 8080,
@@ -138,7 +138,7 @@ To require TLS, specify `tls: true`.
```ts
// The client
const socket = await Bun.connect({
const socket = Bun.connect({
// ... config
tls: true,
});
@@ -164,7 +164,7 @@ server.reload({
```
```ts#Client
const socket = await Bun.connect({ /* config */ })
const socket = Bun.connect({ /* config */ })
socket.reload({
data(){
// new 'data' handler

View File

@@ -261,12 +261,13 @@ This function is optimized for large input. On an M1X, it processes 480 MB/s -
20 GB/s, depending on how much data is being escaped and whether there is non-ascii
text. Non-string types will be converted to a string before escaping.
## `Bun.stringWidth()` ~6,756x faster `string-width` alternative
## `Bun.stringWidth()`
Get the column count of a string as it would be displayed in a terminal.
Supports ANSI escape codes, emoji, and wide characters.
```ts
Bun.stringWidth(input: string, options?: { countAnsiEscapeCodes?: boolean = false }): number
```
Example usage:
Returns the number of columns required to display a string. This is useful for aligning text in a terminal. By default, ANSI escape codes are removed before measuring the string. To include them, pass `{ countAnsiEscapeCodes: true }` as the second argument.
```ts
Bun.stringWidth("hello"); // => 5
@@ -274,131 +275,8 @@ Bun.stringWidth("\u001b[31mhello\u001b[0m"); // => 5
Bun.stringWidth("\u001b[31mhello\u001b[0m", { countAnsiEscapeCodes: true }); // => 12
```
This is useful for:
- Aligning text in a terminal
- Quickly checking if a string contains ANSI escape codes
- Measuring the width of a string in a terminal
Compared with the popular `string-width` npm package, `bun`'s implementation is > [100x faster](https://github.com/oven-sh/bun/blob/8abd1fb088bcf2e78bd5d0d65ba4526872d2ab61/bench/snippets/string-width.mjs#L22)
This API is designed to match the popular "string-width" package, so that
existing code can be easily ported to Bun and vice versa.
[In this benchmark](https://github.com/oven-sh/bun/blob/5147c0ba7379d85d4d1ed0714b84d6544af917eb/bench/snippets/string-width.mjs#L13), `Bun.stringWidth` is ~6,756x faster than the `string-width` npm package for input larger than about 500 characters. Big thanks to [sindresorhus](https://github.com/sindresorhus) for their work on `string-width`!
```ts
bun string-width.mjs
cpu: 13th Gen Intel(R) Core(TM) i9-13900
runtime: bun 1.0.29 (x64-linux)
benchmark time (avg) (min … max) p75 p99 p995
------------------------------------------------------------------------------------- -----------------------------
Bun.stringWidth 500 chars ascii 37.09 ns/iter (36.77 ns … 41.11 ns) 37.07 ns 38.84 ns 38.99 ns
node string-width.mjs
benchmark time (avg) (min … max) p75 p99 p995
------------------------------------------------------------------------------------- -----------------------------
npm/string-width 500 chars ascii 249,710 ns/iter (239,970 ns … 293,180 ns) 250,930 ns 276,700 ns 281,450 ns
```
To make `Bun.stringWidth` fast, we've implemented it in Zig using optimized SIMD instructions, accounting for Latin1, UTF-16, and UTF-8 encodings. It passes `string-width`'s tests.
{% details summary="View full benchmark" %}
As a reminder, 1 nanosecond (ns) is 1 billionth of a second. Here's a quick reference for converting between units:
| Unit | 1 Millisecond |
| ---- | ------------- |
| ns | 1,000,000 |
| µs | 1,000 |
| ms | 1 |
```js
bun string-width.mjs
cpu: 13th Gen Intel(R) Core(TM) i9-13900
runtime: bun 1.0.29 (x64-linux)
benchmark time (avg) (min … max) p75 p99 p995
------------------------------------------------------------------------------------- -----------------------------
Bun.stringWidth 5 chars ascii 16.45 ns/iter (16.27 ns … 19.71 ns) 16.48 ns 16.93 ns 17.21 ns
Bun.stringWidth 50 chars ascii 19.42 ns/iter (18.61 ns … 27.85 ns) 19.35 ns 21.7 ns 22.31 ns
Bun.stringWidth 500 chars ascii 37.09 ns/iter (36.77 ns … 41.11 ns) 37.07 ns 38.84 ns 38.99 ns
Bun.stringWidth 5,000 chars ascii 216.9 ns/iter (215.8 ns … 228.54 ns) 216.23 ns 228.52 ns 228.53 ns
Bun.stringWidth 25,000 chars ascii 1.01 µs/iter (1.01 µs … 1.01 µs) 1.01 µs 1.01 µs 1.01 µs
Bun.stringWidth 7 chars ascii+emoji 54.2 ns/iter (53.36 ns … 58.19 ns) 54.23 ns 57.55 ns 57.94 ns
Bun.stringWidth 70 chars ascii+emoji 354.26 ns/iter (350.51 ns … 363.96 ns) 355.93 ns 363.11 ns 363.96 ns
Bun.stringWidth 700 chars ascii+emoji 3.3 µs/iter (3.27 µs … 3.4 µs) 3.3 µs 3.4 µs 3.4 µs
Bun.stringWidth 7,000 chars ascii+emoji 32.69 µs/iter (32.22 µs … 45.27 µs) 32.7 µs 34.57 µs 34.68 µs
Bun.stringWidth 35,000 chars ascii+emoji 163.35 µs/iter (161.17 µs … 170.79 µs) 163.82 µs 169.66 µs 169.93 µs
Bun.stringWidth 8 chars ansi+emoji 66.15 ns/iter (65.17 ns … 69.97 ns) 66.12 ns 69.8 ns 69.87 ns
Bun.stringWidth 80 chars ansi+emoji 492.95 ns/iter (488.05 ns … 499.5 ns) 494.8 ns 498.58 ns 499.5 ns
Bun.stringWidth 800 chars ansi+emoji 4.73 µs/iter (4.71 µs … 4.88 µs) 4.72 µs 4.88 µs 4.88 µs
Bun.stringWidth 8,000 chars ansi+emoji 47.02 µs/iter (46.37 µs … 67.44 µs) 46.96 µs 49.57 µs 49.63 µs
Bun.stringWidth 40,000 chars ansi+emoji 234.45 µs/iter (231.78 µs … 240.98 µs) 234.92 µs 236.34 µs 236.62 µs
Bun.stringWidth 19 chars ansi+emoji+ascii 135.46 ns/iter (133.67 ns … 143.26 ns) 135.32 ns 142.55 ns 142.77 ns
Bun.stringWidth 190 chars ansi+emoji+ascii 1.17 µs/iter (1.16 µs … 1.17 µs) 1.17 µs 1.17 µs 1.17 µs
Bun.stringWidth 1,900 chars ansi+emoji+ascii 11.45 µs/iter (11.26 µs … 20.41 µs) 11.45 µs 12.08 µs 12.11 µs
Bun.stringWidth 19,000 chars ansi+emoji+ascii 114.06 µs/iter (112.86 µs … 120.06 µs) 114.25 µs 115.86 µs 116.15 µs
Bun.stringWidth 95,000 chars ansi+emoji+ascii 572.69 µs/iter (565.52 µs … 607.22 µs) 572.45 µs 604.86 µs 605.21 µs
```
```ts
node string-width.mjs
cpu: 13th Gen Intel(R) Core(TM) i9-13900
runtime: node v21.4.0 (x64-linux)
benchmark time (avg) (min … max) p75 p99 p995
-------------------------------------------------------------------------------------- -----------------------------
npm/string-width 5 chars ascii 3.19 µs/iter (3.13 µs … 3.48 µs) 3.25 µs 3.48 µs 3.48 µs
npm/string-width 50 chars ascii 20.09 µs/iter (18.93 µs … 435.06 µs) 19.49 µs 21.89 µs 22.59 µs
npm/string-width 500 chars ascii 249.71 µs/iter (239.97 µs … 293.18 µs) 250.93 µs 276.7 µs 281.45 µs
npm/string-width 5,000 chars ascii 6.69 ms/iter (6.58 ms … 6.76 ms) 6.72 ms 6.76 ms 6.76 ms
npm/string-width 25,000 chars ascii 139.57 ms/iter (137.17 ms … 143.28 ms) 140.49 ms 143.28 ms 143.28 ms
npm/string-width 7 chars ascii+emoji 3.7 µs/iter (3.62 µs … 3.94 µs) 3.73 µs 3.94 µs 3.94 µs
npm/string-width 70 chars ascii+emoji 23.93 µs/iter (22.44 µs … 331.2 µs) 23.15 µs 25.98 µs 30.2 µs
npm/string-width 700 chars ascii+emoji 251.65 µs/iter (237.78 µs … 444.69 µs) 252.92 µs 325.89 µs 354.08 µs
npm/string-width 7,000 chars ascii+emoji 4.95 ms/iter (4.82 ms … 5.19 ms) 5 ms 5.04 ms 5.19 ms
npm/string-width 35,000 chars ascii+emoji 96.93 ms/iter (94.39 ms … 102.58 ms) 97.68 ms 102.58 ms 102.58 ms
npm/string-width 8 chars ansi+emoji 3.92 µs/iter (3.45 µs … 4.57 µs) 4.09 µs 4.57 µs 4.57 µs
npm/string-width 80 chars ansi+emoji 24.46 µs/iter (22.87 µs … 4.2 ms) 23.54 µs 25.89 µs 27.41 µs
npm/string-width 800 chars ansi+emoji 259.62 µs/iter (246.76 µs … 480.12 µs) 258.65 µs 349.84 µs 372.55 µs
npm/string-width 8,000 chars ansi+emoji 5.46 ms/iter (5.41 ms … 5.57 ms) 5.48 ms 5.55 ms 5.57 ms
npm/string-width 40,000 chars ansi+emoji 108.91 ms/iter (107.55 ms … 109.5 ms) 109.25 ms 109.5 ms 109.5 ms
npm/string-width 19 chars ansi+emoji+ascii 6.53 µs/iter (6.35 µs … 6.75 µs) 6.54 µs 6.75 µs 6.75 µs
npm/string-width 190 chars ansi+emoji+ascii 55.52 µs/iter (52.59 µs … 352.73 µs) 54.19 µs 80.77 µs 167.21 µs
npm/string-width 1,900 chars ansi+emoji+ascii 701.71 µs/iter (653.94 µs … 893.78 µs) 715.3 µs 855.37 µs 872.9 µs
npm/string-width 19,000 chars ansi+emoji+ascii 27.19 ms/iter (26.89 ms … 27.41 ms) 27.28 ms 27.41 ms 27.41 ms
npm/string-width 95,000 chars ansi+emoji+ascii 3.68 s/iter (3.66 s … 3.7 s) 3.69 s 3.7 s 3.7 s
```
{% /details %}
TypeScript definition:
```ts
namespace Bun {
export function stringWidth(
/**
* The string to measure
*/
input: string,
options?: {
/**
* If `true`, count ANSI escape codes as part of the string width. If `false`, ANSI escape codes are ignored when calculating the string width.
*
* @default false
*/
countAnsiEscapeCodes?: boolean;
/**
* When it's ambiguous and `true`, count emoji as 1 character wide. If `false`, emoji are counted as 2 characters wide.
*
* @default true
*/
ambiguousIsNarrow?: boolean;
},
): number;
}
```
<!-- ## `Bun.enableANSIColors()` -->
@@ -635,7 +513,7 @@ Bun.resolveSync("zod", "/path/to/project");
// => "/path/to/project/node_modules/zod/index.ts"
```
To resolve relative to the current working directory, pass `process.cwd()` or `"."` as the root.
To resolve relative to the current working directory, pass `process.cwd` or `"."` as the root.
```ts
Bun.resolveSync("./foo.ts", process.cwd());

View File

@@ -26,31 +26,12 @@ All imported files and packages are bundled into the executable, along with a co
**Note** — Currently, the `--compile` flag can only accept a single entrypoint at a time and does not support the following flags:
- `--outdir` — use `outfile` instead.
- `--external`
- `--splitting`
- `--public-path`
{% /callout %}
## Deploying to production
Compiled executables reduce memory usage and improve Bun's start time.
Normally, Bun reads and transpiles JavaScript and TypeScript files on `import` and `require`. This is part of what makes so much of Bun "just work", but it's not free. It costs time and memory to read files from disk, resolve file paths, parse, transpile, and print source code.
With compiled executables, you can move that cost from runtime to build-time.
When deploying to production, we recommend the following:
```sh
bun build --compile --minify --sourcemap ./path/to/my/app.ts --outfile myapp
```
**What do these flags do?**
The `--minify` argument optimizes the size of the transpiled output code. If you have a large application, this can save megabytes of space. For smaller applications, it might still improve start time a little.
The `--sourcemap` argument embeds a sourcemap compressed with zstd, so that errors & stacktraces point to their original locations instead of the transpiled location. Bun will automatically decompress & resolve the sourcemap when an error occurs.
## SQLite
You can use `bun:sqlite` imports with `bun build --compile`.
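As a rough sketch (the database filename and query here are hypothetical), an app like the following can be compiled with `bun build --compile ./app.ts --outfile myapp`:

```ts
// app.ts
import { Database } from "bun:sqlite";

const db = new Database("./my.db"); // hypothetical database file
const row = db.query("SELECT 1 AS one").get();
console.log(row); // { one: 1 }
```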

View File

@@ -156,7 +156,7 @@ Like the Bun runtime, the bundler supports an array of file types out of the box
---
- `.js` `.jsx`, `.cjs` `.mjs` `.mts` `.cts` `.ts` `.tsx`
- Uses Bun's built-in transpiler to parse the file and transpile TypeScript/JSX syntax to vanilla JavaScript. The bundler executes a set of default transforms including dead code elimination and tree shaking. At the moment Bun does not attempt to down-convert syntax; if you use recent ECMAScript syntax, that will be reflected in the bundled code.
- Uses Bun's built-in transpiler to parse the file and transpile TypeScript/JSX syntax to vanilla JavaScript. The bundler executes a set of default transforms, including dead code elimination, tree shaking, and environment variable inlining. At the moment Bun does not attempt to down-convert syntax; if you use recent ECMAScript syntax, that will be reflected in the bundled code.
---

View File

@@ -10,7 +10,7 @@ Bun uses the file extension to determine which built-in _loader_ should be used
**JavaScript**. Default for `.cjs` and `.mjs`.
Parses the code and applies a set of default transforms like dead-code elimination and tree shaking. Note that Bun does not attempt to down-convert syntax at the moment.
Parses the code and applies a set of default transforms, like dead-code elimination, tree shaking, and environment variable inlining. Note that Bun does not attempt to down-convert syntax at the moment.
### `jsx`
@@ -178,7 +178,7 @@ In the bundler, `.node` files are handled using the [`file`](#file) loader.
In the runtime and bundler, SQLite databases can be directly imported. This will load the database using [`bun:sqlite`](/docs/api/sqlite.md).
```ts
import db from "./my.db" with { type: "sqlite" };
import db from "./my.db" with {type: "sqlite"};
```
This is only supported when the `target` is `bun`.
@@ -189,21 +189,21 @@ You can change this behavior with the `"embed"` attribute:
```ts
// embed the database into the bundle
import db from "./my.db" with { type: "sqlite", embed: "true" };
import db from "./my.db" with {type: "sqlite", embed: "true"};
```
When using a [standalone executable](/docs/bundler/executables), the database is embedded into the single-file executable.
Otherwise, the database to embed is copied into the `outdir` with a hashed filename.
### `sh` loader
### `bunshell` loader
**Bun Shell loader**. Default for `.sh` files
**Bun Shell loader**. Default for `.bun.sh` files
This loader is used to parse [Bun Shell](/docs/runtime/shell) scripts. It's only supported when starting Bun itself, so it's not available in the bundler or in the runtime.
This loader is used to parse [Bun Shell](/docs/runtime/shell) scripts. It's only supported when starting bun itself, so it's not available in the bundler or in the runtime.
```sh
$ bun run ./script.sh
$ bun run ./script.bun.sh
```
### `file`

View File

@@ -7,7 +7,7 @@ There are a few behavioral differences to note.
## Performance
With a performance-minded API coupled with the extensively optimized Zig-based JS/TS parser, Bun's bundler is 1.75x faster than esbuild on esbuild's [three.js benchmark](https://github.com/oven-sh/bun/tree/main/bench/bundle).
With an performance-minded API coupled with the extensively optimized Zig-based JS/TS parser, Bun's bundler is 1.75x faster than esbuild on esbuild's [three.js benchmark](https://github.com/oven-sh/bun/tree/main/bench/bundle).
{% image src="/images/bundler-speed.png" caption="Bundling 10 copies of three.js from scratch, with sourcemaps and minification" /%}

View File

@@ -77,4 +77,4 @@ Bun automatically loads environment variables from `.env` files before running a
2. `NODE_ENV` === `"production"` ? `.env.production` : `.env.development`
3. `.env`
To debug environment variables, run `bun --print process.env` to view a list of resolved environment variables. -->
To debug environment variables, run `bun run env` to view a list of resolved environment variables. -->

View File

@@ -195,7 +195,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout repo
uses: actions/checkout@v4
uses: actions/checkout@v3
- name: Install bun
uses: oven-sh/setup-bun@v1
- name: Install dependencies

View File

@@ -75,6 +75,14 @@ $ bun run dev --watch # ❌ don't do this
Flags that occur at the end of the command will be ignored and passed through to the `"dev"` script itself.
{% /callout %}
### `--smol`
In memory-constrained environments, use the `--smol` flag to reduce memory usage at a cost to performance.
```bash
$ bun --smol run index.tsx
```
## Run a `package.json` script
{% note %}
@@ -87,7 +95,7 @@ $ bun [bun flags] run <script> [script flags]
Your `package.json` can define a number of named `"scripts"` that correspond to shell commands.
```json
```jsonc
{
// ... other fields
"scripts": {
@@ -150,32 +158,3 @@ By default, Bun respects this shebang and executes the script with `node`. Howev
```bash
$ bun run --bun vite
```
## `bun run -` to pipe code from stdin
`bun run -` lets you read JavaScript, TypeScript, TSX, or JSX from stdin and execute it without writing to a temporary file first.
```bash
$ echo "console.log('Hello')" | bun run -
Hello
```
You can also use `bun run -` to redirect files into Bun. For example, to run a `.js` file as if it were a `.ts` file:
```bash
$ echo "console.log!('This is TypeScript!' as any)" > secretly-typescript.js
$ bun run - < secretly-typescript.js
This is TypeScript!
```
For convenience, all code is treated as TypeScript with JSX support when using `bun run -`.
## `bun run --smol`
In memory-constrained environments, use the `--smol` flag to reduce memory usage at a cost to performance.
```bash
$ bun --smol run index.tsx
```
This causes the garbage collector to run more frequently, which can slow down execution. However, it can be useful in environments with limited memory. Bun automatically adjusts the garbage collector's heap size based on the available memory (accounting for cgroups and other memory limits) with and without the `--smol` flag, so this is mostly useful for cases where you want to make the heap size grow more slowly.

View File

@@ -1,33 +0,0 @@
---
name: fetch with unix domain sockets in Bun
---
In Bun, the `unix` option in `fetch()` lets you send HTTP requests over a [unix domain socket](https://en.wikipedia.org/wiki/Unix_domain_socket).
```ts
const unix = "/var/run/docker.sock";
const response = await fetch("http://localhost/info", { unix });
const body = await response.json();
console.log(body); // { ... }
```
---
The `unix` option is a string that specifies the local file path to a unix domain socket. The `fetch()` function will use the socket to send the request to the server instead of using a TCP network connection. `https` is also supported by using the `https://` protocol in the URL instead of `http://`.
To send a `POST` request to an API endpoint over a unix domain socket:
```ts
const response = await fetch("https://hostname/a/path", {
unix: "/var/run/path/to/unix.sock",
method: "POST",
body: JSON.stringify({ message: "Hello from Bun!" }),
headers: {
"Content-Type": "application/json",
},
});
const body = await response.json();
```

View File

@@ -2,7 +2,7 @@
name: Common HTTP server usage
---
This starts an HTTP server listening on port `3000`. It demonstrates basic routing with a number of common responses and also handles POST data from standard forms or as JSON.
This starts an HTTP server listening on port `3000`. It demonstates basic routing with a number of common responses and also handles POST data from standard forms or as JSON.
See [`Bun.serve`](/docs/api/http) for details.

View File

@@ -12,7 +12,7 @@ jobs:
runs-on: ubuntu-latest
steps:
# ...
- uses: actions/checkout@v4
- uses: actions/checkout@v3
+ - uses: oven-sh/setup-bun@v1
# run any `bun` or `bunx` command

View File

@@ -18,10 +18,10 @@ Bun.env.API_TOKEN; // => "secret"
---
To print all currently-set environment variables to the command line, run `bun --print process.env`. This is useful for debugging.
To print all currently-set environment variables to the command line, run `bun run env`. This is useful for debugging.
```sh
$ bun --print process.env
$ bun run env
BAZ=stuff
FOOBAR=aaaaaa
<lots more lines>

View File

@@ -1,40 +0,0 @@
---
name: Run a Shell Command
---
Bun Shell is a cross-platform bash-like shell built into Bun.
It provides a simple way to run shell commands in JavaScript and TypeScript. To get started, import the `$` function from the `bun` package and use it to run shell commands.
```ts#foo.ts
import { $ } from "bun";
await $`echo Hello, world!`; // => "Hello, world!"
```
---
The `$` function is a tagged template literal that runs the command and returns a promise that resolves with the command's output.
```ts#foo.ts
import { $ } from "bun";
const output = await $`ls -l`.text();
console.log(output);
```
---
To get each line of the output as an array, use the `lines` method.
```ts#foo.ts
import { $ } from "bun";
for await (const line of $`ls -l`.lines()) {
console.log(line);
}
```
---
See [Docs > API > Shell](/docs/runtime/shell) for complete documentation.

View File

@@ -16,7 +16,7 @@ Below is the full set of recommended `compilerOptions` for a Bun project. With t
{
"compilerOptions": {
// Enable latest features
"lib": ["ESNext","DOM"],
"lib": ["ESNext"],
"target": "ESNext",
"module": "ESNext",
"moduleDetection": "force",

View File

@@ -4,7 +4,7 @@ name: Migrate from Jest to Bun's test runner
In many cases, Bun's test runner can run Jest test suites with no code changes. Just run `bun test` instead of `npx jest`, `yarn test`, etc.
```sh
```sh-diff
- $ npx jest
- $ yarn test
+ $ bun test
@@ -57,7 +57,7 @@ Replace `bail` in your Jest config with the `--bail` CLI flag.
- };
``` -->
```sh
```sh-diff
$ bun test --bail 3
```

View File

@@ -1,15 +0,0 @@
---
name: Get the path to an executable bin file
---
`Bun.which` is a utility function to find the absolute path of an executable file. It is similar to the `which` command in Unix-like systems.
```ts#foo.ts
Bun.which("sh"); // => "/bin/sh"
Bun.which("notfound"); // => null
Bun.which("bun"); // => "/home/user/.bun/bin/bun"
```
---
See [Docs > API > Utils](/docs/api/utils#bun-which) for complete documentation.

View File

@@ -0,0 +1,28 @@
---
name: Upgrade an HTTP request to a WebSocket connection
---
Inside `fetch`, use the `server.upgrade()` function to upgrade an incoming `Request` to a WebSocket connection. Bun automatically returns a 101 Switching Protocols response if the upgrade succeeds.
Refer to the [WebSocket docs](/docs/api/websockets) for more information on building WebSocket servers.
```ts
const server = Bun.serve<{ authToken: string }>({
fetch(req, server) {
const success = server.upgrade(req);
if (success) {
// Bun automatically returns a 101 Switching Protocols
// if the upgrade succeeds
return undefined;
}
// handle HTTP request normally
return new Response("Hello world!");
},
websocket: {
// define websocket handlers
},
});
console.log(`Listening on localhost:${server.port}`);
```
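The `websocket` object above is left as a stub; here is a minimal sketch of the handlers it might define (the echo behavior and log messages are illustrative, the handler names follow the `Bun.serve` websocket API):

```ts
const server = Bun.serve({
  fetch(req, server) {
    // attempt the upgrade; fall back to a plain HTTP response
    return server.upgrade(req) ? undefined : new Response("Hello world!");
  },
  websocket: {
    open(ws) {
      console.log("client connected");
    },
    message(ws, message) {
      ws.send(`echo: ${message}`); // echo incoming messages back to the client
    },
    close(ws, code, reason) {
      console.log("client disconnected", code, reason);
    },
  },
});
```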

View File

@@ -1,4 +1,4 @@
Bun is an all-in-one toolkit for JavaScript and TypeScript apps. It ships as a single executable called `bun`.
Bun is an all-in-one toolkit for JavaScript and TypeScript apps. It ships as a single executable called `bun`.
At its core is the _Bun runtime_, a fast JavaScript runtime designed as a drop-in replacement for Node.js. It's written in Zig and powered by JavaScriptCore under the hood, dramatically reducing startup times and memory usage.
@@ -6,18 +6,18 @@ At its core is the _Bun runtime_, a fast JavaScript runtime designed as a drop-i
$ bun run index.tsx # TS and JSX supported out of the box
```
The `bun` command-line tool also implements a test runner, script runner, and Node.js-compatible package manager, all significantly faster than existing tools and usable in existing Node.js projects with little to no changes necessary.
The `bun` command-line tool also implements a test runner, script runner, and Node.js-compatible package manager, all significantly faster than existing tools and usable in existing Node.js projects with little to no changes necessary.
```bash
$ bun run start # run the `start` script
$ bun install <pkg> # install a package
$ bun install <pkg> # install a package
$ bun build ./index.tsx # bundle a project for browsers
$ bun test # run tests
$ bunx cowsay 'Hello, world!' # execute a package
```
{% callout type="note" %}
**Bun is still under development.** Use it to speed up your development workflows or run simpler production code in resource-constrained environments like serverless functions. We're working on more complete Node.js compatibility and integration with existing frameworks. Join the [Discord](https://bun.sh/discord) and watch the [GitHub repository](https://github.com/oven-sh/bun) to keep tabs on future releases.
**Bun is still under development.** Use it to speed up your development workflows or run simpler production code in resource-constrained environments like serverless functions. We're working on more complete Node.js compatibility and integration with existing frameworks. Join the [Discord](https://bun.sh/discord) and watch the [GitHub repository](https://github.com/oven-sh/bun) to keep tabs on future releases.
{% /callout %}
Get started with one of the quick links below, or read on to learn more about Bun.

View File

@@ -1,6 +1,6 @@
All packages downloaded from the registry are stored in a global cache at `~/.bun/install/cache`. They are stored in subdirectories named like `${name}@${version}`, so multiple versions of a package can be cached.
{% details summary="Configuring cache behavior" (bunfig.toml) %}
{% details summary="Configuring cache behavior" %}
```toml
[install.cache]

View File

@@ -22,7 +22,8 @@ $ npm install -g bun # the last `npm` command you'll ever need
```
```bash#Homebrew
$ brew install oven-sh/bun/bun # for macOS and Linux
$ brew tap oven-sh/bun # for macOS and Linux
$ brew install bun
```
```bash#Docker
@@ -42,20 +43,21 @@ $ proto install bun
Bun requires a minimum of Windows 10 version 1809
{% /callout %}
Bun provides a _limited, experimental_ native build for Windows. It is recommended to use Bun within [Windows Subsystem for Linux](https://learn.microsoft.com/en-us/windows/wsl/install) and follow the above instructions. To help catch bugs, the experimental build enables many debugging assertions, which will make the binary slower than what the stable version will be.
To install, paste this into a terminal:
{% codetabs %}
```powershell#PowerShell/cmd.exe
> powershell -c "irm bun.sh/install.ps1|iex"
```
```powershell#npm
> npm install -g bun # the last `npm` command you'll ever need
# WARNING: No stability is guaranteed on the experimental Windows builds
powershell -c "irm bun.sh/install.ps1|iex"
```
```powershell#Scoop
> scoop install bun
# WARNING: No stability is guaranteed on the experimental Windows builds
scoop bucket add versions
scoop install bun-canary
```
{% /codetabs %}
@@ -144,8 +146,6 @@ $ bun upgrade
{% callout %}
**Homebrew users** — To avoid conflicts with Homebrew, use `brew upgrade bun` instead.
**Scoop users** — To avoid conflicts with Scoop, use `scoop upgrade bun` instead.
**proto users** - Use `proto install bun --pin` instead.
{% /callout %}
@@ -233,15 +233,11 @@ If you need to remove Bun from your system, use the following commands.
$ rm -rf ~/.bun # for macOS, Linux, and WSL
```
```powershell#Windows
> powershell -c ~\.bun\uninstall.ps1
```bash#Windows
$ Remove-Item ~\.bun -Recurse
```
```powershell#Scoop
> scoop uninstall bun
```
```bash#npm
```bash#NPM
$ npm uninstall -g bun
```

View File

@@ -39,7 +39,7 @@ I recommend using VSCode through SSH instead of Tunnels or the Tailscale extensi
By default, running unverified scripts are blocked.
```ps1
> Set-ExecutionPolicy -Scope CurrentUser -ExecutionPolicy Unrestricted
Set-ExecutionPolicy -Scope CurrentUser -ExecutionPolicy Unrestricted
```
### System Dependencies
@@ -47,7 +47,7 @@ By default, running unverified scripts are blocked.
- Bun 1.1 or later. We use Bun to run its own code generators.
```ps1
> irm bun.sh/install.ps1 | iex
irm bun.sh/install.ps1 | iex
```
- [Visual Studio](https://visualstudio.microsoft.com) with the "Desktop Development with C++" workload.
@@ -70,28 +70,28 @@ The Zig compiler is automatically downloaded, installed, and updated by the buil
[Scoop](https://scoop.sh) can be used to install these remaining tools easily:
```ps1
> irm https://get.scoop.sh | iex
> scoop install nodejs-lts go rust nasm ruby perl
# scoop seems to be buggy if you install llvm and the rest at the same time
> scoop llvm@16.0.4
irm https://get.scoop.sh | iex
scoop install nodejs-lts go rust nasm ruby perl
scoop llvm@16.0.4 # scoop bug if you install llvm and the rest at the same time
```
If you intend on building WebKit locally (optional), you should install these packages:
```ps1
> scoop install make cygwin python
scoop install make cygwin python
```
From here on out, it is **expected you use a PowerShell Terminal with `.\scripts\env.ps1` sourced**. This script is available in the Bun repository and can be loaded by executing it:
```ps1
> .\scripts\env.ps1
.\scripts\env.ps1
```
To verify, you can check for an MSVC-only command line such as `mt.exe`
```ps1
> Get-Command mt
Get-Command mt
```
{% callout %}
@@ -101,24 +101,24 @@ It is not recommended to install `ninja` / `cmake` into your global path, becaus
## Building
```ps1
> bun install
bun install
> .\scripts\env.ps1
> .\scripts\update-submodules.ps1 # this syncs git submodule state
> .\scripts\all-dependencies.ps1 # this builds all dependencies
> .\scripts\make-old-js.ps1 # runs some old code generators
.\scripts\env.ps1
.\scripts\update-submodules.ps1 # this syncs git submodule state
.\scripts\all-dependencies.ps1 # this builds all dependencies
.\scripts\make-old-js.ps1 # runs some old code generators
# Configure build environment
> cmake -Bbuild -GNinja -DCMAKE_BUILD_TYPE=Debug
cmake -Bbuild -GNinja -DCMAKE_BUILD_TYPE=Debug
# Build bun
> ninja -Cbuild
ninja -Cbuild
```
If this was successful, you should have a `bun-debug.exe` in the `build` folder.
```ps1
> .\build\bun-debug.exe --revision
.\build\bun-debug.exe --revision
```
You should add this to `$Env:PATH`. The simplest way to do so is to open the start menu, type "Path", and then navigate the environment variables menu to add `C:\.....\bun\build` to the user environment variable `PATH`. You should then restart your editor (if it still does not update, log out and log back in).
@@ -134,15 +134,15 @@ You can run the test suite either using `bun test`, or by using the wrapper scri
```ps1
# Setup
> bun i --cwd packages\bun-internal-test
bun i --cwd packages\bun-internal-test
# Run the entire test suite with reporter
# the package.json script "test" uses "build/bun-debug.exe" by default
> bun run test
bun run test
# Run an individual test file:
> bun-debug test node\fs
> bun-debug test "C:\bun\test\js\bun\resolve\import-meta.test.js"
bun-debug test node\fs
bun-debug test "C:\bun\test\js\bun\resolve\import-meta.test.js"
```
## Troubleshooting

View File

@@ -13,7 +13,7 @@ $ brew install automake ccache cmake coreutils gnu-sed go icu4c libiconv libtool
```
```bash#Ubuntu/Debian
$ sudo apt install curl wget lsb-release software-properties-common cargo ccache cmake git golang libtool ninja-build pkg-config rustc ruby-full xz-utils
$ sudo apt install cargo ccache cmake git golang libtool ninja-build pkg-config rustc ruby-full xz-utils
```
```bash#Arch
@@ -24,31 +24,34 @@ $ sudo pacman -S base-devel ccache cmake git go libiconv libtool make ninja pkg-
$ sudo dnf install cargo ccache cmake git golang libtool ninja-build pkg-config rustc ruby libatomic-static libstdc++-static sed unzip which libicu-devel 'perl(Math::BigInt)'
```
```bash#openSUSE Tumbleweed
$ sudo zypper install go cmake ninja automake git rustup && rustup toolchain install stable
```
{% /codetabs %}
> **Note**: The Zig compiler is automatically installed and updated by the build scripts. Manual installation is not required.
Before starting, you will need to already have a release build of Bun installed, as we use our bundler to transpile and minify our code, as well as for code generation scripts.
{% codetabs %}
```bash#Native
$ curl -fsSL https://bun.sh/install | bash
$ curl -fsSL https://bun.sh/install | bash # for macOS, Linux, and WSL
```
```bash#npm
$ npm install -g bun
$ npm install -g bun # the last `npm` command you'll ever need
```
```bash#Homebrew
$ brew tap oven-sh/bun
$ brew tap oven-sh/bun # for macOS and Linux
$ brew install bun
```
```bash#Docker
$ docker pull oven/bun
$ docker run --rm --init --ulimit memlock=-1:-1 oven/bun
```
```bash#proto
$ proto install bun
```
{% /codetabs %}
## Install LLVM
@@ -76,10 +79,6 @@ $ sudo dnf copr enable -y @fedora-llvm-team/llvm-snapshots
$ sudo dnf install llvm clang lld
```
```bash#openSUSE Tumbleweed
$ sudo zypper install clang16 lld16 llvm16
```
{% /codetabs %}
If none of the above solutions apply, you will have to install it [manually](https://github.com/llvm/llvm-project/releases/tag/llvmorg-16.0.6).
@@ -137,14 +136,12 @@ $ cmake -S . -B build -G Ninja -DCMAKE_BUILD_TYPE=Debug
$ ninja -C build # 'bun run build' runs just this
```
Advanced users can pass CMake flags to customize the build.
Advanced uses can pass CMake flags to customize the build.
## VSCode
VSCode is the recommended IDE for working on Bun, as the repository is already configured for it. Once the project is open, you can run `Extensions: Show Recommended Extensions` to install the recommended extensions for Zig and C++. ZLS is automatically configured.
If you use a different editor, make sure that you tell ZLS to use the automatically installed Zig compiler, which is located at `./.cache/zig/zig` (`zig.exe` on Windows).
## Code generation scripts
{% callout %}
@@ -307,7 +304,8 @@ $ xcode-select --install
Bun defaults to linking `libatomic` statically, as not all systems have it. If you are building on a distro that does not have a static libatomic available, you can run the following command to enable dynamic linking:
```bash
$ bun setup -DUSE_STATIC_LIBATOMIC=OFF
$ cmake -Bbuild -GNinja -DUSE_STATIC_LIBATOMIC=ON
$ ninja -Cbuild
```
The built version of Bun may not work on other systems if compiled this way.

View File

@@ -30,20 +30,17 @@ Click the link in the right column to jump to the associated documentation.
---
- File I/O
- [`Bun.file`](/docs/api/file-io#reading-files-bun-file)
[`Bun.write`](/docs/api/file-io#writing-files-bun-write)
- [`Bun.file`](/docs/api/file-io#reading-files-bun-file) [`Bun.write`](/docs/api/file-io#writing-files-bun-write)
---
- Child processes
- [`Bun.spawn`](/docs/api/spawn#spawn-a-process-bun-spawn)
[`Bun.spawnSync`](/docs/api/spawn#blocking-api-bun-spawnsync)
- [`Bun.spawn`](/docs/api/spawn#spawn-a-process-bun-spawn) [`Bun.spawnSync`](/docs/api/spawn#blocking-api-bun-spawnsync)
---
- TCP
- [`Bun.listen`](/docs/api/tcp#start-a-server-bun-listen)
[`Bun.connect`](/docs/api/tcp#start-a-server-bun-listen)
- [`Bun.listen`](/docs/api/tcp#start-a-server-bun-listen) [`Bun.connect`](/docs/api/tcp#start-a-server-bun-listen)
---
@@ -63,8 +60,7 @@ Click the link in the right column to jump to the associated documentation.
---
- Hashing
- [`Bun.hash`](/docs/api/hashing#bun-hash)
[`Bun.CryptoHasher`](/docs/api/hashing#bun-cryptohasher)
- [`Bun.hash`](/docs/api/hashing#bun-hash) [`Bun.CryptoHasher`](/docs/api/hashing#bun-cryptohasher)
---
@@ -104,26 +100,6 @@ Click the link in the right column to jump to the associated documentation.
---
- Utilities
- [`Bun.version`](/docs/api/utils#bun-version)
[`Bun.revision`](/docs/api/utils#bun-revision)
[`Bun.env`](/docs/api/utils#bun-env)
[`Bun.main`](/docs/api/utils#bun-main)
[`Bun.sleep()`](/docs/api/utils#bun-sleep)
[`Bun.sleepSync()`](/docs/api/utils#bun-sleepsync)
[`Bun.which()`](/docs/api/utils#bun-which)
[`Bun.peek()`](/docs/api/utils#bun-peek)
[`Bun.openInEditor()`](/docs/api/utils#bun-openineditor)
[`Bun.deepEquals()`](/docs/api/utils#bun-deepequals)
[`Bun.escapeHTML()`](/docs/api/utils#bun-escapehtml)
[`Bun.fileURLToPath()`](/docs/api/utils#bun-fileurltopath)
[`Bun.pathToFileURL()`](/docs/api/utils#bun-pathtofileurl)
[`Bun.gzipSync()`](/docs/api/utils#bun-gzipsync)
[`Bun.gunzipSync()`](/docs/api/utils#bun-gunzipsync)
[`Bun.deflateSync()`](/docs/api/utils#bun-deflatesync)
[`Bun.inflateSync()`](/docs/api/utils#bun-inflatesync)
[`Bun.inspect()`](/docs/api/utils#bun-inspect)
[`Bun.nanoseconds()`](/docs/api/utils#bun-nanoseconds)
[`Bun.readableStreamTo*()`](/docs/api/utils#bun-readablestreamto)
[`Bun.resolveSync()`](/docs/api/utils#bun-resolvesync)
- [`Bun.version`](/docs/api/utils#bun-version) [`Bun.revision`](/docs/api/utils#bun-revision) [`Bun.env`](/docs/api/utils#bun-env) [`Bun.main`](/docs/api/utils#bun-main) [`Bun.sleep()`](/docs/api/utils#bun-sleep) [`Bun.sleepSync()`](/docs/api/utils#bun-sleepsync) [`Bun.which()`](/docs/api/utils#bun-which) [`Bun.peek()`](/docs/api/utils#bun-peek) [`Bun.openInEditor()`](/docs/api/utils#bun-openineditor) [`Bun.deepEquals()`](/docs/api/utils#bun-deepequals) [`Bun.escapeHTML()`](/docs/api/utils#bun-escapehtml) [`Bun.fileURLToPath()`](/docs/api/utils#bun-fileurltopath) [`Bun.pathToFileURL()`](/docs/api/utils#bun-pathtofileurl) [`Bun.gzipSync()`](/docs/api/utils#bun-gzipsync) [`Bun.gunzipSync()`](/docs/api/utils#bun-gunzipsync) [`Bun.deflateSync()`](/docs/api/utils#bun-deflatesync) [`Bun.inflateSync()`](/docs/api/utils#bun-inflatesync) [`Bun.inspect()`](/docs/api/utils#bun-inspect) [`Bun.nanoseconds()`](/docs/api/utils#bun-nanoseconds) [`Bun.readableStreamTo*()`](/docs/api/utils#bun-readablestreamto) [`Bun.resolveSync()`](/docs/api/utils#bun-resolvesync)
{% /table %}

View File

@@ -426,94 +426,4 @@ editor = "code"
# - "nvim", "neovim"
# - "vim","vi"
# - "emacs"
```
-->
## `bun run`
The `bun run` command can be configured under the `[run]` section. These settings apply to the `bun run` command and to the `bun` command when running a file, executable, or script.
Currently, `bunfig.toml` isn't always automatically loaded for `bun run` in a local project (it does check for a global `bunfig.toml`), so you might still need to pass `-c` or `-c=bunfig.toml` to use these settings.
### `run.shell` - use the system shell or Bun's shell
The shell to use when running package.json scripts via `bun run` or `bun`. On Windows, this defaults to `"bun"` and on other platforms it defaults to `"system"`.
To always use the system shell instead of Bun's shell (the default everywhere except Windows):
```toml
[run]
# default outside of Windows
shell = "system"
```
To always use Bun's shell instead of the system shell:
```toml
[run]
# default on Windows
shell = "bun"
```
### `run.bun` - auto alias `node` to `bun`
When `true`, this prepends `$PATH` with a `node` symlink that points to the `bun` binary for all scripts or executables invoked by `bun run` or `bun`.
This means that if you have a script that runs `node`, it will actually run `bun` instead, without needing to change your script. This works recursively, so if your script runs another script that runs `node`, it will also run `bun` instead. This applies to shebangs as well, so if you have a script with a shebang that points to `node`, it will actually run `bun` instead.
By default, this is enabled if `node` is not already in your `$PATH`.
```toml
[run]
# equivalent to `bun --bun` for all `bun run` commands
bun = true
```
You can test this by running:
```sh
$ bun --bun which node # /path/to/bun
$ bun which node # /path/to/node
```
This option is equivalent to prefixing all `bun run` commands with `--bun`:
```sh
bun --bun run dev
bun --bun dev
bun run --bun dev
```
If set to `false`, this will disable the `node` symlink.
### `run.silent` - suppress reporting the command being run
When `true`, suppresses the output of the command being run by `bun run` or `bun`.
```toml
[run]
silent = true
```
Without this option, the command being run will be printed to the console:
```sh
$ bun run dev
> $ echo "Running \"dev\"..."
Running "dev"...
```
With this option, the command being run will not be printed to the console:
```sh
$ bun run dev
Running "dev"...
```
This is equivalent to passing `--silent` to all `bun run` commands:
```sh
bun --silent run dev
bun --silent dev
bun run --silent dev
```
``` -->

View File

@@ -98,10 +98,10 @@ Bun.env.API_TOKEN; // => "secret"
import.meta.env.API_TOKEN; // => "secret"
```
To print all currently-set environment variables to the command line, run `bun --print process.env`. This is useful for debugging.
To print all currently-set environment variables to the command line, run `bun run env`. This is useful for debugging.
```sh
$ bun --print process.env
$ bun run env
BAZ=stuff
FOOBAR=aaaaaa
<lots more lines>
@@ -163,16 +163,6 @@ These environment variables are read by Bun and configure aspects of its behavio
---
- `BUN_CONFIG_MAX_HTTP_REQUESTS`
- Control the maximum number of concurrent HTTP requests sent by fetch and `bun install`. Defaults to `256`. If you are running into rate limits or connection issues, you can reduce this number.
---
- `BUN_CONFIG_NO_CLEAR_TERMINAL_ON_RELOAD`
- If `BUN_CONFIG_NO_CLEAR_TERMINAL_ON_RELOAD=1`, then `bun --watch` will not clear the console on reload
---
- `DO_NOT_TRACK`
- Telemetry is not sent yet as of November 28th, 2023, but we are planning to add telemetry in the coming months. If `DO_NOT_TRACK=1`, then analytics are [disabled](https://do-not-track.dev/). Bun records bundle timings (so we can answer with data, "is Bun getting faster?") and feature usage (e.g., "are people actually using macros?"). The request body size is about 60 bytes, so it's not a lot of data. Equivalent of `telemetry=false` in bunfig.

View File

@@ -18,7 +18,7 @@ This page is updated regularly to reflect compatibility status of the latest ver
### [`node:child_process`](https://nodejs.org/api/child_process.html)
🟡 Missing `Stream` stdio, `proc.gid` `proc.uid`. IPC cannot send socket handles and only works with other `bun` processes.
🟡 Missing `Stream` stdio, `proc.gid` `proc.uid`. IPC has partial support and currently only works with other `bun` processes.
### [`node:cluster`](https://nodejs.org/api/cluster.html)
@@ -56,7 +56,7 @@ Some methods are not optimized yet.
### [`node:fs`](https://nodejs.org/api/fs.html)
🟡 Missing `statfs` `statfsSync`, `opendirSync`. `Dir` is partially implemented.
🟡 Missing `Dir` `openAsBlob` `opendir` `opendirSync` `statfs` `statfsSync`
### [`node:http`](https://nodejs.org/api/http.html)
@@ -148,7 +148,7 @@ Some methods are not optimized yet.
### [`node:url`](https://nodejs.org/api/url.html)
🟢 Fully implemented.
🟡 Missing `domainToASCII` `domainToUnicode`. It's recommended to use `URL` and `URLSearchParams` globals instead.
### [`node:util`](https://nodejs.org/api/util.html)
@@ -432,7 +432,7 @@ The table below lists all globals implemented by Node.js and Bun's current compa
### [`URL`](https://developer.mozilla.org/en-US/docs/Web/API/URL)
🟡 `URL.createObjectURL` is missing. See [Issue #3925](https://github.com/oven-sh/bun/issues/3925)
🟢 Fully implemented.
### [`URLSearchParams`](https://developer.mozilla.org/en-US/docs/Web/API/URLSearchParams)

View File

@@ -63,7 +63,7 @@ Plugins are primarily used to extend Bun with loaders for additional file types.
```ts#yamlPlugin.ts
import { plugin } from "bun";
await plugin({
plugin({
name: "YAML",
async setup(build) {
const { load } = await import("js-yaml");
@@ -179,7 +179,7 @@ Loading a YAML file is useful, but plugins support more than just data loading.
```ts#sveltePlugin.ts
import { plugin } from "bun";
await plugin({
plugin({
name: "svelte loader",
async setup(build) {
const { compile } = await import("svelte/compiler");

View File

@@ -1,5 +1,9 @@
Bun Shell makes shell scripting with JavaScript & TypeScript fun. It's a cross-platform bash-like shell with seamless JavaScript interop.
{% callout type="note" %}
**Alpha-quality software**: Bun Shell is an unstable API still under development. If you have feature requests or run into bugs, please open an issue. There may be breaking changes in the future.
{% /callout %}
Quickstart:
```js
@@ -8,7 +12,7 @@ import { $ } from "bun";
const response = await fetch("https://example.com");
// Use Response as stdin.
await $`cat < ${response} | wc -c`; // 1256
await $`echo < ${response} > wc -c`; // 120
```
## Features:
@@ -19,8 +23,6 @@ await $`cat < ${response} | wc -c`; // 1256
- **Template literals**: Template literals are used to execute shell commands. This allows for easy interpolation of variables and expressions.
- **Safety**: Bun Shell escapes all strings by default, preventing shell injection attacks.
- **JavaScript interop**: Use `Response`, `ArrayBuffer`, `Blob`, `Bun.file(path)` and other JavaScript objects as stdin, stdout, and stderr.
- **Shell scripting**: Bun Shell can be used to run shell scripts (`.bun.sh` files).
- **Custom interpreter**: Bun Shell is written in Zig, along with its lexer, parser, and interpreter. Bun Shell is a small programming language.
## Getting started
@@ -51,85 +53,23 @@ const welcome = await $`echo "Hello World!"`.text();
console.log(welcome); // Hello World!\n
```
By default, `await`ing will return stdout and stderr as `Buffer`s.
To get stdout, stderr, and the exit code, use await or `.run`:
```js
import { $ } from "bun";
const { stdout, stderr } = await $`echo "Hello World!"`.quiet();
const { stdout, stderr, exitCode } = await $`echo "Hello World!"`.quiet();
console.log(stdout); // Buffer(6) [ 72, 101, 108, 108, 111, 32 ]
console.log(stderr); // Buffer(0) []
```
## Error handling
By default, non-zero exit codes will throw an error. This `ShellError` contains information about the command run.
```js
import { $ } from "bun";
try {
const output = await $`something-that-may-fail`.text();
console.log(output);
} catch (err) {
console.log(`Failed with code ${err.exitCode}`);
console.log(err.stdout.toString());
console.log(err.stderr.toString());
}
```
Throwing can be disabled with `.nothrow()`. The result's `exitCode` will need to be checked manually.
```js
import { $ } from "bun";
const { stdout, stderr, exitCode } = await $`something-that-may-fail`
.nothrow()
.quiet();
if (exitCode !== 0) {
console.log(`Non-zero exit code ${exitCode}`);
}
console.log(stdout);
console.log(stderr);
```
The default handling of non-zero exit codes can be configured by calling `.nothrow()` or `.throws(boolean)` on the `$` function itself.
```js
import { $ } from "bun";
// shell promises will not throw, meaning you will have to
// check for `exitCode` manually on every shell command.
$.nothrow(); // equivalent to $.throws(false)
// default behavior, non-zero exit codes will throw an error
$.throws(true);
// alias for $.nothrow()
$.throws(false);
const { exitCode } = await $`something-that-may-fail`; // No exception thrown
console.log(exitCode); // the command's exit code (non-zero if it failed)
```
## Redirection
A command's _input_ or _output_ may be _redirected_ using the typical Bash operators:
Bun Shell supports redirection with `<`, `>`, and `|` operators.
- `<` redirect stdin
- `>` or `1>` redirect stdout
- `2>` redirect stderr
- `&>` redirect both stdout and stderr
- `>>` or `1>>` redirect stdout, _appending_ to the destination, instead of overwriting
- `2>>` redirect stderr, _appending_ to the destination, instead of overwriting
- `&>>` redirect both stdout and stderr, _appending_ to the destination, instead of overwriting
- `1>&2` redirect stdout to stderr (all writes to stdout will instead be in stderr)
- `2>&1` redirect stderr to stdout (all writes to stderr will instead be in stdout)
Bun Shell also supports redirecting from and to JavaScript objects.
### Example: Redirect output to JavaScript objects (`>`)
### To JavaScript objects (`>`)
To redirect stdout to a JavaScript object, use the `>` operator:
@@ -137,8 +77,9 @@ To redirect stdout to a JavaScript object, use the `>` operator:
import { $ } from "bun";
const buffer = Buffer.alloc(100);
await $`echo "Hello World!" > ${buffer}`;
const result = await $`echo "Hello World!" > ${buffer}`;
console.log(result.exitCode); // 0
console.log(buffer.toString()); // Hello World!\n
```
@@ -147,12 +88,12 @@ The following JavaScript objects are supported for redirection to:
- `Buffer`, `Uint8Array`, `Uint16Array`, `Uint32Array`, `Int8Array`, `Int16Array`, `Int32Array`, `Float32Array`, `Float64Array`, `ArrayBuffer`, `SharedArrayBuffer` (writes to the underlying buffer)
- `Bun.file(path)`, `Bun.file(fd)` (writes to the file)
### Example: Redirect input from JavaScript objects (`<`)
### From JavaScript objects (`<`)
To redirect the output from JavaScript objects to stdin, use the `<` operator:
```js
import { $ } from "bun";
import { $, file } from "bun";
const response = new Response("hello i am a response body");
@@ -167,51 +108,7 @@ The following JavaScript objects are supported for redirection from:
- `Bun.file(path)`, `Bun.file(fd)` (reads from the file)
- `Response` (reads from the body)
### Example: Redirect stdin -> file
```js
import { $ } from "bun";
await $`cat < myfile.txt`;
```
### Example: Redirect stdout -> file
```js
import { $ } from "bun";
await $`echo bun! > greeting.txt`;
```
### Example: Redirect stderr -> file
```js
import { $ } from "bun";
await $`bun run index.ts 2> errors.txt`;
```
### Example: Redirect stdout -> stderr
```js
import { $ } from "bun";
// redirects stderr to stdout, so all output
// will be available on stdout
await $`bun run ./index.ts 2>&1`;
```
### Example: Redirect stderr -> stdout
```js
import { $ } from "bun";
// redirects stdout to stderr, so all output
// will be available on stderr
await $`bun run ./index.ts 1>&2`;
```
## Piping (`|`)
### Piping (`|`)
Like in bash, you can pipe the output of one command to another:
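For instance, a minimal sketch (the byte count in the comment assumes `echo` appends a trailing newline):

```ts
import { $ } from "bun";

// Pipe echo's stdout into wc, then read the final output as text.
const count = await $`echo "Hello World!" | wc -c`.text();
console.log(count); // "13\n"
```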
@@ -399,18 +296,6 @@ For cross-platform compatibility, Bun Shell implements a set of builtin commands
- `echo`: print text
- `pwd`: print the working directory
- `bun`: run bun in bun
- `cat`
- `touch`
- `mkdir`
- `which`
- `mv`
- `exit`
- `true`
- `false`
- `yes`
- `seq`
- `dirname`
- `basename`
**Partially** implemented:
@@ -418,7 +303,9 @@ For cross-platform compatibility, Bun Shell implements a set of builtin commands
**Not** implemented yet, but planned:
- See https://github.com/oven-sh/bun/issues/9716 for the full list.
- `mkdir`: create directories
- `cp`: copy files and directories
- `cat`: concatenate files
## Utilities
@@ -442,7 +329,7 @@ Exposes Bun Shell's escaping logic as a function:
```js
import { $ } from "bun";
console.log($.escape('$(foo) `bar` "baz"'));
console.log($.escape('$(foo) `bar` "baz"'))
// => \$(foo) \`bar\` \"baz\"
```
@@ -451,38 +338,33 @@ If you do not want your string to be escaped, wrap it in a `{ raw: 'str' }` obje
```js
import { $ } from "bun";
await $`echo ${{ raw: '$(foo) `bar` "baz"' }}`;
await $`echo ${{ raw: '$(foo) `bar` "baz"' }}`
// => bun: command not found: foo
// => bun: command not found: bar
// => baz
```
## .sh file loader
## .bun.sh file loader
For simple shell scripts, instead of `/bin/sh`, you can use Bun Shell to run shell scripts.
For simple shell scripts, instead of `sh`, you can use Bun Shell to run shell scripts.
To do so, just run the script with `bun` on a file with the `.sh` extension.
```sh#script.sh
echo "Hello World! pwd=$(pwd)"
```
To do that, run any file with bun that ends with `.bun.sh`:
```sh
$ echo "echo Hello World!" > script.bun.sh
$ bun ./script.bun.sh
> Hello World!
```
On Windows, Bun Shell is used automatically to run `.sh` files when using Bun:
```sh
$ echo "echo Hello World!" > script.sh
# On windows, .bun.sh is not needed, just .sh
$ bun ./script.sh
Hello World! pwd=/home/demo
> Hello World!
```
Scripts with Bun Shell are cross platform, which means they work on Windows:
```powershell
> bun .\script.sh
Hello World! pwd=C:\Users\Demo
```
## Implementation notes
Bun Shell is a small programming language in Bun that is implemented in Zig. It includes a handwritten lexer, parser, and interpreter. Unlike bash, zsh, and other shells, Bun Shell runs operations concurrently.
## Credits
Large parts of this API were inspired by [zx](https://github.com/google/zx), [dax](https://github.com/dsherret/dax), and [bnx](https://github.com/wobsoriano/bnx). Thank you to the authors of those projects.

View File

@@ -57,7 +57,7 @@ coverageThreshold = { lines = 0.9, functions = 0.9 }
### Sourcemaps
Internally, Bun transpiles all files by default, so Bun automatically generates an internal [source map](https://web.dev/source-maps/) that maps lines of your original source code onto Bun's internal representation. If for any reason you want to disable this, set `test.coverageIgnoreSourcemaps` to `true`; this will rarely be desirable outside of advanced use cases.
Internally, Bun transpiles all files by default, so Bun automatically generates an internal [source map](https://web.dev/source-maps/) that maps lines of your original source code onto Bun's internal representation. If for any reason you want to disable this, set `test.coverageIgnoreSourcemaps` to `false`; this will rarely be desirable outside of advanced use cases.
```toml
[test]

View File

@@ -156,8 +156,6 @@ test.if(macOS)("runs on macOS", () => {
});
```
## `test.skipIf`
To instead skip a test based on some condition, use `test.skipIf()` or `describe.skipIf()`.
```ts
@@ -168,32 +166,16 @@ test.skipIf(macOS)("runs on non-macOS", () => {
});
```
## `test.todoIf`
If instead you want to mark the test as TODO, use `test.todoIf()` or `describe.todoIf()`. Choosing carefully between `skipIf` and `todoIf` communicates intent, for example the difference between "invalid for this target" and "planned but not implemented yet."
```ts
const macOS = process.platform === "darwin";
// TODO: we've only implemented this for Linux so far.
test.todoIf(macOS)("runs on posix", () => {
// runs if *not* macOS
});
```
## `test.each`
To return a function for multiple cases in a table of tests, use `test.each`.
```ts
const cases = [
[1, 2, 3],
[3, 4, 5],
];
const cases = [[1, 2, 3], [3, 4, 5]];
test.each(cases)("%p + %p should be %p", (a, b, expected) => {
// runs once for each test case provided
});
// runs once for each test case provided
})
```
There are a number of options available for formatting the case label depending on its type.
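As a hedged sketch of one such option, here are printf-style placeholders (`%i` is assumed here to format integers, as in Jest; the example above uses `%p` for pretty-formatted values):

```ts
import { expect, test } from "bun:test";

const cases = [
  [1, 2, 3],
  [3, 4, 7],
];

test.each(cases)("%i + %i equals %i", (a, b, expected) => {
  expect(a + b).toBe(expected); // label renders as e.g. "1 + 2 equals 3"
});
```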
@@ -345,7 +327,7 @@ Bun implements the following matchers. Full Jest compatibility is on the roadmap
---
-
-
- [`.assertions()`](https://jestjs.io/docs/expect#expectassertionsnumber)
---
@@ -355,7 +337,7 @@ Bun implements the following matchers. Full Jest compatibility is on the roadmap
---
-
-
- [`.hasAssertions()`](https://jestjs.io/docs/expect#expecthasassertions)
---

View File

@@ -21,7 +21,7 @@ const withExtensions = [
return !!json[key]?.extensions?.length;
})
.flatMap(mime => {
return [...new Set(json[mime].extensions)].map(ext => {
return [...new Set([...json[mime].extensions])].map(ext => {
return [`.{.@"${ext}", all.@"${mime}"}`];
});
})

View File

@@ -2,13 +2,14 @@
"private": true,
"name": "bun",
"dependencies": {
"@biomejs/biome": "1.5.3",
"@vscode/debugadapter": "^1.61.0",
"esbuild": "^0.17.15",
"eslint": "^8.20.0",
"eslint-config-prettier": "^8.5.0",
"mitata": "^0.1.3",
"peechy": "0.4.34",
"prettier": "^3.2.5",
"prettier": "3.2.2",
"react": "next",
"react-dom": "next",
"source-map-js": "^1.0.2",
@@ -24,14 +25,14 @@
"build": "if [ ! -e build ]; then bun setup; fi && ninja -C build",
"build:valgrind": "cmake . -DZIG_OPTIMIZE=Debug -DUSE_DEBUG_JSC=ON -DCMAKE_BUILD_TYPE=Debug -GNinja -Bbuild-valgrind && ninja -Cbuild-valgrind",
"build:release": "cmake . -DCMAKE_BUILD_TYPE=Release -GNinja -Bbuild-release && ninja -Cbuild-release",
"build:debug-zig-release": "cmake . -DCMAKE_BUILD_TYPE=Release -DZIG_OPTIMIZE=Debug -GNinja -Bbuild-debug-zig-release && ninja -Cbuild-debug-zig-release",
"build:safe": "cmake . -DZIG_OPTIMIZE=ReleaseSafe -DUSE_DEBUG_JSC=ON -DCMAKE_BUILD_TYPE=Release -GNinja -Bbuild-safe && ninja -Cbuild-safe",
"typecheck": "tsc --noEmit && cd test && bun run typecheck",
"fmt": "prettier --write --cache './{.vscode,src,test,bench,packages/{bun-types,bun-inspector-*,bun-vscode,bun-debug-adapter-protocol}}/**/*.{mjs,ts,tsx,js,jsx}'",
"fmt": "biome format --write {.vscode,src,test,bench,packages/{bun-types,bun-inspector-*,bun-vscode,bun-debug-adapter-protocol}}",
"fmt:zig": "zig fmt src/*.zig src/*/*.zig src/*/*/*.zig src/*/*/*/*.zig",
"lint": "eslint './**/*.d.ts' --cache",
"lint:fix": "eslint './**/*.d.ts' --cache --fix",
"test": "node packages/bun-internal-test/src/runner.node.mjs ./build/bun-debug",
"test:release": "node packages/bun-internal-test/src/runner.node.mjs ./build-release/bun"
"test:release": "node packages/bun-internal-test/src/runner.node.mjs ./build-release/bun",
"update-known-failures": "node packages/bun-internal-test/src/update-known-windows-failures.mjs"
}
}

Binary file not shown.

Binary file not shown.

View File

@@ -7,11 +7,9 @@
"runners/qunit"
],
"dependencies": {
"@actions/core": "latest",
"p-queue": "^8.0.1"
"@actions/core": "latest"
},
"devDependencies": {
"@types/p-queue": "^3.2.1",
"bun-types": "canary",
"prettier": "^2.8.2"
},

View File

@@ -1,91 +1,49 @@
import * as action from "@actions/core";
import { spawn, spawnSync } from "child_process";
import { rmSync, writeFileSync, readFileSync, mkdirSync, openSync, close, closeSync } from "fs";
import { readFile, rm } from "fs/promises";
import { rmSync, writeFileSync, readFileSync } from "fs";
import { readFile } from "fs/promises";
import { readdirSync } from "node:fs";
import { resolve, basename } from "node:path";
import { constants, cpus, hostname, tmpdir, totalmem, userInfo } from "os";
import { join, normalize } from "path";
import { cpus, hostname, totalmem, userInfo } from "os";
import { fileURLToPath } from "url";
import PQueue from "p-queue";
const run_start = new Date();
const TIMEOUT_DURATION = 1000 * 60 * 5;
const SHORT_TIMEOUT_DURATION = Math.ceil(TIMEOUT_DURATION / 5);
function defaultConcurrency() {
// This causes instability due to the number of open file descriptors / sockets in some tests
// Windows has higher limits
if (process.platform !== "win32") {
return 1;
}
return Math.min(Math.floor((cpus().length - 2) / 2), 2);
}
const windows = process.platform === "win32";
const KEEP_TMPDIR = process.env["BUN_KEEP_TMPDIR"] === "1";
const nativeMemory = totalmem();
const force_ram_size_input = parseInt(process.env["BUN_JSC_forceRAMSize"] || "0", 10);
let force_ram_size = Number(BigInt(nativeMemory) >> BigInt(2)) + "";
if (!(Number.isSafeInteger(force_ram_size_input) && force_ram_size_input > 0)) {
force_ram_size = force_ram_size_input + "";
}
function uncygwinTempDir() {
if (process.platform === "win32") {
for (let key of ["TMPDIR", "TEMP", "TEMPDIR", "TMP"]) {
let TMPDIR = process.env[key] || "";
if (!/^\/[a-zA-Z]\//.test(TMPDIR)) {
continue;
}
const driveLetter = TMPDIR[1];
TMPDIR = path.win32.normalize(`${driveLetter.toUpperCase()}:` + TMPDIR.substring(2));
process.env[key] = TMPDIR;
}
}
}
uncygwinTempDir();
const cwd = resolve(fileURLToPath(import.meta.url), "../../../../");
process.chdir(cwd);
const ci = !!process.env["GITHUB_ACTIONS"];
const enableProgressBar = false;
const enableProgressBar = !ci;
const dirPrefix = "bun-test-tmp-" + ((Math.random() * 100_000_0) | 0).toString(36) + "_";
const run_concurrency = Math.max(Number(process.env["BUN_TEST_CONCURRENCY"] || defaultConcurrency(), 10), 1);
const queue = new PQueue({ concurrency: run_concurrency });
var prevTmpdir = "";
function maketemp() {
prevTmpdir = join(
tmpdir(),
dirPrefix + (Date.now() | 0).toString() + "_" + ((Math.random() * 100_000_0) | 0).toString(36),
);
mkdirSync(prevTmpdir, { recursive: true });
return prevTmpdir;
function defaultConcurrency() {
// Concurrency causes more flaky tests, only enable it by default on windows
// See https://github.com/oven-sh/bun/issues/8071
if (windows) {
return Math.floor((cpus().length - 2) / 2);
}
return 1;
}
const extensions = [".js", ".ts", ".jsx", ".tsx", ".mjs", ".cjs", ".mts", ".cts", ".mjsx", ".cjsx", ".mtsx", ".ctsx"];
const run_concurrency = Math.max(Number(process.env["BUN_TEST_CONCURRENCY"] || defaultConcurrency(), 10), 1);
const extensions = [".js", ".ts", ".jsx", ".tsx"];
const git_sha =
process.env["GITHUB_SHA"] ?? spawnSync("git", ["rev-parse", "HEAD"], { encoding: "utf-8" }).stdout.trim();
const TEST_FILTER = process.env.BUN_TEST_FILTER;
function isTest(path) {
if (!basename(path).includes(".test.") || !extensions.some(ext => path.endsWith(ext))) {
return false;
}
if (TEST_FILTER) {
if (!path.includes(TEST_FILTER)) {
return false;
}
}
return true;
}
@@ -100,15 +58,8 @@ function* findTests(dir, query) {
}
}
let bunExe = "bun";
if (process.argv.length > 2) {
bunExe = resolve(process.argv.at(-1));
} else if (process.env.BUN_PATH) {
const { BUN_PATH_BASE, BUN_PATH } = process.env;
bunExe = resolve(normalize(BUN_PATH_BASE), normalize(BUN_PATH));
}
// pick the last one, kind of a hack to allow 'bun run test bun-release' to test the release build
let bunExe = (process.argv.length > 2 ? process.argv[process.argv.length - 1] : null) ?? "bun";
const { error, stdout: revision_stdout } = spawnSync(bunExe, ["--revision"], {
env: { ...process.env, BUN_DEBUG_QUIET_LOGS: 1 },
});
@@ -147,176 +98,59 @@ function lookupWindowsError(code) {
const failing_tests = [];
const passing_tests = [];
let maxFd = -1;
function getMaxFileDescriptor(path) {
if (process.platform === "win32") {
return -1;
}
const fixes = [];
const regressions = [];
hasInitialMaxFD = true;
if (process.platform === "linux") {
try {
readdirSync("/proc/self/fd").forEach(name => {
const fd = parseInt(name.trim(), 10);
if (Number.isSafeInteger(fd) && fd >= 0) {
maxFd = Math.max(maxFd, fd);
}
});
return maxFd;
} catch {}
}
const devnullfd = openSync("/dev/null", "r");
closeSync(devnullfd);
maxFd = devnullfd + 1;
return maxFd;
}
let hasInitialMaxFD = false;
const activeTests = new Map();
let slowTestCount = 0;
function checkSlowTests() {
const now = Date.now();
const prevSlowTestCount = slowTestCount;
slowTestCount = 0;
for (const [path, { start, proc }] of activeTests) {
if (proc && now - start >= TIMEOUT_DURATION) {
console.error(
`\x1b[31merror\x1b[0;2m:\x1b[0m Killing test ${JSON.stringify(path)} after ${Math.ceil((now - start) / 1000)}s`,
);
proc?.stdout?.destroy?.();
proc?.stderr?.destroy?.();
proc?.kill?.();
} else if (now - start > SHORT_TIMEOUT_DURATION) {
console.error(
`\x1b[33mwarning\x1b[0;2m:\x1b[0m Test ${JSON.stringify(path)} has been running for ${Math.ceil(
(now - start) / 1000,
)}s`,
);
slowTestCount++;
}
}
if (slowTestCount > prevSlowTestCount && queue.concurrency > 1) {
queue.concurrency += 1;
}
}
setInterval(checkSlowTests, SHORT_TIMEOUT_DURATION).unref();
var currentTestNumber = 0;
async function runTest(path) {
const thisTestNumber = currentTestNumber++;
const name = path.replace(cwd, "").slice(1);
let exitCode, signal, err, output;
const expected_crash_reason = windows
? await readFile(resolve(path), "utf-8").then(data => {
const match = data.match(/@known-failing-on-windows:(.*)\n/);
return match ? match[1].trim() : null;
})
: null;
const start = Date.now();
const activeTestObject = { start, proc: undefined };
activeTests.set(path, activeTestObject);
try {
await new Promise((finish, reject) => {
const chunks = [];
process.stderr.write(
`
at ${((start - run_start.getTime()) / 1000).toFixed(2)}s, file ${thisTestNumber
.toString()
.padStart(total.toString().length, "0")}/${total}, ${failing_tests.length} failing files
Starting "${name}"
`,
);
const TMPDIR = maketemp();
const proc = spawn(bunExe, ["test", resolve(path)], {
stdio: ["ignore", "pipe", "pipe"],
env: {
...process.env,
FORCE_COLOR: "1",
BUN_GARBAGE_COLLECTOR_LEVEL: "1",
BUN_JSC_forceRAMSize: force_ram_size,
BUN_RUNTIME_TRANSPILER_CACHE_PATH: "0",
GITHUB_ACTIONS: process.env.GITHUB_ACTIONS ?? "true",
BUN_DEBUG_QUIET_LOGS: "1",
BUN_INSTALL_CACHE_DIR: join(TMPDIR, ".bun-install-cache"),
[windows ? "TEMP" : "TMPDIR"]: TMPDIR,
},
});
activeTestObject.proc = proc;
proc.stdout.once("end", () => {
done();
});
let doneCalls = 0;
var done = () => {
// TODO: wait for stderr as well
// spawn.test currently causes it to hang
if (doneCalls++ === 1) {
actuallyDone();
}
};
var actuallyDone = function () {
actuallyDone = done = () => {};
proc?.stderr?.unref?.();
proc?.stdout?.unref?.();
proc?.unref?.();
output = Buffer.concat(chunks).toString();
finish();
};
// if (!KEEP_TMPDIR)
// proc.once("close", () => {
// rm(TMPDIR, { recursive: true, force: true }).catch(() => {});
// });
proc.stdout.on("data", chunk => {
chunks.push(chunk);
if (run_concurrency === 1) process.stdout.write(chunk);
});
proc.stderr.on("data", chunk => {
chunks.push(chunk);
if (run_concurrency === 1) process.stderr.write(chunk);
});
proc.once("close", () => {
activeTestObject.proc = undefined;
});
proc.once("exit", (code_, signal_) => {
activeTestObject.proc = undefined;
exitCode = code_;
signal = signal_;
if (signal || exitCode !== 0) {
actuallyDone();
} else {
done();
}
});
proc.once("error", err_ => {
activeTestObject.proc = undefined;
err = err_;
actuallyDone();
});
await new Promise((done, reject) => {
const proc = spawn(bunExe, ["test", resolve(path)], {
stdio: ["ignore", "pipe", "pipe"],
timeout: 1000 * 60 * 3,
env: {
...process.env,
FORCE_COLOR: "1",
BUN_GARBAGE_COLLECTOR_LEVEL: "1",
BUN_JSC_forceRAMSize: force_ram_size,
BUN_RUNTIME_TRANSPILER_CACHE_PATH: "0",
// reproduce CI results locally
GITHUB_ACTIONS: process.env.GITHUB_ACTIONS ?? "true",
BUN_DEBUG_QUIET_LOGS: "1",
},
});
} finally {
activeTests.delete(path);
}
if (!hasInitialMaxFD) {
getMaxFileDescriptor();
} else if (maxFd > 0) {
const prevMaxFd = maxFd;
maxFd = getMaxFileDescriptor();
if (maxFd > prevMaxFd + queue.concurrency * 2) {
process.stderr.write(
`\n\x1b[31mewarn\x1b[0;2m:\x1b[0m file descriptor leak in ${name}, delta: ${
maxFd - prevMaxFd
}, current: ${maxFd}, previous: ${prevMaxFd}\n`,
);
}
}
const chunks = [];
proc.stdout.on("data", chunk => {
chunks.push(chunk);
if (run_concurrency === 1) process.stdout.write(chunk);
});
proc.stderr.on("data", chunk => {
chunks.push(chunk);
if (run_concurrency === 1) process.stderr.write(chunk);
});
proc.on("exit", (code_, signal_) => {
exitCode = code_;
signal = signal_;
output = Buffer.concat(chunks).toString();
done();
});
proc.on("error", err_ => {
err = err_;
done();
});
});
const passed = exitCode === 0 && !err && !signal;
@@ -361,8 +195,7 @@ Starting "${name}"
}
console.log(
`\x1b[2m${formatTime(duration).padStart(6, " ")}\x1b[0m ${
passed ? "\x1b[32m✔" : "\x1b[31m✖"
`\x1b[2m${formatTime(duration).padStart(6, " ")}\x1b[0m ${passed ? "\x1b[32m✔" : expected_crash_reason ? "\x1b[33m⚠" : "\x1b[31m✖"
} ${name}\x1b[0m${reason ? ` (${reason})` : ""}`,
);
@@ -377,17 +210,28 @@ Starting "${name}"
}
if (!passed) {
failing_tests.push({ path: name, reason, output });
process.exitCode = 1;
if (reason) {
if (windows && !expected_crash_reason) {
regressions.push({ path: name, reason, output });
}
}
failing_tests.push({ path: name, reason, output, expected_crash_reason });
if (err) console.error(err);
} else {
if (windows && expected_crash_reason !== null) {
fixes.push({ path: name, output, expected_crash_reason });
}
passing_tests.push(name);
}
return passed;
}
var finished = 0;
const queue = [...findTests(resolve(cwd, "test"))];
let running = 0;
let total = queue.length;
let finished = 0;
let on_entry_finish = null;
function writeProgressBar() {
const barWidth = Math.min(process.stdout.columns || 40, 80) - 2;
@@ -397,23 +241,34 @@ function writeProgressBar() {
process.stdout.write(`\r${str1}${" ".repeat(barWidth - str1.length)}]`);
}
const allTests = [...findTests(resolve(cwd, "test"))];
console.log(`Starting ${allTests.length} tests with ${run_concurrency} concurrency...`);
let total = allTests.length;
for (const path of allTests) {
queue.add(
async () =>
await runTest(path).catch(e => {
console.error("Bug in bun-internal-test");
console.error(e);
process.exit(1);
}),
);
while (queue.length > 0) {
if (running >= run_concurrency) {
await new Promise(resolve => (on_entry_finish = resolve));
continue;
}
const path = queue.shift();
running++;
runTest(path)
.catch(e => {
console.error("Bug in bun-internal-test");
console.error(e);
process.exit(1);
})
.finally(() => {
running--;
if (on_entry_finish) {
on_entry_finish();
on_entry_finish = null;
}
});
}
while (running > 0) {
await Promise.race([
new Promise(resolve => (on_entry_finish = resolve)),
new Promise(resolve => setTimeout(resolve, 1000)),
]);
}
await queue.onIdle();
console.log(`
Completed ${total} tests with ${failing_tests.length} failing tests
`);
console.log("\n");
function linkToGH(linkTo) {
@@ -424,13 +279,10 @@ function sectionLink(linkTo) {
return "#" + linkTo.replace(/[^a-zA-Z0-9_-]/g, "").toLowerCase();
}
failing_tests.sort((a, b) => a.path.localeCompare(b.path));
passing_tests.sort((a, b) => a.localeCompare(b));
const failingTestDisplay = failing_tests
.filter(({ path }) => !regressions.some(r => r.path === path))
.map(({ path, reason }) => `- [\`${path}\`](${sectionLink(path)})${reason ? ` ${reason}` : ""}`)
.join("\n");
// const passingTestDisplay = passing_tests.map(path => `- \`${path}\``).join("\n");
rmSync("report.md", { force: true });
@@ -467,10 +319,9 @@ console.log("\n" + "-".repeat(Math.min(process.stdout.columns || 40, 80)) + "\n"
console.log(header);
console.log("\n" + "-".repeat(Math.min(process.stdout.columns || 40, 80)) + "\n");
let report = `# bun test on ${
process.env["GITHUB_REF"] ??
let report = `# bun test on ${process.env["GITHUB_REF"] ??
spawnSync("git", ["rev-parse", "--abbrev-ref", "HEAD"], { encoding: "utf-8" }).stdout.trim()
}
}
\`\`\`
${header}
@@ -478,8 +329,31 @@ ${header}
`;
if (fixes.length > 0) {
report += `## Fixes\n\n`;
report += "The following tests had @known-failing-on-windows but now pass:\n\n";
report += fixes
.map(
({ path, expected_crash_reason }) => `- [\`${path}\`](${sectionLink(path)}) (before: ${expected_crash_reason})`,
)
.join("\n");
report += "\n\n";
}
if (regressions.length > 0) {
report += `## Regressions\n\n`;
report += regressions
.map(
({ path, reason, expected_crash_reason }) =>
`- [\`${path}\`](${sectionLink(path)}) ${reason}${expected_crash_reason ? ` (expected: ${expected_crash_reason})` : ""
}`,
)
.join("\n");
report += "\n\n";
}
if (failingTestDisplay.length > 0) {
report += `## Failing tests\n\n`;
report += `## ${windows ? "Known " : ""}Failing tests\n\n`;
report += failingTestDisplay;
report += "\n\n";
}
@@ -492,10 +366,17 @@ if (failingTestDisplay.length > 0) {
if (failing_tests.length) {
report += `## Failing tests log output\n\n`;
for (const { path, output, reason } of failing_tests) {
for (const { path, output, reason, expected_crash_reason } of failing_tests) {
report += `### ${path}\n\n`;
report += "[Link to file](" + linkToGH(path) + ")\n\n";
report += `${reason}\n\n`;
if (windows && reason !== expected_crash_reason) {
report += `To mark this as a known failing test, add this to the start of the file:\n`;
report += `\`\`\`ts\n`;
report += `// @known-failing-on-windows: ${reason}\n`;
report += `\`\`\`\n\n`;
} else {
report += `${reason}\n\n`;
}
report += "```\n";
report += output
.replace(/\x1b\[[0-9;]*m/g, "")
@@ -510,12 +391,18 @@ writeFileSync(
JSON.stringify({
failing_tests,
passing_tests,
fixes,
regressions,
}),
);
console.log("-> test-report.md, test-report.json");
if (ci) {
if (windows) {
action.setOutput("regressing_tests", regressions.map(({ path }) => `- \`${path}\``).join("\n"));
action.setOutput("regressing_test_count", regressions.length);
}
if (failing_tests.length > 0) {
action.setFailed(`${failing_tests.length} files with failing tests`);
}
@@ -527,6 +414,12 @@ if (ci) {
}
action.summary.addRaw(truncated_report);
await action.summary.write();
} else {
if (windows && (regressions.length > 0 || fixes.length > 0)) {
console.log(
"\n\x1b[34mnote\x1b[0;2m:\x1b[0m If you would like to update the @known-failing-on-windows annotations, run `bun update-known-failures`",
);
}
}
process.exit(failing_tests.length ? 1 : process.exitCode);
process.exit(failing_tests.length ? 1 : 0);

View File

@@ -0,0 +1,49 @@
import assert from "assert";
import { existsSync, readFileSync, writeFileSync } from "fs";
import { join } from "path";
import { fileURLToPath } from "url";
if (process.platform !== "win32") {
console.log("This script is only intended to be run on Windows.");
process.exit(1);
}
process.chdir(join(fileURLToPath(import.meta.url), "../../../../"));
if (!existsSync("test-report.json")) {
console.log("No test report found. Please run `bun run test` first.");
process.exit(1);
}
const test_report = JSON.parse(readFileSync("test-report.json", "utf8"));
assert(Array.isArray(test_report.failing_tests));
for (const { path, reason, expected_crash_reason } of test_report.failing_tests) {
assert(path);
assert(reason);
if (expected_crash_reason !== reason) {
const old_content = readFileSync(path, "utf8");
if (!old_content.includes("// @known-failing-on-windows")) {
let content = old_content.replace(/\/\/\s*@known-failing-on-windows:.*\n/, "");
if (reason) {
content = `// @known-failing-on-windows: ${reason}\n` + content;
}
writeFileSync(path, content, "utf8");
console.log(path);
}
}
}
for (const { path } of test_report.fixes) {
assert(path);
const old_content = readFileSync(path, "utf8");
let content = old_content.replace(/\/\/\s*@known-failing-on-windows:.*\n/, "");
if (content !== old_content) {
writeFileSync(path, content, "utf8");
console.log(path);
}
}

Binary file not shown.

View File

@@ -32,20 +32,20 @@ export class PublishCommand extends BuildCommand {
}
const { layer, region, arch, output, public: isPublic } = flags;
if (region.includes("*")) {
// prettier-ignore
// biome-ignore: format ignore
const result = this.#aws(["ec2", "describe-regions", "--query", "Regions[].RegionName", "--output", "json"]);
region.length = 0;
for (const name of JSON.parse(result)) {
region.push(name);
}
} else if (!region.length) {
// prettier-ignore
// biome-ignore: format ignore
region.push(this.#aws(["configure", "get", "region"]));
}
this.log("Publishing...");
for (const regionName of region) {
for (const layerName of layer) {
// prettier-ignore
// biome-ignore: format ignore
const result = this.#aws([
"lambda",
"publish-layer-version",
@@ -70,7 +70,7 @@ export class PublishCommand extends BuildCommand {
const { LayerVersionArn } = JSON.parse(result);
this.log("Published", LayerVersionArn);
if (isPublic) {
// prettier-ignore
// biome-ignore: format ignore
this.#aws([
"lambda",
"add-layer-version-permission",

Binary file not shown.

Binary file not shown.

View File

@@ -19,7 +19,9 @@ export default function polyfillImportMeta(metaIn: ImportMeta) {
dir: path.dirname(metapath),
file: path.basename(metapath),
require: require2,
resolve: metaIn.resolve,
async resolve(id: string, parent?: string) {
return this.resolveSync(id, parent);
},
resolveSync(id: string, parent?: string) {
return require2.resolve(id, {
paths: typeof parent === 'string' ? [

View File

@@ -1,8 +1,6 @@
.DS_Store
.env
node_modules
/npm/**/bin
/npm/**/*.js
/npm/**/package.json
/npm/**/.npmrc
*.tgz
.DS_Store
.env
node_modules
/npm/**/bin
/npm/**/*.js
/npm/**/.npmrc

Binary file not shown.

View File

@@ -0,0 +1,16 @@
{
"name": "@oven/bun-darwin-aarch64",
"version": "0.5.3",
"description": "This is the macOS arm64 binary for Bun, a fast all-in-one JavaScript runtime.",
"homepage": "https://bun.sh",
"bugs": "https://github.com/oven-sh/issues",
"license": "MIT",
"repository": "https://github.com/oven-sh/bun",
"preferUnplugged": true,
"os": [
"darwin"
],
"cpu": [
"arm64"
]
}

View File

@@ -0,0 +1,16 @@
{
"name": "@oven/bun-darwin-x64-baseline",
"version": "0.5.3",
"description": "This is the macOS x64 binary for Bun, a fast all-in-one JavaScript runtime.",
"homepage": "https://bun.sh",
"bugs": "https://github.com/oven-sh/issues",
"license": "MIT",
"repository": "https://github.com/oven-sh/bun",
"preferUnplugged": true,
"os": [
"darwin"
],
"cpu": [
"x64"
]
}

View File

@@ -0,0 +1,16 @@
{
"name": "@oven/bun-darwin-x64",
"version": "0.5.3",
"description": "This is the macOS x64 binary for Bun, a fast all-in-one JavaScript runtime.",
"homepage": "https://bun.sh",
"bugs": "https://github.com/oven-sh/issues",
"license": "MIT",
"repository": "https://github.com/oven-sh/bun",
"preferUnplugged": true,
"os": [
"darwin"
],
"cpu": [
"x64"
]
}

View File

@@ -0,0 +1,16 @@
{
"name": "@oven/bun-linux-aarch64",
"version": "0.5.3",
"description": "This is the Linux arm64 binary for Bun, a fast all-in-one JavaScript runtime.",
"homepage": "https://bun.sh",
"bugs": "https://github.com/oven-sh/issues",
"license": "MIT",
"repository": "https://github.com/oven-sh/bun",
"preferUnplugged": true,
"os": [
"linux"
],
"cpu": [
"arm64"
]
}

View File

@@ -0,0 +1,16 @@
{
"name": "@oven/bun-linux-x64-baseline",
"version": "0.5.3",
"description": "This is the Linux x64 binary for Bun, a fast all-in-one JavaScript runtime.",
"homepage": "https://bun.sh",
"bugs": "https://github.com/oven-sh/issues",
"license": "MIT",
"repository": "https://github.com/oven-sh/bun",
"preferUnplugged": true,
"os": [
"linux"
],
"cpu": [
"x64"
]
}

View File

@@ -0,0 +1,16 @@
{
"name": "@oven/bun-linux-x64",
"version": "0.5.3",
"description": "This is the Linux x64 binary for Bun, a fast all-in-one JavaScript runtime.",
"homepage": "https://bun.sh",
"bugs": "https://github.com/oven-sh/issues",
"license": "MIT",
"repository": "https://github.com/oven-sh/bun",
"preferUnplugged": true,
"os": [
"linux"
],
"cpu": [
"x64"
]
}

View File

@@ -1,5 +0,0 @@
# Bun
This is the Windows x64 binary for Bun, a fast all-in-one JavaScript runtime. https://bun.sh
_Note: "Baseline" builds are for machines that do not support [AVX2](https://en.wikipedia.org/wiki/Advanced_Vector_Extensions) instructions._

View File

@@ -1,3 +0,0 @@
# Bun
This is the Windows x64 binary for Bun, a fast all-in-one JavaScript runtime. https://bun.sh

Some files were not shown because too many files have changed in this diff.