Compare commits

..

2 Commits

Author SHA1 Message Date
Ashcon Partovi
147826d93c Add todoIf, fixme, and fixmeIf to bun:test 2024-02-21 17:16:04 -08:00
Ashcon Partovi
c0a2073dd5 Read .env file in .vscode/launch.json 2024-02-21 16:17:48 -08:00
521 changed files with 26546 additions and 89019 deletions

View File

@@ -1,50 +0,0 @@
name: Setup Bun
description: An internal version of the 'oven-sh/setup-bun' action.
inputs:
bun-version:
type: string
description: "The version of bun to install: 'latest', 'canary', 'bun-v1.0.0', etc."
default: latest
required: false
baseline:
type: boolean
description: "Whether to use the baseline version of bun."
default: false
required: false
download-url:
type: string
description: "The base URL to download bun from."
default: "https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases"
required: false
runs:
using: composite
steps:
- name: Setup Bun
shell: bash
run: |
case "$(uname -s)" in
Linux*) os=linux;;
Darwin*) os=darwin;;
*) os=windows;;
esac
case "$(uname -m)" in
arm64 | aarch64) arch=arm64;;
*) arch=x64;;
esac
case "${{ inputs.baseline }}" in
true | 1) target="bun-${os}-${arch}-baseline";;
*) target="bun-${os}-${arch}";;
esac
case "${{ inputs.bun-version }}" in
latest) release="latest";;
canary) release="canary";;
*) release="bun-v${{ inputs.bun-version }}";;
esac
curl -LO "${{ inputs.download-url }}/${release}/${target}.zip"
unzip ${target}.zip
mkdir -p ${{ runner.temp }}/.bun/bin
mv ${target}/bun* ${{ runner.temp }}/.bun/bin/
chmod +x ${{ runner.temp }}/.bun/bin/*
echo "${{ runner.temp }}/.bun/bin" >> ${GITHUB_PATH}

View File

@@ -9,7 +9,6 @@ on:
branches:
- main
paths:
- ".github/workflows/bun-linux-aarch64.yml"
- "src/**/*"
- "test/**/*"
- "packages/bun-usockets/src/**/*"
@@ -18,11 +17,11 @@ on:
- "build.zig"
- "Makefile"
- "Dockerfile"
- ".github/workflows/bun-linux-aarch64.yml"
pull_request:
branches:
- main
paths:
- ".github/workflows/bun-linux-aarch64.yml"
- "src/**/*"
- "test/**/*"
- "packages/bun-usockets/src/**/*"
@@ -31,6 +30,7 @@ on:
- "build.zig"
- "Makefile"
- "Dockerfile"
- ".github/workflows/bun-linux-aarch64.yml"
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
@@ -51,14 +51,14 @@ jobs:
runner: linux-arm64
build_machine_arch: aarch64
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
with:
submodules: false
ref: ${{github.sha}}
clean: true
- run: |
bash ./scripts/update-submodules.sh
- uses: docker/setup-buildx-action@v3
- uses: docker/setup-buildx-action@v2
id: buildx
with:
install: true
@@ -66,7 +66,7 @@ jobs:
run: |
rm -rf ${{runner.temp}}/release
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
@@ -74,7 +74,7 @@ jobs:
- run: |
mkdir -p /tmp/.buildx-cache-${{matrix.tag}}
- name: Build and push
uses: docker/build-push-action@v5
uses: docker/build-push-action@v3
with:
context: .
push: false
@@ -113,16 +113,14 @@ jobs:
zip -r bun-${{matrix.tag}}-profile.zip bun-${{matrix.tag}}-profile
zip -r bun-${{matrix.tag}}.zip bun-${{matrix.tag}}
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
with:
name: bun-${{matrix.tag}}-profile
path: ${{runner.temp}}/release/bun-${{matrix.tag}}-profile.zip
if-no-files-found: "error"
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
with:
name: bun-${{matrix.tag}}
path: ${{runner.temp}}/release/bun-${{matrix.tag}}.zip
if-no-files-found: "error"
- name: Release
id: release
uses: ncipollo/release-action@v1

View File

@@ -9,7 +9,6 @@ on:
branches:
- main
paths:
- ".github/workflows/bun-linux-build.yml"
- "src/**/*"
- "test/**/*"
- "packages/bun-usockets/src/**/*"
@@ -22,7 +21,6 @@ on:
branches:
- main
paths:
- ".github/workflows/bun-linux-build.yml"
- "src/**/*"
- "test/**/*"
- "packages/bun-usockets/src/**/*"
@@ -49,7 +47,7 @@ jobs:
tag: linux-x64
arch: x86_64
build_arch: amd64
runner: namespace-profile-bun-linux-x64
runner: big-ubuntu
build_machine_arch: x86_64
assertions: "OFF"
zig_optimize: "ReleaseFast"
@@ -58,7 +56,7 @@ jobs:
tag: linux-x64-baseline
arch: x86_64
build_arch: amd64
runner: namespace-profile-bun-linux-x64
runner: big-ubuntu
build_machine_arch: x86_64
assertions: "OFF"
zig_optimize: "ReleaseFast"
@@ -88,20 +86,28 @@ jobs:
submodules: recursive
ref: ${{github.sha}}
clean: true
- uses: docker/setup-buildx-action@v2
id: buildx
with:
install: true
- name: Run
run: |
rm -rf ${{runner.temp}}/release
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- run: |
mkdir -p /tmp/.buildx-cache-${{matrix.tag}}
- name: Build and push
uses: docker/build-push-action@v5
uses: docker/build-push-action@v3
with:
context: .
push: false
cache-from: type=local,src=/tmp/.buildx-cache-${{matrix.tag}}
cache-to: type=local,dest=/tmp/.buildx-cache-${{matrix.tag}}
build-args: |
ARCH=${{matrix.arch}}
BUILDARCH=${{matrix.build_arch}}
@@ -110,6 +116,12 @@ jobs:
GIT_SHA=${{github.sha}}
ASSERTIONS=${{matrix.assertions}}
ZIG_OPTIMIZE=${{matrix.zig_optimize}}
SCCACHE_BUCKET=bun
SCCACHE_REGION=auto
SCCACHE_S3_USE_SSL=true
SCCACHE_ENDPOINT=${{ secrets.CACHE_S3_ENDPOINT }}
AWS_ACCESS_KEY_ID=${{ secrets.CACHE_S3_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY=${{ secrets.CACHE_S3_SECRET_ACCESS_KEY }}
platforms: linux/${{matrix.build_arch}}
target: ${{matrix.target}}
outputs: type=local,dest=${{runner.temp}}/release
@@ -142,16 +154,22 @@ jobs:
zip -r bun-${{matrix.tag}}-profile.zip bun-${{matrix.tag}}-profile
zip -r bun-${{matrix.tag}}.zip bun-${{matrix.tag}}
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
with:
name: bun-${{matrix.tag}}-profile
path: ${{runner.temp}}/release/bun-${{matrix.tag}}-profile.zip
if-no-files-found: "error"
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
with:
name: bun-${{matrix.tag}}
path: ${{runner.temp}}/release/bun-${{matrix.tag}}.zip
if-no-files-found: "error"
- uses: actions/upload-artifact@v3
with:
name: bun-obj-${{matrix.tag}}
path: ${{runner.temp}}/release/bun-obj
- uses: actions/upload-artifact@v3
with:
name: ${{matrix.tag}}-dependencies
path: ${{runner.temp}}/release/bun-dependencies
- name: Release
id: release
uses: ncipollo/release-action@v1
@@ -190,7 +208,7 @@ jobs:
[Commit ${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})
linux-test:
name: Tests ${{matrix.tag}}
runs-on: namespace-profile-bun-linux-x64
runs-on: ubuntu-latest
needs: [linux]
if: github.event_name == 'pull_request'
timeout-minutes: 20
@@ -216,7 +234,7 @@ jobs:
clean: true
- id: download
name: Download
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: bun-${{matrix.tag}}
path: ${{runner.temp}}/release
@@ -257,18 +275,19 @@ jobs:
name: Test (node runner)
env:
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
TMPDIR: ${{runner.temp}}
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
# if: ${{github.event.inputs.use_bun == 'false'}}
run: |
ulimit -c unlimited
ulimit -c
node packages/bun-internal-test/src/runner.node.mjs || true
# - uses: actions/upload-artifact@v4
# if: steps.test.outputs.failing_tests != ''
# with:
# name: cores
# path: /cores
# if-no-files-found: "error"
- uses: actions/upload-artifact@v3
if: steps.test.outputs.failing_tests != ''
with:
name: cores
path: /cores
- uses: sarisia/actions-status-discord@v1
if: always() && steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
with:

View File

@@ -37,7 +37,7 @@ on:
jobs:
macOS-zig:
name: macOS Zig Object
runs-on: namespace-profile-zig-build
runs-on: med-ubuntu
if: github.repository_owner == 'oven-sh'
strategy:
matrix:
@@ -50,15 +50,22 @@ jobs:
# - name: Checkout submodules
# run: git submodule update --init --recursive --depth=1 --progress --force
- name: Setup Docker Buildx
uses: docker/setup-buildx-action@v2
id: buildx
with:
install: true
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Compile Zig Object
uses: docker/build-push-action@v5
uses: docker/build-push-action@v3
if: runner.arch == 'X64'
with:
context: .
push: false
@@ -77,11 +84,10 @@ jobs:
outputs: type=local,dest=${{runner.temp}}/release
- name: Upload Zig Object
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.tag }}
path: ${{runner.temp}}/release/bun-zig.o
if-no-files-found: "error"
macOS-dependencies:
name: macOS Dependencies
@@ -135,7 +141,7 @@ jobs:
- name: Cache submodule dependencies
id: cache-deps-restore
uses: actions/cache/restore@v4
uses: actions/cache/restore@v3
with:
path: ${{runner.temp}}/bun-deps
key: bun-deps-${{ matrix.tag }}-${{ steps.submodule-versions.outputs.sha }}
@@ -153,17 +159,16 @@ jobs:
- name: Cache submodule dependencies
if: ${{ !steps.cache-deps-restore.outputs.cache-hit }}
id: cache-deps-save
uses: actions/cache/save@v4
uses: actions/cache/save@v3
with:
path: ${{runner.temp}}/bun-deps
key: ${{ steps.cache-deps-restore.outputs.cache-primary-key }}
- name: Upload submodule dependencies
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.tag }}-deps
path: ${{runner.temp}}/bun-deps
if-no-files-found: "error"
macOS-cpp:
name: macOS C++
@@ -223,7 +228,6 @@ jobs:
cmake -S $SOURCE_DIR -B $OBJ_DIR \
-G Ninja \
-DUSE_LTO=ON \
-DCMAKE_BUILD_TYPE=Release \
-DBUN_CPP_ONLY=1 \
-DNO_CONFIGURE_DEPENDS=1
@@ -231,11 +235,10 @@ jobs:
bash compile-cpp-only.sh -v
- name: Upload C++
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.tag }}-cpp
path: ${{ runner.temp }}/bun-cpp-obj/bun-cpp-objects.a
if-no-files-found: "error"
macOS-link:
name: macOS Link
runs-on: ${{ matrix.runner }}
@@ -282,19 +285,19 @@ jobs:
echo "${{ runner.temp }}/.bun/bin" >> $GITHUB_PATH
- name: Download C++
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{ matrix.tag }}-cpp
path: ${{ runner.temp }}/bun-cpp-obj
- name: Download Zig Object
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{ matrix.obj }}
path: ${{ runner.temp }}/release
- name: Downloaded submodule dependencies
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{ matrix.tag }}-deps
path: ${{runner.temp}}/bun-deps
@@ -309,7 +312,6 @@ jobs:
cmake $SRC_DIR \
-G Ninja \
-DCMAKE_BUILD_TYPE=Release \
-DUSE_LTO=ON \
-DBUN_LINK_ONLY=1 \
-DBUN_ZIG_OBJ="${{ runner.temp }}/release/bun-zig.o" \
-DBUN_CPP_ARCHIVE="${{ runner.temp }}/bun-cpp-obj/bun-cpp-objects.a" \
@@ -328,16 +330,14 @@ jobs:
zip -r ${{matrix.tag}}-profile.zip ${{matrix.tag}}-profile
zip -r ${{matrix.tag}}.zip ${{matrix.tag}}
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
with:
name: ${{matrix.tag}}-profile
path: ${{runner.temp}}/link-build/${{matrix.tag}}-profile.zip
if-no-files-found: "error"
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
with:
name: ${{matrix.tag}}
path: ${{runner.temp}}/link-build/${{matrix.tag}}.zip
if-no-files-found: "error"
- name: Release
id: release
uses: ncipollo/release-action@v1
@@ -394,12 +394,12 @@ jobs:
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
submodules: false
- id: download
name: Download
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{matrix.tag}}
path: ${{runner.temp}}/release
@@ -426,7 +426,6 @@ jobs:
name: Test (node runner)
env:
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
TMPDIR: ${{runner.temp}}
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
# if: ${{github.event.inputs.use_bun == 'false'}}

View File

@@ -37,7 +37,7 @@ on:
jobs:
macos-object-files:
name: macOS Object
runs-on: namespace-profile-zig-build
runs-on: med-ubuntu
if: github.repository_owner == 'oven-sh'
strategy:
matrix:
@@ -53,17 +53,31 @@ jobs:
# tag: bun-obj-darwin-aarch64
steps:
- uses: actions/checkout@v4
- name: Setup Docker Buildx
uses: docker/setup-buildx-action@v2
id: buildx
with:
install: true
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Compile Zig Object
uses: docker/build-push-action@v5
uses: docker/build-push-action@v3
with:
context: .
push: false
# This doesnt seem to work
# cache-from: type=s3,endpoint_url=${{ secrets.CACHE_S3_ENDPOINT }},blobs_prefix=docker_blobs/,manifests_prefix=docker_manifests/,access_key_id=${{ secrets.CACHE_S3_ACCESS_KEY_ID }},secret_access_key=${{ secrets.CACHE_S3_SECRET_ACCESS_KEY }},bucket=bun,region=auto
# cache-to: type=s3,endpoint_url=${{ secrets.CACHE_S3_ENDPOINT }},blobs_prefix=docker_blobs/,manifests_prefix=docker_manifests/,access_key_id=${{ secrets.CACHE_S3_ACCESS_KEY_ID }},secret_access_key=${{ secrets.CACHE_S3_SECRET_ACCESS_KEY }},bucket=bun,region=auto
# This was used before, but also does not really work
cache-from: type=local,src=/tmp/.buildx-cache-${{matrix.tag}}
cache-to: type=local,dest=/tmp/.buildx-cache-${{matrix.tag}}
build-args: |
BUILDARCH=${{ runner.arch == 'X64' && 'amd64' || 'arm64' }}
BUILD_MACHINE_ARCH=${{ runner.arch == 'X64' && 'x86_64' || 'aarch64' }}
@@ -71,15 +85,22 @@ jobs:
CPU_TARGET=${{ matrix.cpu }}
TRIPLET=${{ matrix.arch }}-macos-none
GIT_SHA=${{ github.sha }}
SCCACHE_BUCKET=bun
SCCACHE_REGION=auto
SCCACHE_S3_USE_SSL=true
SCCACHE_ENDPOINT=${{ secrets.CACHE_S3_ENDPOINT }}
AWS_ACCESS_KEY_ID=${{ secrets.CACHE_S3_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY=${{ secrets.CACHE_S3_SECRET_ACCESS_KEY }}
platforms: linux/${{ runner.arch == 'X64' && 'amd64' || 'arm64' }}
target: build_release_obj
outputs: type=local,dest=${{runner.temp}}/release
- name: Upload Zig Object
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.tag }}
path: ${{runner.temp}}/release/bun-zig.o
if-no-files-found: "error"
macOS-dependencies:
name: macOS Dependencies
runs-on: ${{ matrix.runner }}
@@ -125,7 +146,7 @@ jobs:
- name: Cache submodule dependencies
id: cache-deps-restore
uses: actions/cache/restore@v4
uses: actions/cache/restore@v3
with:
path: ${{runner.temp}}/bun-deps
key: bun-deps-${{ matrix.tag }}-${{ steps.submodule-versions.outputs.sha }}
@@ -143,17 +164,16 @@ jobs:
- name: Cache submodule dependencies
if: ${{ !steps.cache-deps-restore.outputs.cache-hit }}
id: cache-deps-save
uses: actions/cache/save@v4
uses: actions/cache/save@v3
with:
path: ${{runner.temp}}/bun-deps
key: ${{ steps.cache-deps-restore.outputs.cache-primary-key }}
- name: Upload submodule dependencies
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.tag }}-deps
path: ${{runner.temp}}/bun-deps
if-no-files-found: "error"
macOS-cpp:
name: macOS C++
@@ -213,7 +233,6 @@ jobs:
cmake -S $SOURCE_DIR -B $OBJ_DIR \
-G Ninja \
-DUSE_LTO=ON \
-DCMAKE_BUILD_TYPE=Release \
-DBUN_CPP_ONLY=1 \
-DNO_CONFIGURE_DEPENDS=1
@@ -221,11 +240,10 @@ jobs:
bash compile-cpp-only.sh -v
- name: Upload C++
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.tag }}-cpp
path: ${{ runner.temp }}/bun-cpp-obj/bun-cpp-objects.a
if-no-files-found: "error"
macOS:
name: macOS Link
runs-on: ${{ matrix.runner }}
@@ -244,7 +262,7 @@ jobs:
runner: macos-12-large
artifact: bun-obj-darwin-x64-baseline
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Checkout submodules
run: git submodule update --init --recursive --depth=1 --progress --force
@@ -268,19 +286,19 @@ jobs:
echo "${{ runner.temp }}/.bun/bin" >> $GITHUB_PATH
- name: Download C++
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{ matrix.tag }}-cpp
path: ${{ runner.temp }}/bun-cpp-obj
- name: Download Zig Object
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{ matrix.obj }}
path: ${{ runner.temp }}/release
- name: Downloaded submodule dependencies
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{ matrix.tag }}-deps
path: ${{runner.temp}}/bun-deps
@@ -294,7 +312,6 @@ jobs:
cd ${{runner.temp}}/link-build
cmake $SRC_DIR \
-G Ninja \
-DUSE_LTO=ON \
-DCMAKE_BUILD_TYPE=Release \
-DBUN_LINK_ONLY=1 \
-DBUN_ZIG_OBJ="${{ runner.temp }}/release/bun-zig.o" \
@@ -314,16 +331,14 @@ jobs:
zip -r ${{matrix.tag}}-profile.zip ${{matrix.tag}}-profile
zip -r ${{matrix.tag}}.zip ${{matrix.tag}}
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
with:
name: ${{matrix.tag}}-profile
path: ${{runner.temp}}/link-build/${{matrix.tag}}-profile.zip
if-no-files-found: "error"
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
with:
name: ${{matrix.tag}}
path: ${{runner.temp}}/link-build/${{matrix.tag}}.zip
if-no-files-found: "error"
- name: Release
id: release
uses: ncipollo/release-action@v1
@@ -381,12 +396,12 @@ jobs:
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
submodules: false
- id: download
name: Download
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{matrix.tag}}
path: ${{runner.temp}}/release
@@ -413,7 +428,6 @@ jobs:
name: Test (node runner)
env:
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
TMPDIR: ${{runner.temp}}
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
# if: ${{github.event.inputs.use_bun == 'false'}}

View File

@@ -10,11 +10,8 @@ env:
on:
push:
branches:
- main
- ci-*
branches: [main]
paths:
- ".github/workflows/bun-mac-x64.yml"
- "src/**/*"
- "test/**/*"
- "packages/bun-usockets/src/**/*"
@@ -24,11 +21,8 @@ on:
- "Makefile"
- "Dockerfile"
pull_request:
branches:
- main
- ci-*
branches: [main]
paths:
- ".github/workflows/bun-mac-x64.yml"
- "src/**/*"
- "test/**/*"
- "packages/bun-usockets/src/**/*"
@@ -43,12 +37,67 @@ on:
jobs:
macOS-zig:
name: macOS Zig Object
uses: ./.github/workflows/zig-build.yml
secrets: inherit
with:
os: darwin
arch: x64
baseline: true
runs-on: med-ubuntu
if: github.repository_owner == 'oven-sh'
strategy:
matrix:
include:
# - cpu: nehalem
# arch: x86_64
# tag: bun-obj-darwin-x64-baseline
- cpu: haswell
arch: x86_64
tag: bun-obj-darwin-x64
steps:
- uses: actions/checkout@v4
- name: Setup Docker Buildx
uses: docker/setup-buildx-action@v2
id: buildx
with:
install: true
- name: Login to GitHub Container Registry
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Compile Zig Object
uses: docker/build-push-action@v3
with:
context: .
push: false
# This doesnt seem to work
# cache-from: type=s3,endpoint_url=${{ secrets.CACHE_S3_ENDPOINT }},blobs_prefix=docker_blobs/,manifests_prefix=docker_manifests/,access_key_id=${{ secrets.CACHE_S3_ACCESS_KEY_ID }},secret_access_key=${{ secrets.CACHE_S3_SECRET_ACCESS_KEY }},bucket=bun,region=auto
# cache-to: type=s3,endpoint_url=${{ secrets.CACHE_S3_ENDPOINT }},blobs_prefix=docker_blobs/,manifests_prefix=docker_manifests/,access_key_id=${{ secrets.CACHE_S3_ACCESS_KEY_ID }},secret_access_key=${{ secrets.CACHE_S3_SECRET_ACCESS_KEY }},bucket=bun,region=auto
# This was used before, but also does not really work
cache-from: type=local,src=/tmp/.buildx-cache-${{matrix.tag}}
cache-to: type=local,dest=/tmp/.buildx-cache-${{matrix.tag}}
build-args: |
BUILDARCH=${{ runner.arch == 'X64' && 'amd64' || 'arm64' }}
BUILD_MACHINE_ARCH=${{ runner.arch == 'X64' && 'x86_64' || 'aarch64' }}
ARCH=${{ matrix.arch }}
CPU_TARGET=${{ matrix.cpu }}
TRIPLET=${{ matrix.arch }}-macos-none
GIT_SHA=${{ github.sha }}
SCCACHE_BUCKET=bun
SCCACHE_REGION=auto
SCCACHE_S3_USE_SSL=true
SCCACHE_ENDPOINT=${{ secrets.CACHE_S3_ENDPOINT }}
AWS_ACCESS_KEY_ID=${{ secrets.CACHE_S3_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY=${{ secrets.CACHE_S3_SECRET_ACCESS_KEY }}
platforms: linux/${{ runner.arch == 'X64' && 'amd64' || 'arm64' }}
target: build_release_obj
outputs: type=local,dest=${{runner.temp}}/release
- name: Upload Zig Object
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.tag }}
path: ${{runner.temp}}/release/bun-zig.o
macOS-dependencies:
name: macOS Dependencies
@@ -61,6 +110,7 @@ jobs:
- cpu: haswell
arch: x86_64
tag: bun-darwin-x64
obj: bun-obj-darwin-x64
runner: macos-12-large
artifact: bun-obj-darwin-x64
steps:
@@ -94,7 +144,7 @@ jobs:
- name: Cache submodule dependencies
id: cache-deps-restore
uses: actions/cache/restore@v4
uses: actions/cache/restore@v3
with:
path: ${{runner.temp}}/bun-deps
key: bun-deps-${{ matrix.tag }}-${{ steps.submodule-versions.outputs.sha }}
@@ -112,17 +162,16 @@ jobs:
- name: Cache submodule dependencies
if: ${{ !steps.cache-deps-restore.outputs.cache-hit }}
id: cache-deps-save
uses: actions/cache/save@v4
uses: actions/cache/save@v3
with:
path: ${{runner.temp}}/bun-deps
key: ${{ steps.cache-deps-restore.outputs.cache-primary-key }}
- name: Upload submodule dependencies
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.tag }}-deps
path: ${{runner.temp}}/bun-deps
if-no-files-found: "error"
macOS-cpp:
name: macOS C++
@@ -135,6 +184,7 @@ jobs:
- cpu: haswell
arch: x86_64
tag: bun-darwin-x64
obj: bun-obj-darwin-x64
runner: macos-12-large
artifact: bun-obj-darwin-x64
steps:
@@ -182,18 +232,16 @@ jobs:
cmake -S $SOURCE_DIR -B $OBJ_DIR \
-G Ninja \
-DCMAKE_BUILD_TYPE=Release \
-DUSE_LTO=ON \
-DBUN_CPP_ONLY=1 \
-DNO_CONFIGURE_DEPENDS=1
bash compile-cpp-only.sh -v
- name: Upload C++
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.tag }}-cpp
path: ${{ runner.temp }}/bun-cpp-obj/bun-cpp-objects.a
if-no-files-found: "error"
macOS:
name: macOS Link
runs-on: ${{ matrix.runner }}
@@ -207,12 +255,12 @@ jobs:
- cpu: haswell
arch: x86_64
tag: bun-darwin-x64
obj: bun-zig-x86_64-macos-none-haswell # ${{ jobs.macOS-zig.outputs.artifact }}
obj: bun-obj-darwin-x64
package: bun-darwin-x64
runner: macos-12-large
artifact: bun-obj-darwin-x64
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Checkout submodules
run: git submodule update --init --recursive --depth=1 --progress --force
@@ -236,19 +284,19 @@ jobs:
echo "${{ runner.temp }}/.bun/bin" >> $GITHUB_PATH
- name: Download C++
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{ matrix.tag }}-cpp
path: ${{ runner.temp }}/bun-cpp-obj
- name: Download Zig Object
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{ matrix.obj }}
path: ${{ runner.temp }}/release
- name: Downloaded submodule dependencies
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{ matrix.tag }}-deps
path: ${{runner.temp}}/bun-deps
@@ -263,7 +311,6 @@ jobs:
cmake $SRC_DIR \
-G Ninja \
-DCMAKE_BUILD_TYPE=Release \
-DUSE_LTO=ON \
-DBUN_LINK_ONLY=1 \
-DBUN_ZIG_OBJ="${{ runner.temp }}/release/bun-zig.o" \
-DBUN_CPP_ARCHIVE="${{ runner.temp }}/bun-cpp-obj/bun-cpp-objects.a" \
@@ -282,16 +329,14 @@ jobs:
zip -r ${{matrix.tag}}-profile.zip ${{matrix.tag}}-profile
zip -r ${{matrix.tag}}.zip ${{matrix.tag}}
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
with:
name: ${{matrix.tag}}-profile
path: ${{runner.temp}}/link-build/${{matrix.tag}}-profile.zip
if-no-files-found: "error"
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
with:
name: ${{matrix.tag}}
path: ${{runner.temp}}/link-build/${{matrix.tag}}.zip
if-no-files-found: "error"
- name: Release
id: release
uses: ncipollo/release-action@v1
@@ -348,12 +393,12 @@ jobs:
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
submodules: false
- id: download
name: Download
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{matrix.tag}}
path: ${{runner.temp}}/release
@@ -381,7 +426,6 @@ jobs:
env:
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
TMPDIR: ${{runner.temp}}
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
# if: ${{github.event.inputs.use_bun == 'false'}}
run: |

View File

@@ -51,7 +51,7 @@ jobs:
working-directory: packages/bun-release
steps:
- name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@v3
- name: Setup GPG
uses: crazy-max/ghaction-import-gpg@v5
with:
@@ -81,7 +81,7 @@ jobs:
working-directory: packages/bun-release
steps:
- name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@v3
- name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
@@ -105,7 +105,7 @@ jobs:
working-directory: packages/bun-types
steps:
- name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@v3
- name: Setup Node.js
uses: actions/setup-node@v3
with:
@@ -170,12 +170,12 @@ jobs:
suffix: -distroless
steps:
- name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@v3
- name: Setup Docker emulator
uses: docker/setup-qemu-action@v2
- id: buildx
name: Setup Docker buildx
uses: docker/setup-buildx-action@v3
uses: docker/setup-buildx-action@v2
with:
platforms: linux/amd64,linux/arm64
- id: metadata
@@ -192,12 +192,12 @@ jobs:
type=match,pattern=(bun-v)?(canary|\d+.\d+),group=2,value=${{ env.BUN_VERSION }},suffix=${{ matrix.suffix }}
type=match,pattern=(bun-v)?(canary|\d+),group=2,value=${{ env.BUN_VERSION }},suffix=${{ matrix.suffix }}
- name: Login to Docker
uses: docker/login-action@v3
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Push to Docker
uses: docker/build-push-action@v5
uses: docker/build-push-action@v3
with:
context: ./dockerhub/${{ matrix.dir || matrix.variant }}
platforms: linux/amd64,linux/arm64
@@ -216,7 +216,7 @@ jobs:
if: ${{ github.event_name == 'release' || github.event.inputs.use-homebrew == 'true' }}
steps:
- name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
repository: oven-sh/homebrew-bun
token: ${{ secrets.ROBOBUN_TOKEN }}
@@ -252,7 +252,7 @@ jobs:
working-directory: packages/bun-release
steps:
- name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@v3
- name: Setup Bun
uses: oven-sh/setup-bun@v1
with:

View File

@@ -1,65 +0,0 @@
name: Test
on:
push:
workflow_call:
inputs:
runs-on:
type: string
required: true
artifact:
type: string
required: true
jobs:
test:
runs-on: ${{ inputs.runs-on }}
steps:
- name: Checkout
uses: actions/checkout@v4
with:
sparse-checkout: |
test
packages/bun-internal-test/src
packages/bun-internal-test/package.json
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: 20
- name: Download Bun from URL
if: ${{ startsWith(inputs.artifact, 'http://') || startsWith(inputs.artifact, 'https://') }}
shell: bash
run: |
mkdir -p ${{ runner.temp }}/bun
curl -L ${{ inputs.artifact }} -o ${{ runner.temp }}/bun/${{ inputs.artifact }}.zip
- name: Download Bun from Github Action
if: ${{ !startsWith(inputs.artifact, 'http://') && !startsWith(inputs.artifact, 'https://') }}
uses: actions/download-artifact@v4
with:
name: ${{ inputs.artifact }}
path: ${{ runner.temp }}/bun
- name: Setup Bun
shell: bash
run: |
cd ${{ runner.temp }}/bun
unzip ${{ inputs.artifact }}.zip
cd ${{ inputs.artifact }}
chmod +x bun
pwd >> $GITHUB_PATH
- name: Install Dependencies
shell: bash
run: |
bun --version
bun install
bun install --cwd=test
bun install --cwd=packages/bun-internal-test
- name: Run Tests
shell: bash
run: |
node packages/bun-internal-test/src/runner.node.mjs || true

View File

@@ -18,7 +18,7 @@ jobs:
steps:
- name: Checkout repo
uses: actions/checkout@v4
uses: actions/checkout@v3
- name: Install bun
uses: oven-sh/setup-bun@v1

View File

@@ -53,15 +53,20 @@ jobs:
cpu: [haswell, nehalem]
arch: [x86_64]
name: Zig Build
runs-on: namespace-profile-zig-build
runs-on: med-ubuntu
timeout-minutes: 60
if: github.repository_owner == 'oven-sh'
steps:
- run: git config --global core.autocrlf false && git config --global core.eol lf
- uses: actions/checkout@v4
- name: Setup Docker Buildx
uses: docker/setup-buildx-action@v2
id: buildx
with:
install: true
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
@@ -74,7 +79,8 @@ jobs:
echo "canary_revision=$(GITHUB_TOKEN="${{ secrets.GITHUB_TOKEN }}" bash ./scripts/calculate-canary-revision.sh --raw)" >> $GITHUB_OUTPUT
- name: Compile Zig Object
uses: docker/build-push-action@v5
uses: docker/build-push-action@v3
if: runner.arch == 'X64'
with:
context: .
push: false
@@ -96,11 +102,10 @@ jobs:
outputs: type=local,dest=${{runner.temp}}/release
- name: Upload Zig Object
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-zig${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}
path: ${{runner.temp}}/release/bun-zig.o
if-no-files-found: "error"
windows-dependencies:
name: Dependencies
@@ -133,7 +138,7 @@ jobs:
- name: Try fetch dependencies
id: cache-deps-restore
uses: actions/cache/restore@v4
uses: actions/cache/restore@v3
with:
path: bun-deps
key: bun-deps-${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}-${{ steps.submodule-versions.outputs.sha }}
@@ -160,16 +165,15 @@ jobs:
.\scripts\all-dependencies.ps1
- name: Upload Dependencies
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-deps${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}
path: bun-deps/
if-no-files-found: "error"
- name: Cache Dependencies
if: ${{ !steps.cache-deps-restore.outputs.cache-hit }}
id: cache-deps-save
uses: actions/cache/save@v4
uses: actions/cache/save@v3
with:
path: bun-deps
key: ${{ steps.cache-deps-restore.outputs.cache-primary-key }}
@@ -200,11 +204,10 @@ jobs:
if: ${{ env.canary == 'true' }}
run: |
echo "canary_revision=$(GITHUB_TOKEN="${{ secrets.GITHUB_TOKEN }}" bash ./scripts/calculate-canary-revision.sh --raw)" > build-codegen-win32-x64/.canary_revision
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
with:
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-codegen
path: build-codegen-win32-x64/
if-no-files-found: "error"
windows-cpp:
name: C++ Build
@@ -225,7 +228,7 @@ jobs:
version: ${{ env.LLVM_VERSION }}
- run: choco install -y ninja
- name: Download Codegen
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-codegen
path: build
@@ -260,11 +263,10 @@ jobs:
if ($LASTEXITCODE -ne 0) { throw "CMake configuration failed" }
.\compile-cpp-only.ps1 -v
if ($LASTEXITCODE -ne 0) { throw "C++ compilation failed" }
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
with:
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-cpp${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}
path: build/bun-cpp-objects.a
if-no-files-found: "error"
windows-link:
strategy:
@@ -286,22 +288,22 @@ jobs:
version: ${{ env.LLVM_VERSION }}
- run: choco install -y ninja
- name: Download Codegen
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-codegen
path: build
- name: Download Dependencies
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-deps${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}
path: bun-deps
- name: Download Zig Object
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-zig${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}
path: bun-zig
- name: Download C++ Objects
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-cpp${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}
path: bun-cpp
@@ -334,16 +336,14 @@ jobs:
cp -r build\bun.pdb "$Dist\bun.pdb"
Compress-Archive "$Dist" "$Dist.zip"
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
with:
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}
path: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}.zip
if-no-files-found: "error"
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
with:
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}-profile
path: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}-profile.zip
if-no-files-found: "error"
- name: Release
id: release
uses: ncipollo/release-action@v1
@@ -398,12 +398,12 @@ jobs:
- run: git config --global core.autocrlf false && git config --global core.eol lf
- id: checkout
name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
submodules: false
- id: download
name: Download Release
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}-profile
path: ${{runner.temp}}/release
@@ -417,9 +417,6 @@ jobs:
uses: actions/setup-node@v4
with:
node-version: 20
- uses: secondlife/setup-cygwin@v1
with:
packages: bash
- name: Install dependencies
run: |
# bun install --verbose
@@ -434,15 +431,14 @@ jobs:
name: Run tests
env:
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
TMPDIR: ${{runner.temp}}
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
SHELLOPTS: igncr
BUN_PATH_BASE: ${{runner.temp}}
BUN_PATH: release/${{env.tag}}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}-profile/bun.exe
run: |
node packages/bun-internal-test/src/runner.node.mjs || true
shell: bash
try {
$ErrorActionPreference = "SilentlyContinue"
$null = node packages/bun-internal-test/src/runner.node.mjs ${{runner.temp}}/release/${{env.tag}}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}-profile/bun.exe || $true
} catch {}
$ErrorActionPreference = "Stop"
- uses: sarisia/actions-status-discord@v1
if: always() && steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
with:

View File

@@ -1,93 +0,0 @@
name: zig-build
on:
workflow_call:
inputs:
cpu:
type: string
required: true
runner:
type: string
required: true
env:
LLVM_VERSION: 16
jobs:
dependencies:
runs-on: ${{ inputs.runner }}
steps:
- name: Checkout
uses: actions/checkout@v4
with:
submodules: true
- name: Install Packages (macOS)
if: ${{ runner.os == 'macOS' }}
env:
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
HOMEBREW_NO_AUTO_UPDATE: 1
HOMEBREW_NO_INSTALL_CLEANUP: 1
run: |
brew install --force \
ccache \
rust \
llvm@${LLVM_VERSION} \
pkg-config \
coreutils \
libtool \
cmake \
libiconv \
automake \
openssl@1.1 \
ninja \
gnu-sed
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@${LLVM_VERSION})/bin" >> $GITHUB_PATH
brew link --overwrite llvm@${LLVM_VERSION}
- name: Hash Dependencies
id: dependencies
run: |
print_versions() {
git submodule | grep -v WebKit
llvm-config --version
rustc --version
cat $(echo scripts/build*.sh scripts/all-dependencies.sh | tr " " "\n" | sort)
}
echo "sha=$(print_versions | sha1sum | cut -c 1-10)" >> $GITHUB_OUTPUT
- name: Cache Dependencies
id: cache-restore
uses: actions/cache/restore@v4
with:
path: ${{ runner.temp }}/bun-deps
key: bun-deps-${{ inputs.runner }}-${{ steps.dependencies.outputs.sha }}
- name: Build Dependencies
if: ${{ !steps.cache-restore.outputs.cache-hit }}
env:
CPU_TARGET: ${{ inputs.cpu }}
BUN_DEPS_OUT_DIR: ${{ runner.temp }}/bun-deps
run: |
mkdir -p $BUN_DEPS_OUT_DIR
bash ./scripts/clean-dependencies.sh
bash ./scripts/all-dependencies.sh
- name: Cache Dependencies
if: ${{ !steps.cache-restore.outputs.cache-hit }}
id: cache-save
uses: actions/cache/save@v4
with:
path: ${{ runner.temp }}/bun-deps
key: ${{ steps.cache-restore.outputs.cache-primary-key }}
- name: Upload Dependencies
uses: actions/upload-artifact@v4
with:
name: ${{ inputs.runner }}-deps
path: ${{ runner.temp }}/bun-deps
if-no-files-found: error

View File

@@ -1,8 +1,5 @@
name: autofix.ci # Must be named this for autofix.ci to work
permissions:
contents: read
on:
workflow_dispatch:
pull_request:
@@ -13,22 +10,24 @@ on:
env:
ZIG_VERSION: 0.12.0-dev.1828+225fe6ddb
permissions:
contents: read
jobs:
format:
name: format
runs-on: ${{ vars.RUNNER_LINUX_X64 || 'ubuntu-latest' }}
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
with:
sparse-checkout: |
.github
src
packages
test
bench
- name: Setup Bun
uses: ./.github/actions/setup-bun
uses: oven-sh/setup-bun@v1
with:
bun-version: "1.0.21"
- name: Setup Zig

View File

@@ -1,97 +0,0 @@
name: zig-build
on:
workflow_call:
inputs:
os:
type: string # e.g. 'linux', 'darwin', 'windows'
required: true
arch:
type: string # e.g. 'x64' or 'arm64'
required: true
baseline:
type: boolean
required: false
jobs:
zig-build:
name: zig-build
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-zig-build' || 'ubuntu-latest' }}
steps:
- name: Detect Target
run: |
case "${{ inputs.arch }}" in
x64 | x86_64)
arch="x86_64"
buildarch="amd64"
;;
arm64 | aarch64)
arch="aarch64"
buildarch="arm64"
;;
*)
echo "Unsupported architecture"
exit 1
;;
esac
case "${{ inputs.os }}" in
linux)
triplet="${arch}-linux-gnu"
;;
darwin | macos)
triplet="${arch}-macos-none"
;;
windows | win32)
triplet="${arch}-windows-msvc"
;;
*)
echo "Unsupported operating system"
exit 1
;;
esac
if [ "$arch" = "x86_64" ]; then
if [ -n "${{ inputs.baseline }}" ]; then
cpu="nehalem"
else
cpu="haswell"
fi
else
cpu="native"
fi
echo "ARCH=$arch" >> $GITHUB_ENV
echo "BUILDARCH=$buildarch" >> $GITHUB_ENV
echo "CPU_TARGET=$cpu" >> $GITHUB_ENV
echo "TRIPLET=$triplet" >> $GITHUB_ENV
artifact="bun-zig-$triplet-$cpu"
echo "ARTIFACT=$artifact" >> $GITHUB_ENV
echo "artifact=$artifact" >> $GITHUB_OUTPUT
- name: Login to Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Compile Zig Object
uses: docker/build-push-action@v5
with:
push: false
target: build_release_obj
build-args: |
TRIPLET=${{ env.TRIPLET }}
BUILD_MACHINE_ARCH=${{ env.ARCH }}
ARCH=${{ env.ARCH }}
CPU_TARGET=${{ env.CPU_TARGET }}
GIT_SHA=${{ github.sha }}
platforms: |
linux/${{ env.ARCH }}
outputs: |
type=local,dest=${{ runner.temp }}/release
- name: Upload Zig Object
uses: actions/upload-artifact@v4
with:
name: ${{ env.ARTIFACT }}
path: ${{ runner.temp }}/release/bun-zig.o
if-no-files-found: error

3
.gitignore vendored
View File

@@ -160,10 +160,9 @@ x64
/.cache
/src/deps/libuv
/build-*/
/kcov-out
.vs
**/.verdaccio-db.json
/test-report.md
/test-report.json
/test-report.json

2
.gitmodules vendored
View File

@@ -82,4 +82,4 @@ url = https://github.com/litespeedtech/ls-hpack.git
ignore = dirty
depth = 1
shallow = true
fetchRecurseSubmodules = false
fetchRecurseSubmodules = false

View File

@@ -1,5 +0,0 @@
src/bun.js/WebKit
src/deps
test/snapshots
test/js/deno
src/react-refresh.js

View File

@@ -1,15 +0,0 @@
{
"arrowParens": "avoid",
"printWidth": 120,
"trailingComma": "all",
"useTabs": false,
"quoteProps": "preserve",
"overrides": [
{
"files": ["*.md"],
"options": {
"printWidth": 80
}
}
]
}

View File

@@ -11,7 +11,7 @@
// JavaScript
"oven.bun-vscode",
"esbenp.prettier-vscode",
"biomejs.biome",
// TypeScript
"better-ts-errors.better-ts-errors",
@@ -28,6 +28,6 @@
"tamasfe.even-better-toml",
// Other
"bierner.comment-tagged-templates"
"bierner.comment-tagged-templates",
]
}

292
.vscode/launch.json generated vendored
View File

@@ -13,27 +13,12 @@
"request": "launch",
"name": "bun test [file]",
"program": "${workspaceFolder}/build/bun-debug",
"args": ["test", "${file}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "${file}"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [file] --only",
"program": "${workspaceFolder}/build/bun-debug",
"args": ["test", "--only", "${file}"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "1",
"BUN_DEBUG_FileReader": "1"
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole"
},
@@ -42,12 +27,12 @@
"request": "launch",
"name": "bun test [file] (fast)",
"program": "${workspaceFolder}/build/bun-debug",
"args": ["test", "${file}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "${file}"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "0"
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
},
"console": "internalConsole"
},
@@ -56,12 +41,26 @@
"request": "launch",
"name": "bun test [file] (verbose)",
"program": "${workspaceFolder}/build/bun-debug",
"args": ["test", "${file}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "${file}"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "0",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [file] --only",
"program": "${workspaceFolder}/build/bun-debug",
"args": ["--env-file=${workspaceFolder}/.env", "test", "--only", "${file}"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole"
},
@@ -70,12 +69,12 @@
"request": "launch",
"name": "bun test [file] --watch",
"program": "${workspaceFolder}/build/bun-debug",
"args": ["test", "--watch", "${file}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "--watch", "${file}"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole"
},
@@ -84,12 +83,12 @@
"request": "launch",
"name": "bun test [file] --hot",
"program": "${workspaceFolder}/build/bun-debug",
"args": ["test", "--hot", "${file}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "--hot", "${file}"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole"
},
@@ -98,7 +97,7 @@
"request": "launch",
"name": "bun test [file] --inspect",
"program": "${workspaceFolder}/build/bun-debug",
"args": ["test", "${file}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "${file}"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
@@ -108,7 +107,7 @@
},
"console": "internalConsole",
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"pattern": "https:\/\/debug.bun.sh\/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
"action": "openExternally"
}
@@ -118,7 +117,7 @@
"request": "launch",
"name": "bun test [file] --inspect-brk",
"program": "${workspaceFolder}/build/bun-debug",
"args": ["test", "${file}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "${file}"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
@@ -128,7 +127,7 @@
},
"console": "internalConsole",
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"pattern": "https:\/\/debug.bun.sh\/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
"action": "openExternally"
}
@@ -144,9 +143,9 @@
"env": {
"FORCE_COLOR": "0",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole"
"console": "internalConsole",
},
{
"type": "lldb",
@@ -158,7 +157,7 @@
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "0"
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
},
"console": "internalConsole"
},
@@ -172,7 +171,7 @@
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "0",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole"
},
@@ -186,7 +185,7 @@
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole"
},
@@ -200,7 +199,7 @@
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole"
},
@@ -219,10 +218,10 @@
},
"console": "internalConsole",
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"pattern": "https:\/\/debug.bun.sh\/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
"action": "openExternally"
}
},
},
{
"type": "lldb",
@@ -239,10 +238,10 @@
},
"console": "internalConsole",
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"pattern": "https:\/\/debug.bun.sh\/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
"action": "openExternally"
}
},
},
// bun test [...]
{
@@ -250,12 +249,12 @@
"request": "launch",
"name": "bun test [...]",
"program": "${workspaceFolder}/build/bun-debug",
"args": ["test", "${input:testName}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole"
},
@@ -264,12 +263,12 @@
"request": "launch",
"name": "bun test [...] (fast)",
"program": "${workspaceFolder}/build/bun-debug",
"args": ["test", "${input:testName}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "0"
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
},
"console": "internalConsole"
},
@@ -278,12 +277,26 @@
"request": "launch",
"name": "bun test [...] (verbose)",
"program": "${workspaceFolder}/build/bun-debug",
"args": ["test", "${input:testName}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "0",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [...] --only",
"program": "${workspaceFolder}/build/bun-debug",
"args": ["--env-file=${workspaceFolder}/.env", "test", "--only", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole"
},
@@ -292,12 +305,12 @@
"request": "launch",
"name": "bun test [...] --watch",
"program": "${workspaceFolder}/build/bun-debug",
"args": ["test", "--watch", "${input:testName}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "--watch", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole"
},
@@ -306,12 +319,12 @@
"request": "launch",
"name": "bun test [...] --hot",
"program": "${workspaceFolder}/build/bun-debug",
"args": ["test", "--hot", "${input:testName}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "--hot", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole"
},
@@ -320,7 +333,7 @@
"request": "launch",
"name": "bun test [...] --inspect",
"program": "${workspaceFolder}/build/bun-debug",
"args": ["test", "${input:testName}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
@@ -330,7 +343,7 @@
},
"console": "internalConsole",
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"pattern": "https:\/\/debug.bun.sh\/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
"action": "openExternally"
}
@@ -340,7 +353,7 @@
"request": "launch",
"name": "bun test [...] --inspect-brk",
"program": "${workspaceFolder}/build/bun-debug",
"args": ["test", "${input:testName}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
@@ -350,7 +363,7 @@
},
"console": "internalConsole",
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"pattern": "https:\/\/debug.bun.sh\/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
"action": "openExternally"
}
@@ -380,7 +393,7 @@
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "0"
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
},
"console": "internalConsole"
},
@@ -399,7 +412,7 @@
},
"console": "internalConsole",
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"pattern": "https:\/\/debug.bun.sh\/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
"action": "openExternally"
}
@@ -419,7 +432,7 @@
"request": "launch",
"name": "Windows: bun test [file]",
"program": "${workspaceFolder}/build/bun-debug.exe",
"args": ["test", "${file}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "${file}"],
"cwd": "${workspaceFolder}/test",
"environment": [
{
@@ -430,61 +443,18 @@
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1"
},
{
"name": "BUN_DEBUG_jest",
"value": "1"
},
{
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
"value": "1"
}
]
},
{
"type": "cppvsdbg",
"request": "launch",
"name": "Windows: bun test --only [file]",
"program": "${workspaceFolder}/build/bun-debug.exe",
"args": ["test", "--only", "${file}"],
"cwd": "${workspaceFolder}/test",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1"
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1"
},
{
"name": "BUN_DEBUG_EventLoop",
"value": "1"
},
{
"name": "BUN_DEBUG_uv",
"value": "1"
},
{
"name": "BUN_DEBUG_SYS",
"value": "1"
},
{
"name": "BUN_DEBUG_PipeWriter",
"value": "1"
},
{
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
"value": "2"
}
]
],
},
{
"type": "cppvsdbg",
"request": "launch",
"name": "Windows: bun test [file] (fast)",
"program": "${workspaceFolder}/build/bun-debug.exe",
"args": ["test", "${file}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "${file}"],
"cwd": "${workspaceFolder}/test",
"environment": [
{
@@ -499,14 +469,14 @@
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
"value": "0"
}
]
],
},
{
"type": "cppvsdbg",
"request": "launch",
"name": "Windows: bun test [file] (verbose)",
"program": "${workspaceFolder}/build/bun-debug.exe",
"args": ["test", "${file}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "${file}"],
"cwd": "${workspaceFolder}/test",
"environment": [
{
@@ -521,14 +491,36 @@
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
"value": "2"
}
]
],
},
{
"type": "cppvsdbg",
"request": "launch",
"name": "Windows: bun test [file] --only",
"program": "${workspaceFolder}/build/bun-debug.exe",
"args": ["--env-file=${workspaceFolder}/.env", "test", "--only", "${file}"],
"cwd": "${workspaceFolder}/test",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1"
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1"
},
{
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
"value": "2"
},
],
},
{
"type": "cppvsdbg",
"request": "launch",
"name": "Windows: bun test [file] --inspect",
"program": "${workspaceFolder}/build/bun-debug.exe",
"args": ["test", "${file}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "${file}"],
"cwd": "${workspaceFolder}/test",
"environment": [
{
@@ -549,17 +541,17 @@
}
],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"pattern": "https:\/\/debug.bun.sh\/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
"action": "openExternally"
}
},
},
{
"type": "cppvsdbg",
"request": "launch",
"name": "Windows: bun test [file] --inspect-brk",
"program": "${workspaceFolder}/build/bun-debug.exe",
"args": ["test", "${file}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "${file}"],
"cwd": "${workspaceFolder}/test",
"environment": [
{
@@ -580,10 +572,10 @@
}
],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"pattern": "https:\/\/debug.bun.sh\/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
"action": "openExternally"
}
},
},
// Windows: bun run [file]
{
@@ -606,7 +598,7 @@
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
"value": "2"
}
]
],
},
{
"type": "cppvsdbg",
@@ -628,7 +620,7 @@
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
"value": "0"
}
]
],
},
{
"type": "cppvsdbg",
@@ -650,7 +642,7 @@
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
"value": "2"
}
]
],
},
{
"type": "cppvsdbg",
@@ -678,10 +670,10 @@
}
],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"pattern": "https:\/\/debug.bun.sh\/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
"action": "openExternally"
}
},
},
{
"type": "cppvsdbg",
@@ -709,10 +701,10 @@
}
],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"pattern": "https:\/\/debug.bun.sh\/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
"action": "openExternally"
}
},
},
// Windows: bun test [...]
{
@@ -720,7 +712,7 @@
"request": "launch",
"name": "Windows: bun test [...]",
"program": "${workspaceFolder}/build/bun-debug.exe",
"args": ["test", "${input:testName}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"environment": [
{
@@ -735,14 +727,14 @@
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
"value": "2"
}
]
],
},
{
"type": "cppvsdbg",
"request": "launch",
"name": "Windows: bun test [...] (fast)",
"program": "${workspaceFolder}/build/bun-debug.exe",
"args": ["test", "${input:testName}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"environment": [
{
@@ -757,14 +749,14 @@
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
"value": "0"
}
]
],
},
{
"type": "cppvsdbg",
"request": "launch",
"name": "Windows: bun test [...] (verbose)",
"program": "${workspaceFolder}/build/bun-debug.exe",
"args": ["test", "${input:testName}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"environment": [
{
@@ -779,14 +771,36 @@
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
"value": "2"
}
]
],
},
{
"type": "cppvsdbg",
"request": "launch",
"name": "Windows: bun test [...] --only",
"program": "${workspaceFolder}/build/bun-debug.exe",
"args": ["--env-file=${workspaceFolder}/.env", "test", "--only", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1"
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1"
},
{
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
"value": "2"
}
],
},
{
"type": "cppvsdbg",
"request": "launch",
"name": "Windows: bun test [...] --watch",
"program": "${workspaceFolder}/build/bun-debug.exe",
"args": ["test", "--watch", "${input:testName}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "--watch", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"environment": [
{
@@ -801,14 +815,14 @@
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
"value": "2"
}
]
],
},
{
"type": "cppvsdbg",
"request": "launch",
"name": "Windows: bun test [...] --hot",
"program": "${workspaceFolder}/build/bun-debug.exe",
"args": ["test", "--hot", "${input:testName}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "--hot", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"environment": [
{
@@ -823,14 +837,14 @@
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
"value": "2"
}
]
],
},
{
"type": "cppvsdbg",
"request": "launch",
"name": "Windows: bun test [...] --inspect",
"program": "${workspaceFolder}/build/bun-debug.exe",
"args": ["test", "${input:testName}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"environment": [
{
@@ -851,17 +865,17 @@
}
],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"pattern": "https:\/\/debug.bun.sh\/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
"action": "openExternally"
}
},
},
{
"type": "cppvsdbg",
"request": "launch",
"name": "Windows: bun test [...] --inspect-brk",
"program": "${workspaceFolder}/build/bun-debug.exe",
"args": ["test", "${input:testName}"],
"args": ["--env-file=${workspaceFolder}/.env", "test", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"environment": [
{
@@ -882,10 +896,10 @@
}
],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"pattern": "https:\/\/debug.bun.sh\/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
"action": "openExternally"
}
},
},
// Windows: bun test [*]
{
@@ -908,7 +922,7 @@
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
"value": "2"
}
]
],
},
{
"type": "cppvsdbg",
@@ -930,7 +944,7 @@
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
"value": "0"
}
]
],
},
{
"type": "cppvsdbg",
@@ -958,10 +972,10 @@
}
],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"pattern": "https:\/\/debug.bun.sh\/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
"action": "openExternally"
}
},
},
{
"type": "cppvsdbg",
@@ -971,7 +985,7 @@
"args": ["src/runner.node.mjs"],
"cwd": "${workspaceFolder}/packages/bun-internal-test",
"console": "internalConsole"
}
},
],
"inputs": [
{
@@ -983,6 +997,6 @@
"id": "testName",
"type": "promptString",
"description": "Usage: bun test [...]"
}
},
]
}

24
.vscode/settings.json vendored
View File

@@ -52,37 +52,37 @@
},
// JavaScript
"prettier.enable": true,
"prettier.enable": false,
"eslint.workingDirectories": ["${workspaceFolder}/packages/bun-types"],
"[javascript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.defaultFormatter": "biomejs.biome",
},
"[javascriptreact]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
"editor.defaultFormatter": "biomejs.biome"
},
// TypeScript
"typescript.tsdk": "${workspaceFolder}/node_modules/typescript/lib",
"[typescript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
"editor.defaultFormatter": "biomejs.biome"
},
"[typescriptreact]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
"editor.defaultFormatter": "biomejs.biome"
},
// JSON
"[json]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.defaultFormatter": "biomejs.biome",
},
"[jsonc]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.defaultFormatter": "biomejs.biome",
},
// Markdown
"[markdown]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.unicodeHighlight.ambiguousCharacters": true,
"editor.unicodeHighlight.invisibleCharacters": true,
"editor.defaultFormatter": "biomejs.biome",
"editor.unicodeHighlight.ambiguousCharacters": false,
"editor.unicodeHighlight.invisibleCharacters": false,
"diffEditor.ignoreTrimWhitespace": false,
"editor.wordWrap": "on",
"editor.quickSuggestions": {
@@ -94,12 +94,12 @@
// TOML
"[toml]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.defaultFormatter": "biomejs.biome",
},
// YAML
"[yaml]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.defaultFormatter": "biomejs.biome",
},
// Files

View File

@@ -2,39 +2,8 @@ cmake_minimum_required(VERSION 3.22)
cmake_policy(SET CMP0091 NEW)
cmake_policy(SET CMP0067 NEW)
set(Bun_VERSION "1.0.33")
set(WEBKIT_TAG 089023cc9078b3aa173869fd6685f3e7bed2a994)
if(APPLE AND DEFINED ENV{CI})
if(ARCH STREQUAL "x86_64")
set(CMAKE_OSX_DEPLOYMENT_TARGET "10.14")
else()
set(CMAKE_OSX_DEPLOYMENT_TARGET "11.0")
endif()
endif()
if(APPLE AND NOT CMAKE_OSX_DEPLOYMENT_TARGET)
execute_process(COMMAND xcrun --show-sdk-path OUTPUT_VARIABLE SDKROOT)
string(STRIP ${SDKROOT} SDKROOT)
message(STATUS "Using SDKROOT: ${SDKROOT}")
SET(CMAKE_OSX_SYSROOT ${SDKROOT})
execute_process(COMMAND xcrun --sdk macosx --show-sdk-version OUTPUT_VARIABLE MACOSX_DEPLOYMENT_TARGET)
string(STRIP ${MACOSX_DEPLOYMENT_TARGET} MACOSX_DEPLOYMENT_TARGET)
set(CMAKE_OSX_DEPLOYMENT_TARGET ${MACOSX_DEPLOYMENT_TARGET})
# Check if current version of macOS is less than the deployment target and if so, raise an error
execute_process(COMMAND sw_vers -productVersion OUTPUT_VARIABLE MACOS_VERSION)
string(STRIP ${MACOS_VERSION} MACOS_VERSION)
if(MACOS_VERSION VERSION_LESS ${MACOSX_DEPLOYMENT_TARGET})
message(WARNING "\nThe current version of macOS (${MACOS_VERSION}) is less than the deployment target (${MACOSX_DEPLOYMENT_TARGET}).\nThis makes icucore fail to run at start.\nTo fix this, please either:\n- Upgrade to the latest version of macOS\n- Use `xcode-select` to switch to an SDK version <= ${MACOS_VERSION}")
endif()
endif()
if(APPLE)
message(STATUS "Building for macOS v${CMAKE_OSX_DEPLOYMENT_TARGET}")
endif()
set(Bun_VERSION "1.0.28")
set(WEBKIT_TAG c3712c13dcdc091cfe4c7cb8f2c1fd16472e6f92)
set(BUN_WORKDIR "${CMAKE_CURRENT_BINARY_DIR}")
message(STATUS "Configuring Bun ${Bun_VERSION} in ${BUN_WORKDIR}")
@@ -72,11 +41,7 @@ elseif(CMAKE_BUILD_TYPE STREQUAL "Release")
# it is enabled for the time being to make sure to catch more bugs in the experimental windows builds
set(DEFAULT_ZIG_OPTIMIZE "ReleaseSafe")
else()
if(ZIG_OPTIMIZE STREQUAL "Debug")
set(bun "bun-debug")
else()
set(bun "bun-profile")
endif()
set(bun "bun-profile")
endif()
endif()
@@ -262,13 +227,6 @@ set(DEFAULT_USE_DEBUG_JSC, OFF)
if(CMAKE_BUILD_TYPE STREQUAL "Debug")
set(DEFAULT_USE_DEBUG_JSC ON)
set(DEFAULT_LTO OFF)
elseif(CMAKE_BUILD_TYPE STREQUAL "Release")
if(CI)
set(DEFAULT_LTO ON)
else()
set(DEFAULT_LTO OFF)
endif()
endif()
if(WIN32)
@@ -276,9 +234,9 @@ if(WIN32)
endif()
if(UNIX AND NOT APPLE)
execute_process(COMMAND grep -w "NAME" /etc/os-release OUTPUT_VARIABLE LINUX_DISTRO)
execute_process(COMMAND cat /etc/os-release COMMAND head -n1 OUTPUT_VARIABLE LINUX_DISTRO)
if(${LINUX_DISTRO} MATCHES "NAME=\"(Arch|Manjaro|Artix) Linux\"|NAME=\"openSUSE Tumbleweed\"\n")
if(${LINUX_DISTRO} MATCHES "NAME=\"(Arch|Manjaro|Artix) Linux\"\n")
set(DEFAULT_USE_STATIC_LIBATOMIC OFF)
endif()
endif()
@@ -305,8 +263,6 @@ option(USE_DEBUG_JSC "Enable assertions and use a debug build of JavaScriptCore"
option(USE_UNIFIED_SOURCES "Use unified sources to speed up the build" OFF)
option(USE_STATIC_LIBATOMIC "Statically link libatomic, requires the presence of libatomic.a" ${DEFAULT_USE_STATIC_LIBATOMIC})
option(USE_LTO "Enable Link-Time Optimization" ${DEFAULT_LTO})
if(USE_VALGRIND)
# Disable SIMD
set(USE_BASELINE_BUILD ON)
@@ -329,11 +285,9 @@ endif()
set(ERROR_LIMIT 100 CACHE STRING "Maximum number of errors to show when compiling C++ code")
set(ARCH x86_64)
set(HOMEBREW_PREFIX "/usr/local")
if(CMAKE_SYSTEM_PROCESSOR MATCHES "aarch64|arm64|arm")
set(ARCH aarch64)
set(HOMEBREW_PREFIX "/opt/homebrew")
endif()
if(NOT CPU_TARGET)
@@ -474,13 +428,7 @@ if(NOT WEBKIT_DIR)
set(BUN_WEBKIT_PACKAGE_NAME_SUFFIX "-debug")
set(ASSERT_ENABLED "1")
elseif(NOT DEBUG AND NOT WIN32)
# Avoid waiting for LTO in local release builds outside of CI
if(USE_LTO)
set(BUN_WEBKIT_PACKAGE_NAME_SUFFIX "-lto")
else()
set(BUN_WEBKIT_PACKAGE_NAME_SUFFIX "")
endif()
set(BUN_WEBKIT_PACKAGE_NAME_SUFFIX "-lto")
set(ASSERT_ENABLED "0")
endif()
@@ -515,13 +463,6 @@ if(NOT WEBKIT_DIR)
endif()
set(WEBKIT_INCLUDE_DIR "${BUN_WORKDIR}/bun-webkit/include")
if(APPLE)
set(ICU_INCLUDE_DIR "")
else()
set(ICU_INCLUDE_DIR "${BUN_WORKDIR}/bun-webkit/include/wtf/unicode")
endif()
set(WEBKIT_LIB_DIR "${BUN_WORKDIR}/bun-webkit/lib")
elseif(WEBKIT_DIR STREQUAL "omit")
message(STATUS "Not using WebKit. This is only valid if you are only trying to build Zig code")
@@ -938,10 +879,6 @@ else()
add_compile_definitions("ASSERT_ENABLED=1")
endif()
if(ICU_INCLUDE_DIR)
include_directories(${ICU_INCLUDE_DIR})
endif()
include_directories(
${CMAKE_CURRENT_SOURCE_DIR}/packages/
${CMAKE_CURRENT_SOURCE_DIR}/packages/bun-usockets
@@ -1001,8 +938,6 @@ if(CMAKE_BUILD_TYPE STREQUAL "Debug")
-Werror=return-type
-Werror=return-stack-address
-Werror=implicit-function-declaration
-Werror=uninitialized
-Werror
)
else()
target_compile_options(${bun} PUBLIC /Od /Z7)
@@ -1010,31 +945,15 @@ if(CMAKE_BUILD_TYPE STREQUAL "Debug")
add_compile_definitions("BUN_DEBUG=1")
elseif(CMAKE_BUILD_TYPE STREQUAL "Release")
set(LTO_FLAG "")
if(NOT WIN32)
if(USE_LTO)
list(APPEND LTO_FLAG "-flto=full" "-emit-llvm")
endif()
target_compile_options(${bun} PUBLIC -O3 ${LTO_FLAG} -g1
target_compile_options(${bun} PUBLIC -O3 -flto=full -emit-llvm -g1
-Werror=return-type
-Werror=return-stack-address
-Werror=implicit-function-declaration
-Werror=uninitialized
-Werror
)
else()
set(LTO_LINK_FLAG "")
if(USE_LTO)
# -emit-llvm seems to not be supported or under a different name on Windows.
list(APPEND LTO_FLAG "-flto=full")
list(APPEND LTO_LINK_FLAG "/LTCG")
endif()
target_compile_options(${bun} PUBLIC /O2 ${LTO_FLAG} /DEBUG /Z7)
target_link_options(${bun} PUBLIC ${LTO_LINK_FLAG} /DEBUG)
target_compile_options(${bun} PUBLIC /O2 -flto=full /DEBUG /Z7)
target_link_options(${bun} PUBLIC /LTCG /DEBUG)
endif()
endif()
@@ -1086,6 +1005,12 @@ else()
endif()
if(APPLE)
if(ARCH STREQUAL "x86_64")
set(CMAKE_OSX_DEPLOYMENT_TARGET "10.14")
else()
set(CMAKE_OSX_DEPLOYMENT_TARGET "11.0")
endif()
target_link_options(${bun} PUBLIC "-dead_strip")
target_link_options(${bun} PUBLIC "-dead_strip_dylibs")
target_link_options(${bun} PUBLIC "-Wl,-stack_size,0x1200000")
@@ -1151,7 +1076,21 @@ endif()
# --- ICU ---
if(APPLE)
# TODO: a much better check can be done to find this path
find_path(
ICU4C_DIR NAMES lib/libicudata.a
PATHS ENV PATH /usr/local/opt/icu4c /opt/homebrew/opt/icu4c
)
find_path(
ICONV_DIR NAMES lib/libiconv.a
PATHS ENV PATH /usr/local/opt/libiconv /opt/homebrew/opt/libiconv
)
target_link_libraries(${bun} PRIVATE "icucore")
target_link_libraries(${bun} PRIVATE "${ICONV_DIR}/lib/libiconv.a")
target_link_libraries(${bun} PRIVATE "${ICU4C_DIR}/lib/libicudata.a")
target_link_libraries(${bun} PRIVATE "${ICU4C_DIR}/lib/libicui18n.a")
target_link_libraries(${bun} PRIVATE "${ICU4C_DIR}/lib/libicuuc.a")
endif()
# --- Stripped Binary "bun"

View File

@@ -16,7 +16,7 @@ ARG BUILD_MACHINE_ARCH=x86_64
ARG BUILDARCH=amd64
ARG TRIPLET=${ARCH}-linux-gnu
ARG GIT_SHA=""
ARG BUN_VERSION="bun-v1.0.30"
ARG BUN_VERSION="bun-v1.0.7"
ARG BUN_DOWNLOAD_URL_BASE="https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/${BUN_VERSION}"
ARG CANARY=0
ARG ASSERTIONS=OFF
@@ -372,7 +372,7 @@ ENV CCACHE_DIR=/ccache
RUN --mount=type=cache,target=/ccache mkdir ${BUN_DIR}/build \
&& cd ${BUN_DIR}/build \
&& mkdir -p tmp_modules tmp_functions js codegen \
&& cmake .. -GNinja -DCMAKE_BUILD_TYPE=Release -DUSE_LTO=ON -DUSE_DEBUG_JSC=${ASSERTIONS} -DBUN_CPP_ONLY=1 -DWEBKIT_DIR=/build/bun/bun-webkit -DCANARY=${CANARY} -DZIG_COMPILER=system \
&& cmake .. -GNinja -DCMAKE_BUILD_TYPE=Release -DUSE_DEBUG_JSC=${ASSERTIONS} -DBUN_CPP_ONLY=1 -DWEBKIT_DIR=/build/bun/bun-webkit -DCANARY=${CANARY} -DZIG_COMPILER=system \
&& bash compile-cpp-only.sh -v
FROM bun-base-with-zig as bun-codegen-for-zig
@@ -419,7 +419,6 @@ RUN mkdir -p build \
&& cmake .. \
-G Ninja \
-DCMAKE_BUILD_TYPE=Release \
-DUSE_LTO=ON \
-DZIG_OPTIMIZE="${ZIG_OPTIMIZE}" \
-DCPU_TARGET="${CPU_TARGET}" \
-DZIG_TARGET="${TRIPLET}" \
@@ -477,7 +476,6 @@ RUN cmake .. \
-DCMAKE_BUILD_TYPE=Release \
-DBUN_LINK_ONLY=1 \
-DBUN_ZIG_OBJ="${BUN_DIR}/build/bun-zig.o" \
-DUSE_LTO=ON \
-DUSE_DEBUG_JSC=${ASSERTIONS} \
-DBUN_CPP_ARCHIVE="${BUN_DIR}/build/bun-cpp-objects.a" \
-DWEBKIT_DIR="${BUN_DIR}/bun-webkit" \
@@ -542,7 +540,6 @@ RUN cmake .. \
-DNO_CONFIGURE_DEPENDS=1 \
-DCANARY="${CANARY}" \
-DZIG_COMPILER=system \
-DUSE_LTO=ON \
&& ninja -v \
&& ./bun --revision \
&& mkdir -p /build/out \

View File

@@ -24,9 +24,9 @@
## What is Bun?
> **Bun is under active development.** Use it to speed up your development workflows or run simpler production code in resource-constrained environments like serverless functions. We're working on more complete Node.js compatibility and integration with existing frameworks. Join the [Discord](https://bun.sh/discord) and watch the [GitHub repository](https://github.com/oven-sh/bun) to keep tabs on future releases.
> **Bun is under active development.** Use it to speed up your development workflows or run simpler production code in resource-constrained environments like serverless functions. We're working on more complete Node.js compatibility and integration with existing frameworks. Join the [Discord](https://bun.sh/discord) and watch the [GitHub repository](https://github.com/oven-sh/bun) to keep tabs on future releases.
Bun is an all-in-one toolkit for JavaScript and TypeScript apps. It ships as a single executable called `bun`.
Bun is an all-in-one toolkit for JavaScript and TypeScript apps. It ships as a single executable called `bun`.
At its core is the _Bun runtime_, a fast JavaScript runtime designed as a drop-in replacement for Node.js. It's written in Zig and powered by JavaScriptCore under the hood, dramatically reducing startup times and memory usage.
@@ -34,12 +34,12 @@ At its core is the _Bun runtime_, a fast JavaScript runtime designed as a drop-i
bun run index.tsx # TS and JSX supported out-of-the-box
```
The `bun` command-line tool also implements a test runner, script runner, and Node.js-compatible package manager. Instead of 1,000 node_modules for development, you only need `bun`. Bun's built-in tools are significantly faster than existing options and usable in existing Node.js projects with little to no changes.
The `bun` command-line tool also implements a test runner, script runner, and Node.js-compatible package manager. Instead of 1,000 node_modules for development, you only need `bun`. Bun's built-in tools are significantly faster than existing options and usable in existing Node.js projects with little to no changes.
```bash
bun test # run tests
bun run start # run the `start` script in `package.json`
bun install <pkg> # install a package
bun install <pkg> # install a package
bunx cowsay 'Hello, world!' # execute a package
```

Binary file not shown.

View File

@@ -12,7 +12,7 @@
"fast-glob": "3.3.1",
"fdir": "^6.1.0",
"mitata": "^0.1.6",
"string-width": "7.1.0",
"string-width": "^7.0.0",
"zx": "^7.2.3"
},
"scripts": {

File diff suppressed because one or more lines are too long

View File

@@ -1,10 +1,9 @@
import { mkdirSync, rmSync, writeFileSync } from "fs";
import { cp } from "fs/promises";
import { tmpdir } from "os";
import { join, resolve } from "path";
import { mkdirSync, writeFileSync } from "fs";
import { bench, run } from "./runner.mjs";
import { cp } from "fs/promises";
import { join } from "path";
import { tmpdir } from "os";
import { fileURLToPath } from "url";
const hugeDirectory = (() => {
const root = join(tmpdir(), "huge");
const base = join(root, "directory", "for", "benchmarks", "1", "2", "3", "4", "5", "6", "7", "8", "9", "10");
@@ -19,21 +18,14 @@ const hugeDirectory = (() => {
const hugeFilePath = join(tmpdir(), "huge-file-0.txt");
const hugeText = "Hello, world!".repeat(1000000);
writeFileSync(hugeFilePath, hugeText);
let base = process.argv.at(-1);
if (resolve(base) === fileURLToPath(import.meta.url)) {
base = tmpdir();
} else {
rmSync(base, { recursive: true, force: true });
mkdirSync(base, { recursive: true });
}
var hugeCopyI = 0;
bench("cp -r (1000 files)", async b => {
await cp(hugeDirectory, join(base, "huge-copy" + hugeCopyI++), { recursive: true });
await cp(hugeDirectory, join(tmpdir(), "huge-copy" + hugeCopyI++), { recursive: true });
});
bench("cp 1 " + ((hugeText.length / 1024) | 0) + " KB file", async b => {
await cp(hugeFilePath, join(base, "huge-file" + hugeCopyI++));
await cp(hugeFilePath, join(tmpdir(), "huge-file" + hugeCopyI++));
});
await run();

View File

@@ -5,11 +5,6 @@ const lazy = globalThis[Symbol.for("Bun.lazy")];
const noop = lazy("noop");
const fn = noop.function;
const regular = noop.functionRegular;
const callback = noop.callback;
bench("C++ callback into JS", () => {
callback(() => {});
});
bench("C++ fn regular", () => {
regular();

View File

@@ -1,73 +0,0 @@
import { tmpdir } from "node:os";
import { bench, group, run } from "./runner.mjs";
import { createReadStream, writeFileSync } from "node:fs";
import { sep } from "node:path";
if (!Promise.withResolvers) {
Promise.withResolvers = function () {
let resolve, reject;
const promise = new Promise((res, rej) => {
resolve = res;
reject = rej;
});
return { promise, resolve, reject };
};
}
const ALLOW_BUN = typeof Bun !== "undefined";
const ALLOW_NODE = true;
const dir = tmpdir() + sep;
var short = (function () {
const text = "Hello World!";
const path = dir + "bun-bench-short.text";
writeFileSync(path, text, "utf8");
return { path, length: text.length };
})();
var shortUTF16 = (function () {
const text = "Hello World 💕💕💕";
const path = dir + "bun-bench-shortUTF16.text";
writeFileSync(path, text, "utf8");
return { path, length: text.length };
})();
var long = (function () {
const text = "Hello World!".repeat(1024);
const path = dir + "bun-bench-long.text";
writeFileSync(path, text, "utf8");
return { path, length: text.length };
})();
var longUTF16 = (function () {
const text = "Hello World 💕💕💕".repeat(15 * 70192);
const path = dir + "bun-bench-longUTF16.text";
writeFileSync(path, text, "utf8");
return { path, length: text.length };
})();
async function bun(path) {
for await (const chunk of Bun.file(path).stream()) {
chunk;
}
}
async function node(path) {
const { promise, resolve } = Promise.withResolvers();
const stream = createReadStream(path);
stream.on("data", chunk => {});
stream.on("end", () => resolve());
await promise;
}
ALLOW_BUN && bench("short - bun", () => bun(short.path));
ALLOW_NODE && bench("short - node", () => node(short.path));
ALLOW_BUN && bench("shortUTF16 - bun", () => bun(shortUTF16.path));
ALLOW_NODE && bench("shortUTF16 - node", () => node(shortUTF16.path));
ALLOW_BUN && bench("long - bun", () => bun(long.path));
ALLOW_NODE && bench("long - node", () => node(long.path));
ALLOW_BUN && bench("longUTF16 - bun", () => bun(longUTF16.path));
ALLOW_NODE && bench("longUTF16 - node", () => node(longUTF16.path));
await run();

View File

@@ -3,38 +3,41 @@ import npmStringWidth from "string-width";
const bunStringWidth = globalThis?.Bun?.stringWidth;
const stringWidth = bunStringWidth || npmStringWidth;
const formatter = new Intl.NumberFormat();
const format = n => {
return formatter.format(n);
};
bench("npm/string-width (ansi + emoji + ascii)", () => {
npmStringWidth("hello there! 😀\u001b[31m😀😀");
});
const inputs = [
["hello", "ascii"],
["[31mhello", "ascii+ansi"],
["hello😀", "ascii+emoji"],
["[31m😀😀", "ansi+emoji"],
["😀hello😀[31m😀😀😀", "ansi+emoji+ascii"],
];
bench("npm/string-width (ansi + emoji)", () => {
npmStringWidth("😀\u001b[31m😀😀");
});
const repeatCounts = [1, 10, 100, 1000, 5000];
bench("npm/string-width (ansi + ascii)", () => {
npmStringWidth("\u001b[31mhello there!");
});
const maxInputLength = Math.max(...inputs.map(([input]) => input.repeat(Math.max(...repeatCounts)).length));
if (bunStringWidth) {
bench("Bun.stringWidth (ansi + emoji + ascii)", () => {
bunStringWidth("hello there! 😀\u001b[31m😀😀");
});
for (const [input, textLabel] of inputs) {
for (let repeatCount of repeatCounts) {
const label = bunStringWidth ? "Bun.stringWidth" : "npm/string-width";
bench("Bun.stringWidth (ansi + emoji)", () => {
bunStringWidth("😀\u001b[31m😀😀");
});
const str = input.repeat(repeatCount);
const name = `${label} ${format(str.length).padStart(format(maxInputLength).length, " ")} chars ${textLabel}`;
bench("Bun.stringWidth (ansi + ascii)", () => {
bunStringWidth("\u001b[31mhello there!");
});
bench(name, () => {
stringWidth(str);
});
if (npmStringWidth("😀\u001b[31m😀😀") !== bunStringWidth("😀\u001b[31m😀😀")) {
console.error("string-width mismatch");
}
if (bunStringWidth && bunStringWidth(str) !== npmStringWidth(str)) {
throw new Error("string-width mismatch");
}
if (npmStringWidth("hello there! 😀\u001b[31m😀😀") !== bunStringWidth("hello there! 😀\u001b[31m😀😀")) {
console.error("string-width mismatch");
}
if (npmStringWidth("\u001b[31mhello there!") !== bunStringWidth("\u001b[31mhello there!")) {
console.error("string-width mismatch");
}
}

View File

@@ -7,8 +7,8 @@
"build": "exit 0",
"bench:bun": "$BUN bun.js",
"bench:node": "$NODE node.mjs",
"deps": "npm install && bash src/download.sh",
"bench:deno": "$DENO run -A --unstable-ffi deno.js",
"deps": "npm install && sh src/download.sh",
"bench:deno": "$DENO run -A --unstable deno.js",
"bench": "bun run bench:bun && bun run bench:node && bun run bench:deno"
}
}

70
biome.json Normal file
View File

@@ -0,0 +1,70 @@
{
"$schema": "./node_modules/@biomejs/biome/configuration_schema.json",
"organizeImports": {
"enabled": true
},
"linter": {
"enabled": false
},
"javascript": {
"parser": {
"unsafeParameterDecoratorsEnabled": true
},
"formatter": {
"arrowParentheses": "asNeeded",
"quoteProperties": "preserve",
"semicolons": "always",
"trailingComma": "all",
"indentStyle": "space",
"quoteStyle": "double"
}
},
"json": {
"formatter": {
"indentStyle": "space"
},
"parser": {
"allowComments": true,
"allowTrailingCommas": true
}
},
"vcs": {
"clientKind": "git",
"enabled": false,
"root": "./"
},
"files": {
"maxSize": 9128312873
},
"formatter": {
"enabled": true,
"indentWidth": 2,
"lineEnding": "lf",
"formatWithErrors": true,
"lineWidth": 120,
"indentStyle": "space",
"ignore": [
"node_modules/**",
"test/snapshots",
"test/fixtures",
".next",
"test/js/deno",
"./src/deps",
"./src/bun.js/WebKit/**",
"packages/bun-polyfills",
"./build-*",
"./build",
".cache",
"out/",
"test/transpiler/property-non-ascii-fixture.js",
"test/transpiler/macro-test.test.ts",
"test/transpiler/decorator-metadata.test.ts",
"src/react-refresh.js",
"bindings-obj/*",
"src/deps/**",
"./bench/react-hello-world/react-hello-world.node.js",
"./test/cli/run/require-cache-bug-leak-fixture-large-ast.js",
"./test/cli/run/esm-leak-fixture-large-ast.mjs"
]
}
}

View File

@@ -1,7 +1,7 @@
const std = @import("std");
const pathRel = std.fs.path.relative;
const builtin = @import("builtin");
const Wyhash11 = @import("./src/wyhash.zig").Wyhash11;
const Wyhash = @import("./src/wyhash.zig").Wyhash;
const zig_version = builtin.zig_version;
@@ -84,7 +84,7 @@ const BunBuildOptions = struct {
pub fn updateRuntime(this: *BunBuildOptions) anyerror!void {
if (std.fs.cwd().openFile("src/runtime.out.js", .{ .mode = .read_only })) |file| {
defer file.close();
const runtime_hash = Wyhash11.hash(
const runtime_hash = Wyhash.hash(
0,
try file.readToEndAlloc(std.heap.page_allocator, try file.getEndPos()),
);
@@ -97,7 +97,7 @@ const BunBuildOptions = struct {
if (std.fs.cwd().openFile("src/fallback.out.js", .{ .mode = .read_only })) |file| {
defer file.close();
const fallback_hash = Wyhash11.hash(
const fallback_hash = Wyhash.hash(
0,
try file.readToEndAlloc(std.heap.page_allocator, try file.getEndPos()),
);

BIN
bun.lockb

Binary file not shown.

View File

@@ -6,4 +6,3 @@
#
# Instead, we can only scan the test directory for Bun's runtime tests
root = "test"
preload = "./test/preload.ts"

View File

@@ -2,7 +2,7 @@
name: bun
appspec: { version: "0.001" }
plugins: [-Meta]
title: A tool for installing and managing JavaScript packages
title: A tool for installing and managing Python packages
options:
- version|V --Show version and exit

View File

@@ -96,16 +96,18 @@ FROM alpine:3.18
ARG BUN_RUNTIME_TRANSPILER_CACHE_PATH=0
ENV BUN_RUNTIME_TRANSPILER_CACHE_PATH=${BUN_RUNTIME_TRANSPILER_CACHE_PATH}
COPY --from=build /tmp/glibc.apk /tmp/
COPY --from=build /tmp/glibc-bin.apk /tmp/
COPY --from=build /usr/local/bin/bun /usr/local/bin/
COPY docker-entrypoint.sh /usr/local/bin/
# Temporarily use the `build`-stage /tmp folder to access the glibc APKs:
RUN --mount=type=bind,from=build,source=/tmp,target=/tmp \
addgroup -g 1000 bun \
RUN addgroup -g 1000 bun \
&& adduser -u 1000 -G bun -s /bin/sh -D bun \
&& apk --no-cache --force-overwrite --allow-untrusted add \
/tmp/glibc.apk \
/tmp/glibc-bin.apk \
&& rm /tmp/glibc.apk \
&& rm /tmp/glibc-bin.apk \
&& ln -s /usr/local/bin/bun /usr/local/bin/bunx \
&& which bun \
&& which bunx \

View File

@@ -58,18 +58,17 @@ Pass a path to the shared library and a map of symbols to import into `dlopen`:
```ts
import { dlopen, FFIType, suffix } from "bun:ffi";
const { i32 } = FFIType;
const path = `libadd.${suffix}`;
const lib = dlopen(path, {
add: {
args: [i32, i32],
returns: i32,
args: [FFIType.i32, FFIType.i32],
returns: FFIType.i32,
},
});
console.log(lib.symbols.add(1, 2));
lib.symbols.add(1, 2);
```
### Rust
@@ -77,7 +76,7 @@ console.log(lib.symbols.add(1, 2));
```rust
// add.rs
#[no_mangle]
pub extern "C" fn add(a: i32, b: i32) -> i32 {
pub extern "C" fn add(a: isize, b: isize) -> isize {
a + b
}
```
@@ -88,22 +87,6 @@ To compile:
$ rustc --crate-type cdylib add.rs
```
### C++
```cpp
#include <cstdint>
extern "C" int32_t add(int32_t a, int32_t b) {
return a + b;
}
```
To compile:
```bash
$ zig build-lib add.cpp -dynamic -lc -lc++
```
## FFI types
The following `FFIType` values are supported.

View File

@@ -261,12 +261,13 @@ This function is optimized for large input. On an M1X, it processes 480 MB/s -
20 GB/s, depending on how much data is being escaped and whether there is non-ascii
text. Non-string types will be converted to a string before escaping.
## `Bun.stringWidth()` ~6,756x faster `string-width` alternative
## `Bun.stringWidth()`
Get the column count of a string as it would be displayed in a terminal.
Supports ANSI escape codes, emoji, and wide characters.
```ts
Bun.stringWidth(input: string, options?: { countAnsiEscapeCodes?: boolean = false }): number
```
Example usage:
Returns the number of columns required to display a string. This is useful for aligning text in a terminal. By default, ANSI escape codes are removed before measuring the string. To include them, pass `{ countAnsiEscapeCodes: true }` as the second argument.
```ts
Bun.stringWidth("hello"); // => 5
@@ -274,131 +275,8 @@ Bun.stringWidth("\u001b[31mhello\u001b[0m"); // => 5
Bun.stringWidth("\u001b[31mhello\u001b[0m", { countAnsiEscapeCodes: true }); // => 12
```
This is useful for:
- Aligning text in a terminal
- Quickly checking if a string contains ANSI escape codes
- Measuring the width of a string in a terminal
Compared with the popular `string-width` npm package, `bun`'s implementation is > [100x faster](https://github.com/oven-sh/bun/blob/8abd1fb088bcf2e78bd5d0d65ba4526872d2ab61/bench/snippets/string-width.mjs#L22)
This API is designed to match the popular "string-width" package, so that
existing code can be easily ported to Bun and vice versa.
[In this benchmark](https://github.com/oven-sh/bun/blob/5147c0ba7379d85d4d1ed0714b84d6544af917eb/bench/snippets/string-width.mjs#L13), `Bun.stringWidth` is a ~6,756x faster than the `string-width` npm package for input larger than about 500 characters. Big thanks to [sindresorhus](https://github.com/sindresorhus) for their work on `string-width`!
```ts
bun string-width.mjs
cpu: 13th Gen Intel(R) Core(TM) i9-13900
runtime: bun 1.0.29 (x64-linux)
benchmark time (avg) (min … max) p75 p99 p995
------------------------------------------------------------------------------------- -----------------------------
Bun.stringWidth 500 chars ascii 37.09 ns/iter (36.77 ns … 41.11 ns) 37.07 ns 38.84 ns 38.99 ns
node string-width.mjs
benchmark time (avg) (min … max) p75 p99 p995
------------------------------------------------------------------------------------- -----------------------------
npm/string-width 500 chars ascii 249,710 ns/iter (239,970 ns … 293,180 ns) 250,930 ns 276,700 ns 281,450 ns
```
To make `Bun.stringWidth` fast, we've implemented it in Zig using optimized SIMD instructions, accounting for Latin1, UTF-16, and UTF-8 encodings. It passes `string-width`'s tests.
{% details summary="View full benchmark" %}
As a reminder, 1 nanosecond (ns) is 1 billionth of a second. Here's a quick reference for converting between units:
| Unit | 1 Millisecond |
| ---- | ------------- |
| ns | 1,000,000 |
| µs | 1,000 |
| ms | 1 |
```js
bun string-width.mjs
cpu: 13th Gen Intel(R) Core(TM) i9-13900
runtime: bun 1.0.29 (x64-linux)
benchmark time (avg) (min … max) p75 p99 p995
------------------------------------------------------------------------------------- -----------------------------
Bun.stringWidth 5 chars ascii 16.45 ns/iter (16.27 ns … 19.71 ns) 16.48 ns 16.93 ns 17.21 ns
Bun.stringWidth 50 chars ascii 19.42 ns/iter (18.61 ns … 27.85 ns) 19.35 ns 21.7 ns 22.31 ns
Bun.stringWidth 500 chars ascii 37.09 ns/iter (36.77 ns … 41.11 ns) 37.07 ns 38.84 ns 38.99 ns
Bun.stringWidth 5,000 chars ascii 216.9 ns/iter (215.8 ns … 228.54 ns) 216.23 ns 228.52 ns 228.53 ns
Bun.stringWidth 25,000 chars ascii 1.01 µs/iter (1.01 µs … 1.01 µs) 1.01 µs 1.01 µs 1.01 µs
Bun.stringWidth 7 chars ascii+emoji 54.2 ns/iter (53.36 ns … 58.19 ns) 54.23 ns 57.55 ns 57.94 ns
Bun.stringWidth 70 chars ascii+emoji 354.26 ns/iter (350.51 ns … 363.96 ns) 355.93 ns 363.11 ns 363.96 ns
Bun.stringWidth 700 chars ascii+emoji 3.3 µs/iter (3.27 µs … 3.4 µs) 3.3 µs 3.4 µs 3.4 µs
Bun.stringWidth 7,000 chars ascii+emoji 32.69 µs/iter (32.22 µs … 45.27 µs) 32.7 µs 34.57 µs 34.68 µs
Bun.stringWidth 35,000 chars ascii+emoji 163.35 µs/iter (161.17 µs … 170.79 µs) 163.82 µs 169.66 µs 169.93 µs
Bun.stringWidth 8 chars ansi+emoji 66.15 ns/iter (65.17 ns … 69.97 ns) 66.12 ns 69.8 ns 69.87 ns
Bun.stringWidth 80 chars ansi+emoji 492.95 ns/iter (488.05 ns … 499.5 ns) 494.8 ns 498.58 ns 499.5 ns
Bun.stringWidth 800 chars ansi+emoji 4.73 µs/iter (4.71 µs … 4.88 µs) 4.72 µs 4.88 µs 4.88 µs
Bun.stringWidth 8,000 chars ansi+emoji 47.02 µs/iter (46.37 µs … 67.44 µs) 46.96 µs 49.57 µs 49.63 µs
Bun.stringWidth 40,000 chars ansi+emoji 234.45 µs/iter (231.78 µs … 240.98 µs) 234.92 µs 236.34 µs 236.62 µs
Bun.stringWidth 19 chars ansi+emoji+ascii 135.46 ns/iter (133.67 ns … 143.26 ns) 135.32 ns 142.55 ns 142.77 ns
Bun.stringWidth 190 chars ansi+emoji+ascii 1.17 µs/iter (1.16 µs … 1.17 µs) 1.17 µs 1.17 µs 1.17 µs
Bun.stringWidth 1,900 chars ansi+emoji+ascii 11.45 µs/iter (11.26 µs … 20.41 µs) 11.45 µs 12.08 µs 12.11 µs
Bun.stringWidth 19,000 chars ansi+emoji+ascii 114.06 µs/iter (112.86 µs … 120.06 µs) 114.25 µs 115.86 µs 116.15 µs
Bun.stringWidth 95,000 chars ansi+emoji+ascii 572.69 µs/iter (565.52 µs … 607.22 µs) 572.45 µs 604.86 µs 605.21 µs
```
```ts
node string-width.mjs
cpu: 13th Gen Intel(R) Core(TM) i9-13900
runtime: node v21.4.0 (x64-linux)
benchmark time (avg) (min … max) p75 p99 p995
-------------------------------------------------------------------------------------- -----------------------------
npm/string-width 5 chars ascii 3.19 µs/iter (3.13 µs … 3.48 µs) 3.25 µs 3.48 µs 3.48 µs
npm/string-width 50 chars ascii 20.09 µs/iter (18.93 µs … 435.06 µs) 19.49 µs 21.89 µs 22.59 µs
npm/string-width 500 chars ascii 249.71 µs/iter (239.97 µs … 293.18 µs) 250.93 µs 276.7 µs 281.45 µs
npm/string-width 5,000 chars ascii 6.69 ms/iter (6.58 ms … 6.76 ms) 6.72 ms 6.76 ms 6.76 ms
npm/string-width 25,000 chars ascii 139.57 ms/iter (137.17 ms … 143.28 ms) 140.49 ms 143.28 ms 143.28 ms
npm/string-width 7 chars ascii+emoji 3.7 µs/iter (3.62 µs … 3.94 µs) 3.73 µs 3.94 µs 3.94 µs
npm/string-width 70 chars ascii+emoji 23.93 µs/iter (22.44 µs … 331.2 µs) 23.15 µs 25.98 µs 30.2 µs
npm/string-width 700 chars ascii+emoji 251.65 µs/iter (237.78 µs … 444.69 µs) 252.92 µs 325.89 µs 354.08 µs
npm/string-width 7,000 chars ascii+emoji 4.95 ms/iter (4.82 ms … 5.19 ms) 5 ms 5.04 ms 5.19 ms
npm/string-width 35,000 chars ascii+emoji 96.93 ms/iter (94.39 ms … 102.58 ms) 97.68 ms 102.58 ms 102.58 ms
npm/string-width 8 chars ansi+emoji 3.92 µs/iter (3.45 µs … 4.57 µs) 4.09 µs 4.57 µs 4.57 µs
npm/string-width 80 chars ansi+emoji 24.46 µs/iter (22.87 µs … 4.2 ms) 23.54 µs 25.89 µs 27.41 µs
npm/string-width 800 chars ansi+emoji 259.62 µs/iter (246.76 µs … 480.12 µs) 258.65 µs 349.84 µs 372.55 µs
npm/string-width 8,000 chars ansi+emoji 5.46 ms/iter (5.41 ms … 5.57 ms) 5.48 ms 5.55 ms 5.57 ms
npm/string-width 40,000 chars ansi+emoji 108.91 ms/iter (107.55 ms … 109.5 ms) 109.25 ms 109.5 ms 109.5 ms
npm/string-width 19 chars ansi+emoji+ascii 6.53 µs/iter (6.35 µs … 6.75 µs) 6.54 µs 6.75 µs 6.75 µs
npm/string-width 190 chars ansi+emoji+ascii 55.52 µs/iter (52.59 µs … 352.73 µs) 54.19 µs 80.77 µs 167.21 µs
npm/string-width 1,900 chars ansi+emoji+ascii 701.71 µs/iter (653.94 µs … 893.78 µs) 715.3 µs 855.37 µs 872.9 µs
npm/string-width 19,000 chars ansi+emoji+ascii 27.19 ms/iter (26.89 ms … 27.41 ms) 27.28 ms 27.41 ms 27.41 ms
npm/string-width 95,000 chars ansi+emoji+ascii 3.68 s/iter (3.66 s … 3.7 s) 3.69 s 3.7 s 3.7 s
```
{% /details %}
TypeScript definition:
```ts
namespace Bun {
export function stringWidth(
/**
* The string to measure
*/
input: string,
options?: {
/**
* If `true`, count ANSI escape codes as part of the string width. If `false`, ANSI escape codes are ignored when calculating the string width.
*
* @default false
*/
countAnsiEscapeCodes?: boolean;
/**
     * When it's ambiguous and `true`, count emoji as 1 character wide. If `false`, emoji are counted as 2 characters wide.
*
* @default true
*/
ambiguousIsNarrow?: boolean;
},
): number;
}
```
<!-- ## `Bun.enableANSIColors()` -->

View File

@@ -32,26 +32,6 @@ All imported files and packages are bundled into the executable, along with a co
{% /callout %}
## Deploying to production
Compiled executables reduce memory usage and improve Bun's start time.
Normally, Bun reads and transpiles JavaScript and TypeScript files on `import` and `require`. This is part of what makes so much of Bun "just work", but it's not free. It costs time and memory to read files from disk, resolve file paths, parse, transpile, and print source code.
With compiled executables, you can move that cost from runtime to build-time.
When deploying to production, we recommend the following:
```sh
bun build --compile --minify --sourcemap ./path/to/my/app.ts --outfile myapp
```
**What do these flags do?**
The `--minify` argument optimizes the size of the transpiled output code. If you have a large application, this can save megabytes of space. For smaller applications, it might still improve start time a little.
The `--sourcemap` argument embeds a sourcemap compressed with zstd, so that errors & stacktraces point to their original locations instead of the transpiled location. Bun will automatically decompress & resolve the sourcemap when an error occurs.
## SQLite
You can use `bun:sqlite` imports with `bun build --compile`.

View File

@@ -195,7 +195,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout repo
uses: actions/checkout@v4
uses: actions/checkout@v3
- name: Install bun
uses: oven-sh/setup-bun@v1
- name: Install dependencies

View File

@@ -75,6 +75,14 @@ $ bun run dev --watch # ❌ don't do this
Flags that occur at the end of the command will be ignored and passed through to the `"dev"` script itself.
{% /callout %}
### `--smol`
In memory-constrained environments, use the `--smol` flag to reduce memory usage at a cost to performance.
```bash
$ bun --smol run index.tsx
```
## Run a `package.json` script
{% note %}
@@ -87,7 +95,7 @@ $ bun [bun flags] run <script> [script flags]
Your `package.json` can define a number of named `"scripts"` that correspond to shell commands.
```json
```jsonc
{
// ... other fields
"scripts": {
@@ -150,32 +158,3 @@ By default, Bun respects this shebang and executes the script with `node`. Howev
```bash
$ bun run --bun vite
```
## `bun run -` to pipe code from stdin
`bun run -` lets you read JavaScript, TypeScript, TSX, or JSX from stdin and execute it without writing to a temporary file first.
```bash
$ echo "console.log('Hello')" | bun run -
Hello
```
You can also use `bun run -` to redirect files into Bun. For example, to run a `.js` file as if it were a `.ts` file:
```bash
$ echo "console.log!('This is TypeScript!' as any)" > secretly-typescript.js
$ bun run - < secretly-typescript.js
This is TypeScript!
```
For convenience, all code is treated as TypeScript with JSX support when using `bun run -`.
## `bun run --smol`
In memory-constrained environments, use the `--smol` flag to reduce memory usage at a cost to performance.
```bash
$ bun --smol run index.tsx
```
This causes the garbage collector to run more frequently, which can slow down execution. However, it can be useful in environments with limited memory. Bun automatically adjusts the garbage collector's heap size based on the available memory (accounting for cgroups and other memory limits) with and without the `--smol` flag, so this is mostly useful for cases where you want to make the heap size grow more slowly.

View File

@@ -1,33 +0,0 @@
---
name: fetch with unix domain sockets in Bun
---
In Bun, the `unix` option in `fetch()` lets you send HTTP requests over a [unix domain socket](https://en.wikipedia.org/wiki/Unix_domain_socket).
```ts
const unix = "/var/run/docker.sock";
const response = await fetch("http://localhost/info", { unix });
const body = await response.json();
console.log(body); // { ... }
```
---
The `unix` option is a string that specifies the local file path to a unix domain socket. The `fetch()` function will use the socket to send the request to the server instead of using a TCP network connection. `https` is also supported by using the `https://` protocol in the URL instead of `http://`.
To send a `POST` request to an API endpoint over a unix domain socket:
```ts
const response = await fetch("https://hostname/a/path", {
unix: "/var/run/path/to/unix.sock",
method: "POST",
body: JSON.stringify({ message: "Hello from Bun!" }),
headers: {
"Content-Type": "application/json",
},
});
const body = await response.json();
```

View File

@@ -2,7 +2,7 @@
name: Common HTTP server usage
---
This starts an HTTP server listening on port `3000`. It demonstrates basic routing with a number of common responses and also handles POST data from standard forms or as JSON.
This starts an HTTP server listening on port `3000`. It demonstates basic routing with a number of common responses and also handles POST data from standard forms or as JSON.
See [`Bun.serve`](/docs/api/http) for details.

View File

@@ -12,7 +12,7 @@ jobs:
runs-on: ubuntu-latest
steps:
# ...
- uses: actions/checkout@v4
- uses: actions/checkout@v3
+ - uses: oven-sh/setup-bun@v1
# run any `bun` or `bunx` command

View File

@@ -1,40 +0,0 @@
---
name: Run a Shell Command
---
Bun Shell is a cross-platform bash-like shell built in to Bun.
It provides a simple way to run shell commands in JavaScript and TypeScript. To get started, import the `$` function from the `bun` package and use it to run shell commands.
```ts#foo.ts
import { $ } from "bun";
await $`echo Hello, world!`; // => "Hello, world!"
```
---
The `$` function is a tagged template literal that runs the command and returns a promise that resolves with the command's output.
```ts#foo.ts
import { $ } from "bun";
const output = await $`ls -l`.text();
console.log(output);
```
---
To get each line of the output as an array, use the `lines` method.
```ts#foo.ts
import { $ } from "bun";
for await (const line of $`ls -l`.lines()) {
console.log(line);
}
```
---
See [Docs > API > Shell](/docs/runtime/shell) for complete documentation.

View File

@@ -1,15 +0,0 @@
---
name: Get the path to an executable bin file
---
`Bun.which` is a utility function to find the absolute path of an executable file. It is similar to the `which` command in Unix-like systems.
```ts#foo.ts
Bun.which("sh"); // => "/bin/sh"
Bun.which("notfound"); // => null
Bun.which("bun"); // => "/home/user/.bun/bin/bun"
```
---
See [Docs > API > Utils](/docs/api/utils#bun-which) for complete documentation.

View File

@@ -0,0 +1,28 @@
---
name: Upgrade an HTTP request to a WebSocket connection
---
Inside `fetch`, use the `server.upgrade()` function to upgrade an incoming `Request` to a WebSocket connection. Bun automatically returns a 101 Switching Protocols response if the upgrade succeeds.
Refer to the [WebSocket docs](/docs/api/websockets) for more information on building WebSocket servers.
```ts
const server = Bun.serve<{ authToken: string }>({
fetch(req, server) {
const success = server.upgrade(req);
if (success) {
// Bun automatically returns a 101 Switching Protocols
// if the upgrade succeeds
return undefined;
}
// handle HTTP request normally
return new Response("Hello world!");
},
websocket: {
// define websocket handlers
},
});
console.log(`Listening on localhost:${server.port}`);
```

View File

@@ -1,4 +1,4 @@
Bun is an all-in-one toolkit for JavaScript and TypeScript apps. It ships as a single executable called `bun`.
Bun is an all-in-one toolkit for JavaScript and TypeScript apps. It ships as a single executable called `bun`.
At its core is the _Bun runtime_, a fast JavaScript runtime designed as a drop-in replacement for Node.js. It's written in Zig and powered by JavaScriptCore under the hood, dramatically reducing startup times and memory usage.
@@ -6,18 +6,18 @@ At its core is the _Bun runtime_, a fast JavaScript runtime designed as a drop-i
$ bun run index.tsx # TS and JSX supported out of the box
```
The `bun` command-line tool also implements a test runner, script runner, and Node.js-compatible package manager, all significantly faster than existing tools and usable in existing Node.js projects with little to no changes necessary.
The `bun` command-line tool also implements a test runner, script runner, and Node.js-compatible package manager, all significantly faster than existing tools and usable in existing Node.js projects with little to no changes necessary.
```bash
$ bun run start # run the `start` script
$ bun install <pkg> # install a package
$ bun install <pkg> # install a package
$ bun build ./index.tsx # bundle a project for browsers
$ bun test # run tests
$ bunx cowsay 'Hello, world!' # execute a package
```
{% callout type="note" %}
**Bun is still under development.** Use it to speed up your development workflows or run simpler production code in resource-constrained environments like serverless functions. We're working on more complete Node.js compatibility and integration with existing frameworks. Join the [Discord](https://bun.sh/discord) and watch the [GitHub repository](https://github.com/oven-sh/bun) to keep tabs on future releases.
**Bun is still under development.** Use it to speed up your development workflows or run simpler production code in resource-constrained environments like serverless functions. We're working on more complete Node.js compatibility and integration with existing frameworks. Join the [Discord](https://bun.sh/discord) and watch the [GitHub repository](https://github.com/oven-sh/bun) to keep tabs on future releases.
{% /callout %}
Get started with one of the quick links below, or read on to learn more about Bun.

View File

@@ -1,6 +1,6 @@
All packages downloaded from the registry are stored in a global cache at `~/.bun/install/cache`. They are stored in subdirectories named like `${name}@${version}`, so multiple versions of a package can be cached.
{% details summary="Configuring cache behavior" (bunfig.toml) %}
{% details summary="Configuring cache behavior" %}
```toml
[install.cache]

View File

@@ -22,7 +22,8 @@ $ npm install -g bun # the last `npm` command you'll ever need
```
```bash#Homebrew
$ brew install oven-sh/bun/bun # for macOS and Linux
$ brew tap oven-sh/bun # for macOS and Linux
$ brew install bun
```
```bash#Docker

View File

@@ -13,7 +13,7 @@ $ brew install automake ccache cmake coreutils gnu-sed go icu4c libiconv libtool
```
```bash#Ubuntu/Debian
$ sudo apt install curl wget lsb-release software-properties-common cargo ccache cmake git golang libtool ninja-build pkg-config rustc ruby-full xz-utils
$ sudo apt install cargo ccache cmake git golang libtool ninja-build pkg-config rustc ruby-full xz-utils
```
```bash#Arch
@@ -24,18 +24,8 @@ $ sudo pacman -S base-devel ccache cmake git go libiconv libtool make ninja pkg-
$ sudo dnf install cargo ccache cmake git golang libtool ninja-build pkg-config rustc ruby libatomic-static libstdc++-static sed unzip which libicu-devel 'perl(Math::BigInt)'
```
```bash#openSUSE Tumbleweed
$ sudo zypper install go cmake ninja automake git rustup && rustup toolchain install stable
```
{% /codetabs %}
{% callout %}
**Note**: The Zig compiler is automatically installed and updated by the build scripts. Manual installation is not required.
{% /callout %}
Before starting, you will need to already have a release build of Bun installed, as we use our bundler to transpile and minify our code, as well as for code generation scripts.
{% codetabs %}
@@ -89,10 +79,6 @@ $ sudo dnf copr enable -y @fedora-llvm-team/llvm-snapshots
$ sudo dnf install llvm clang lld
```
```bash#openSUSE Tumbleweed
$ sudo zypper install clang16 lld16 llvm16
```
{% /codetabs %}
If none of the above solutions apply, you will have to install it [manually](https://github.com/llvm/llvm-project/releases/tag/llvmorg-16.0.6).
@@ -156,8 +142,6 @@ Advanced uses can pass CMake flags to customize the build.
VSCode is the recommended IDE for working on Bun, as it has been configured. Once opening, you can run `Extensions: Show Recommended Extensions` to install the recommended extensions for Zig and C++. ZLS is automatically configured.
If you use a different editor, make sure that you tell ZLS to use the automatically installed Zig compiler, which is located at `./.cache/zig/zig` (`zig.exe` on Windows).
## Code generation scripts
{% callout %}

View File

@@ -30,20 +30,17 @@ Click the link in the right column to jump to the associated documentation.
---
- File I/O
- [`Bun.file`](/docs/api/file-io#reading-files-bun-file)
[`Bun.write`](/docs/api/file-io#writing-files-bun-write)
- [`Bun.file`](/docs/api/file-io#reading-files-bun-file) [`Bun.write`](/docs/api/file-io#writing-files-bun-write)
---
- Child processes
- [`Bun.spawn`](/docs/api/spawn#spawn-a-process-bun-spawn)
[`Bun.spawnSync`](/docs/api/spawn#blocking-api-bun-spawnsync)
- [`Bun.spawn`](/docs/api/spawn#spawn-a-process-bun-spawn) [`Bun.spawnSync`](/docs/api/spawn#blocking-api-bun-spawnsync)
---
- TCP
- [`Bun.listen`](/docs/api/tcp#start-a-server-bun-listen)
[`Bun.connect`](/docs/api/tcp#start-a-server-bun-listen)
- [`Bun.listen`](/docs/api/tcp#start-a-server-bun-listen) [`Bun.connect`](/docs/api/tcp#start-a-server-bun-listen)
---
@@ -63,8 +60,7 @@ Click the link in the right column to jump to the associated documentation.
---
- Hashing
- [`Bun.hash`](/docs/api/hashing#bun-hash)
[`Bun.CryptoHasher`](/docs/api/hashing#bun-cryptohasher)
- [`Bun.hash`](/docs/api/hashing#bun-hash) [`Bun.CryptoHasher`](/docs/api/hashing#bun-cryptohasher)
---
@@ -104,26 +100,6 @@ Click the link in the right column to jump to the associated documentation.
---
- Utilities
- [`Bun.version`](/docs/api/utils#bun-version)
[`Bun.revision`](/docs/api/utils#bun-revision)
[`Bun.env`](/docs/api/utils#bun-env)
[`Bun.main`](/docs/api/utils#bun-main)
[`Bun.sleep()`](/docs/api/utils#bun-sleep)
[`Bun.sleepSync()`](/docs/api/utils#bun-sleepsync)
[`Bun.which()`](/docs/api/utils#bun-which)
[`Bun.peek()`](/docs/api/utils#bun-peek)
[`Bun.openInEditor()`](/docs/api/utils#bun-openineditor)
[`Bun.deepEquals()`](/docs/api/utils#bun-deepequals)
[`Bun.escapeHTML()`](/docs/api/utils#bun-escapehtml)
[`Bun.fileURLToPath()`](/docs/api/utils#bun-fileurltopath)
[`Bun.pathToFileURL()`](/docs/api/utils#bun-pathtofileurl)
[`Bun.gzipSync()`](/docs/api/utils#bun-gzipsync)
[`Bun.gunzipSync()`](/docs/api/utils#bun-gunzipsync)
[`Bun.deflateSync()`](/docs/api/utils#bun-deflatesync)
[`Bun.inflateSync()`](/docs/api/utils#bun-inflatesync)
[`Bun.inspect()`](/docs/api/utils#bun-inspect)
[`Bun.nanoseconds()`](/docs/api/utils#bun-nanoseconds)
[`Bun.readableStreamTo*()`](/docs/api/utils#bun-readablestreamto)
[`Bun.resolveSync()`](/docs/api/utils#bun-resolvesync)
- [`Bun.version`](/docs/api/utils#bun-version) [`Bun.revision`](/docs/api/utils#bun-revision) [`Bun.env`](/docs/api/utils#bun-env) [`Bun.main`](/docs/api/utils#bun-main) [`Bun.sleep()`](/docs/api/utils#bun-sleep) [`Bun.sleepSync()`](/docs/api/utils#bun-sleepsync) [`Bun.which()`](/docs/api/utils#bun-which) [`Bun.peek()`](/docs/api/utils#bun-peek) [`Bun.openInEditor()`](/docs/api/utils#bun-openineditor) [`Bun.deepEquals()`](/docs/api/utils#bun-deepequals) [`Bun.escapeHTML()`](/docs/api/utils#bun-escapehtml) [`Bun.fileURLToPath()`](/docs/api/utils#bun-fileurltopath) [`Bun.pathToFileURL()`](/docs/api/utils#bun-pathtofileurl) [`Bun.gzipSync()`](/docs/api/utils#bun-gzipsync) [`Bun.gunzipSync()`](/docs/api/utils#bun-gunzipsync) [`Bun.deflateSync()`](/docs/api/utils#bun-deflatesync) [`Bun.inflateSync()`](/docs/api/utils#bun-inflatesync) [`Bun.inspect()`](/docs/api/utils#bun-inspect) [`Bun.nanoseconds()`](/docs/api/utils#bun-nanoseconds) [`Bun.readableStreamTo*()`](/docs/api/utils#bun-readablestreamto) [`Bun.resolveSync()`](/docs/api/utils#bun-resolvesync)
{% /table %}

View File

@@ -426,94 +426,4 @@ editor = "code"
# - "nvim", "neovim"
# - "vim","vi"
# - "emacs"
```
-->
## `bun run`
The `bun run` command can be configured under the `[run]` section. These apply to the `bun run` command and the `bun` command when running a file or executable or script.
Currently, `bunfig.toml` isn't always automatically loaded for `bun run` in a local project (it does check for a global `bunfig.toml`), so you might still need to pass `-c` or `-c=bunfig.toml` to use these settings.
### `run.shell` - use the system shell or Bun's shell
The shell to use when running package.json scripts via `bun run` or `bun`. On Windows, this defaults to `"bun"` and on other platforms it defaults to `"system"`.
To always use the system shell instead of Bun's shell (default behavior unless Windows):
```toml
[run]
# default outside of Windows
shell = "system"
```
To always use Bun's shell instead of the system shell:
```toml
[run]
# default on Windows
shell = "bun"
```
### `run.bun` - auto alias `node` to `bun`
When `true`, this prepends `$PATH` with a `node` symlink that points to the `bun` binary for all scripts or executables invoked by `bun run` or `bun`.
This means that if you have a script that runs `node`, it will actually run `bun` instead, without needing to change your script. This works recursively, so if your script runs another script that runs `node`, it will also run `bun` instead. This applies to shebangs as well, so if you have a script with a shebang that points to `node`, it will actually run `bun` instead.
By default, this is enabled if `node` is not already in your `$PATH`.
```toml
[run]
# equivalent to `bun --bun` for all `bun run` commands
bun = true
```
You can test this by running:
```sh
$ bun --bun which node # /path/to/bun
$ bun which node # /path/to/node
```
This option is equivalent to prefixing all `bun run` commands with `--bun`:
```sh
bun --bun run dev
bun --bun dev
bun run --bun dev
```
If set to `false`, this will disable the `node` symlink.
### `run.silent` - suppress reporting the command being run
When `true`, suppresses the output of the command being run by `bun run` or `bun`.
```toml
[run]
silent = true
```
Without this option, the command being run will be printed to the console:
```sh
$ bun run dev
> $ echo "Running \"dev\"..."
Running "dev"...
```
With this option, the command being run will not be printed to the console:
```sh
$ bun run dev
Running "dev"...
```
This is equivalent to passing `--silent` to all `bun run` commands:
```sh
bun --silent run dev
bun --silent dev
bun run --silent dev
```
``` -->

View File

@@ -163,16 +163,6 @@ These environment variables are read by Bun and configure aspects of its behavio
---
- `BUN_CONFIG_MAX_HTTP_REQUESTS`
- Control the maximum number of concurrent HTTP requests sent by fetch and `bun install`. Defaults to `256`. If you are running into rate limits or connection issues, you can reduce this number.
---
- `BUN_CONFIG_NO_CLEAR_TERMINAL_ON_RELOAD`
- If `BUN_CONFIG_NO_CLEAR_TERMINAL_ON_RELOAD=1`, then `bun --watch` will not clear the console on reload
---
- `DO_NOT_TRACK`
- Telemetry is not sent yet as of November 28th, 2023, but we are planning to add telemetry in the coming months. If `DO_NOT_TRACK=1`, then analytics are [disabled](https://do-not-track.dev/). Bun records bundle timings (so we can answer with data, "is Bun getting faster?") and feature usage (e.g., "are people actually using macros?"). The request body size is about 60 bytes, so it's not a lot of data. Equivalent of `telemetry=false` in bunfig.

View File

@@ -148,7 +148,7 @@ Some methods are not optimized yet.
### [`node:url`](https://nodejs.org/api/url.html)
🟢 Fully implemented.
🟡 Missing `domainToASCII` and `domainToUnicode`. It's recommended to use `URL` and `URLSearchParams` globals instead.
### [`node:util`](https://nodejs.org/api/util.html)
@@ -432,7 +432,7 @@ The table below lists all globals implemented by Node.js and Bun's current compa
### [`URL`](https://developer.mozilla.org/en-US/docs/Web/API/URL)
🟡 `URL.createObjectURL` is missing. See [Issue #3925](https://github.com/oven-sh/bun/issues/3925)
🟢 Fully implemented.
### [`URLSearchParams`](https://developer.mozilla.org/en-US/docs/Web/API/URLSearchParams)

View File

@@ -67,20 +67,9 @@ console.log(exitCode); // 0
## Redirection
A command's _input_ or _output_ may be _redirected_ using the typical Bash operators:
- `<` redirect stdin
- `>` or `1>` redirect stdout
- `2>` redirect stderr
- `&>` redirect both stdout and stderr
- `>>` or `1>>` redirect stdout, _appending_ to the destination, instead of overwriting
- `2>>` redirect stderr, _appending_ to the destination, instead of overwriting
- `&>>` redirect both stdout and stderr, _appending_ to the destination, instead of overwriting
- `1>&2` redirect stdout to stderr (all writes to stdout will instead be in stderr)
- `2>&1` redirect stderr to stdout (all writes to stderr will instead be in stdout)
Bun Shell supports redirection with `<`, `>`, and `|` operators.
Bun Shell also supports redirecting from and to JavaScript objects.
### Example: Redirect output to JavaScript objects (`>`)
### To JavaScript objects (`>`)
To redirect stdout to a JavaScript object, use the `>` operator:
@@ -99,7 +88,7 @@ The following JavaScript objects are supported for redirection to:
- `Buffer`, `Uint8Array`, `Uint16Array`, `Uint32Array`, `Int8Array`, `Int16Array`, `Int32Array`, `Float32Array`, `Float64Array`, `ArrayBuffer`, `SharedArrayBuffer` (writes to the underlying buffer)
- `Bun.file(path)`, `Bun.file(fd)` (writes to the file)
### Example: Redirect input from JavaScript objects (`<`)
### From JavaScript objects (`<`)
To redirect the output from JavaScript objects to stdin, use the `<` operator:
@@ -119,52 +108,7 @@ The following JavaScript objects are supported for redirection from:
- `Bun.file(path)`, `Bun.file(fd)` (reads from the file)
- `Response` (reads from the body)
### Example: Redirect stdin -> file
```js
import { $ } from "bun"
await $`cat < myfile.txt`
```
### Example: Redirect stdout -> file
```js
import { $ } from "bun"
await $`echo bun! > greeting.txt`
```
### Example: Redirect stderr -> file
```js
import { $ } from "bun"
await $`bun run index.ts 2> errors.txt`
```
### Example: Redirect stdout -> stderr
```js
import { $ } from "bun"
// redirects stderr to stdout, so all output
// will be available on stdout
await $`bun run ./index.ts 2>&1`
```
### Example: Redirect stderr -> stdout
```js
import { $ } from "bun"
// redirects stdout to stderr, so all output
// will be available on stderr
await $`bun run ./index.ts 1>&2`
```
## Piping (`|`)
### Piping (`|`)
Like in bash, you can pipe the output of one command to another:
@@ -400,26 +344,25 @@ await $`echo ${{ raw: '$(foo) `bar` "baz"' }}`
// => baz
```
## .sh file loader
## .bun.sh file loader
For simple shell scripts, instead of `/bin/sh`, you can use Bun Shell to run shell scripts.
For simple shell scripts, instead of `sh`, you can use Bun Shell to run shell scripts.
To do so, just run the script with `bun` on a file with the `.sh` extension.
```sh#script.sh
echo "Hello World! pwd=$(pwd)"
```
To do that, run any file with bun that ends with `.bun.sh`:
```sh
$ echo "echo Hello World!" > script.bun.sh
$ bun ./script.bun.sh
> Hello World!
```
On Windows, Bun Shell is used automatically to run `.sh` files when using Bun:
```sh
$ echo "echo Hello World!" > script.sh
# On windows, .bun.sh is not needed, just .sh
$ bun ./script.sh
Hello World! pwd=/home/demo
```
Scripts with Bun Shell are cross platform, which means they work on Windows:
```
PS C:\Users\Demo> bun .\script.sh
Hello World! pwd=C:\Users\Demo
> Hello World!
```
## Credits

View File

@@ -327,7 +327,7 @@ Bun implements the following matchers. Full Jest compatibility is on the roadmap
---
-
-
- [`.assertions()`](https://jestjs.io/docs/expect#expectassertionsnumber)
---
@@ -337,7 +337,7 @@ Bun implements the following matchers. Full Jest compatibility is on the roadmap
---
-
-
- [`.hasAssertions()`](https://jestjs.io/docs/expect#expecthasassertions)
---

View File

@@ -21,7 +21,7 @@ const withExtensions = [
return !!json[key]?.extensions?.length;
})
.flatMap(mime => {
return [...new Set(json[mime].extensions)].map(ext => {
return [...new Set([...json[mime].extensions])].map(ext => {
return [`.{.@"${ext}", all.@"${mime}"}`];
});
})

View File

@@ -2,13 +2,14 @@
"private": true,
"name": "bun",
"dependencies": {
"@biomejs/biome": "1.5.3",
"@vscode/debugadapter": "^1.61.0",
"esbuild": "^0.17.15",
"eslint": "^8.20.0",
"eslint-config-prettier": "^8.5.0",
"mitata": "^0.1.3",
"peechy": "0.4.34",
"prettier": "^3.2.5",
"prettier": "3.2.2",
"react": "next",
"react-dom": "next",
"source-map-js": "^1.0.2",
@@ -24,10 +25,9 @@
"build": "if [ ! -e build ]; then bun setup; fi && ninja -C build",
"build:valgrind": "cmake . -DZIG_OPTIMIZE=Debug -DUSE_DEBUG_JSC=ON -DCMAKE_BUILD_TYPE=Debug -GNinja -Bbuild-valgrind && ninja -Cbuild-valgrind",
"build:release": "cmake . -DCMAKE_BUILD_TYPE=Release -GNinja -Bbuild-release && ninja -Cbuild-release",
"build:debug-zig-release": "cmake . -DCMAKE_BUILD_TYPE=Release -DZIG_OPTIMIZE=Debug -GNinja -Bbuild-debug-zig-release && ninja -Cbuild-debug-zig-release",
"build:safe": "cmake . -DZIG_OPTIMIZE=ReleaseSafe -DUSE_DEBUG_JSC=ON -DCMAKE_BUILD_TYPE=Release -GNinja -Bbuild-safe && ninja -Cbuild-safe",
"typecheck": "tsc --noEmit && cd test && bun run typecheck",
"fmt": "prettier --write --cache './{.vscode,src,test,bench,packages/{bun-types,bun-inspector-*,bun-vscode,bun-debug-adapter-protocol}}/**/*.{mjs,ts,tsx,js,jsx}'",
"fmt": "biome format --write {.vscode,src,test,bench,packages/{bun-types,bun-inspector-*,bun-vscode,bun-debug-adapter-protocol}}",
"fmt:zig": "zig fmt src/*.zig src/*/*.zig src/*/*/*.zig src/*/*/*/*.zig",
"lint": "eslint './**/*.d.ts' --cache",
"lint:fix": "eslint './**/*.d.ts' --cache --fix",

Binary file not shown.

Binary file not shown.

View File

@@ -7,11 +7,9 @@
"runners/qunit"
],
"dependencies": {
"@actions/core": "latest",
"p-queue": "^8.0.1"
"@actions/core": "latest"
},
"devDependencies": {
"@types/p-queue": "^3.2.1",
"bun-types": "canary",
"prettier": "^2.8.2"
},

View File

@@ -1,91 +1,49 @@
import * as action from "@actions/core";
import { spawn, spawnSync } from "child_process";
import { rmSync, writeFileSync, readFileSync, mkdirSync, openSync, close, closeSync } from "fs";
import { readFile, rm } from "fs/promises";
import { rmSync, writeFileSync, readFileSync } from "fs";
import { readFile } from "fs/promises";
import { readdirSync } from "node:fs";
import { resolve, basename } from "node:path";
import { constants, cpus, hostname, tmpdir, totalmem, userInfo } from "os";
import { join, normalize } from "path";
import { cpus, hostname, totalmem, userInfo } from "os";
import { fileURLToPath } from "url";
import PQueue from "p-queue";
const run_start = new Date();
const TIMEOUT_DURATION = 1000 * 60 * 5;
const SHORT_TIMEOUT_DURATION = Math.ceil(TIMEOUT_DURATION / 5);
function defaultConcurrency() {
// This causes instability due to the number of open file descriptors / sockets in some tests
// Windows has higher limits
if (process.platform !== "win32") {
return 1;
}
return Math.min(Math.floor((cpus().length - 2) / 2), 2);
}
const windows = process.platform === "win32";
const KEEP_TMPDIR = process.env["BUN_KEEP_TMPDIR"] === "1";
const nativeMemory = totalmem();
const force_ram_size_input = parseInt(process.env["BUN_JSC_forceRAMSize"] || "0", 10);
let force_ram_size = Number(BigInt(nativeMemory) >> BigInt(2)) + "";
if (!(Number.isSafeInteger(force_ram_size_input) && force_ram_size_input > 0)) {
force_ram_size = force_ram_size_input + "";
}
function uncygwinTempDir() {
if (process.platform === "win32") {
for (let key of ["TMPDIR", "TEMP", "TEMPDIR", "TMP"]) {
let TMPDIR = process.env[key] || "";
if (!/^\/[a-zA-Z]\//.test(TMPDIR)) {
continue;
}
const driveLetter = TMPDIR[1];
TMPDIR = path.win32.normalize(`${driveLetter.toUpperCase()}:` + TMPDIR.substring(2));
process.env[key] = TMPDIR;
}
}
}
uncygwinTempDir();
const cwd = resolve(fileURLToPath(import.meta.url), "../../../../");
process.chdir(cwd);
const ci = !!process.env["GITHUB_ACTIONS"];
const enableProgressBar = false;
const enableProgressBar = !ci;
const dirPrefix = "bun-test-tmp-" + ((Math.random() * 100_000_0) | 0).toString(36) + "_";
const run_concurrency = Math.max(Number(process.env["BUN_TEST_CONCURRENCY"] || defaultConcurrency(), 10), 1);
const queue = new PQueue({ concurrency: run_concurrency });
var prevTmpdir = "";
function maketemp() {
prevTmpdir = join(
tmpdir(),
dirPrefix + (Date.now() | 0).toString() + "_" + ((Math.random() * 100_000_0) | 0).toString(36),
);
mkdirSync(prevTmpdir, { recursive: true });
return prevTmpdir;
function defaultConcurrency() {
// Concurrency causes more flaky tests, only enable it by default on windows
// See https://github.com/oven-sh/bun/issues/8071
if (windows) {
return Math.floor((cpus().length - 2) / 2);
}
return 1;
}
const extensions = [".js", ".ts", ".jsx", ".tsx", ".mjs", ".cjs", ".mts", ".cts", ".mjsx", ".cjsx", ".mtsx", ".ctsx"];
const run_concurrency = Math.max(Number(process.env["BUN_TEST_CONCURRENCY"] || defaultConcurrency(), 10), 1);
const extensions = [".js", ".ts", ".jsx", ".tsx"];
const git_sha =
process.env["GITHUB_SHA"] ?? spawnSync("git", ["rev-parse", "HEAD"], { encoding: "utf-8" }).stdout.trim();
const TEST_FILTER = process.env.BUN_TEST_FILTER;
function isTest(path) {
if (!basename(path).includes(".test.") || !extensions.some(ext => path.endsWith(ext))) {
return false;
}
if (TEST_FILTER) {
if (!path.includes(TEST_FILTER)) {
return false;
}
}
return true;
}
@@ -100,15 +58,8 @@ function* findTests(dir, query) {
}
}
let bunExe = "bun";
if (process.argv.length > 2) {
bunExe = resolve(process.argv.at(-1));
} else if (process.env.BUN_PATH) {
const { BUN_PATH_BASE, BUN_PATH } = process.env;
bunExe = resolve(normalize(BUN_PATH_BASE), normalize(BUN_PATH));
}
// pick the last one, kind of a hack to allow 'bun run test bun-release' to test the release build
let bunExe = (process.argv.length > 2 ? process.argv[process.argv.length - 1] : null) ?? "bun";
const { error, stdout: revision_stdout } = spawnSync(bunExe, ["--revision"], {
env: { ...process.env, BUN_DEBUG_QUIET_LOGS: 1 },
});
@@ -149,182 +100,57 @@ const failing_tests = [];
const passing_tests = [];
const fixes = [];
const regressions = [];
let maxFd = -1;
function getMaxFileDescriptor(path) {
if (process.platform === "win32") {
return -1;
}
hasInitialMaxFD = true;
if (process.platform === "linux") {
try {
readdirSync("/proc/self/fd").forEach(name => {
const fd = parseInt(name.trim(), 10);
if (Number.isSafeInteger(fd) && fd >= 0) {
maxFd = Math.max(maxFd, fd);
}
});
return maxFd;
} catch {}
}
const devnullfd = openSync("/dev/null", "r");
closeSync(devnullfd);
maxFd = devnullfd + 1;
return maxFd;
}
let hasInitialMaxFD = false;
const activeTests = new Map();
let slowTestCount = 0;
function checkSlowTests() {
const now = Date.now();
const prevSlowTestCount = slowTestCount;
slowTestCount = 0;
for (const [path, { start, proc }] of activeTests) {
if (proc && now - start >= TIMEOUT_DURATION) {
console.error(
`\x1b[31merror\x1b[0;2m:\x1b[0m Killing test ${JSON.stringify(path)} after ${Math.ceil((now - start) / 1000)}s`,
);
proc?.stdout?.destroy?.();
proc?.stderr?.destroy?.();
proc?.kill?.();
} else if (now - start > SHORT_TIMEOUT_DURATION) {
console.error(
`\x1b[33mwarning\x1b[0;2m:\x1b[0m Test ${JSON.stringify(path)} has been running for ${Math.ceil(
(now - start) / 1000,
)}s`,
);
slowTestCount++;
}
}
if (slowTestCount > prevSlowTestCount && queue.concurrency > 1) {
queue.concurrency += 1;
}
}
setInterval(checkSlowTests, SHORT_TIMEOUT_DURATION).unref();
var currentTestNumber = 0;
async function runTest(path) {
const thisTestNumber = currentTestNumber++;
const name = path.replace(cwd, "").slice(1);
let exitCode, signal, err, output;
const expected_crash_reason = windows
? await readFile(resolve(path), "utf-8").then(data => {
const match = data.match(/@known-failing-on-windows:(.*)\n/);
return match ? match[1].trim() : null;
})
const match = data.match(/@known-failing-on-windows:(.*)\n/);
return match ? match[1].trim() : null;
})
: null;
const start = Date.now();
const activeTestObject = { start, proc: undefined };
activeTests.set(path, activeTestObject);
try {
await new Promise((finish, reject) => {
const chunks = [];
process.stderr.write(
`
at ${((start - run_start.getTime()) / 1000).toFixed(2)}s, file ${thisTestNumber
.toString()
.padStart(total.toString().length, "0")}/${total}, ${failing_tests.length} failing files
Starting "${name}"
`,
);
const TMPDIR = maketemp();
const proc = spawn(bunExe, ["test", resolve(path)], {
stdio: ["ignore", "pipe", "pipe"],
env: {
...process.env,
FORCE_COLOR: "1",
BUN_GARBAGE_COLLECTOR_LEVEL: "1",
BUN_JSC_forceRAMSize: force_ram_size,
BUN_RUNTIME_TRANSPILER_CACHE_PATH: "0",
GITHUB_ACTIONS: process.env.GITHUB_ACTIONS ?? "true",
BUN_DEBUG_QUIET_LOGS: "1",
[windows ? "TEMP" : "TMPDIR"]: TMPDIR,
},
});
activeTestObject.proc = proc;
proc.stdout.once("end", () => {
done();
});
let doneCalls = 0;
var done = () => {
// TODO: wait for stderr as well
// spawn.test currently causes it to hang
if (doneCalls++ === 1) {
actuallyDone();
}
};
var actuallyDone = function () {
actuallyDone = done = () => {};
proc?.stderr?.unref?.();
proc?.stdout?.unref?.();
proc?.unref?.();
output = Buffer.concat(chunks).toString();
finish();
};
// if (!KEEP_TMPDIR)
// proc.once("close", () => {
// rm(TMPDIR, { recursive: true, force: true }).catch(() => {});
// });
proc.stdout.on("data", chunk => {
chunks.push(chunk);
if (run_concurrency === 1) process.stdout.write(chunk);
});
proc.stderr.on("data", chunk => {
chunks.push(chunk);
if (run_concurrency === 1) process.stderr.write(chunk);
});
proc.once("close", () => {
activeTestObject.proc = undefined;
});
proc.once("exit", (code_, signal_) => {
activeTestObject.proc = undefined;
exitCode = code_;
signal = signal_;
if (signal || exitCode !== 0) {
actuallyDone();
} else {
done();
}
});
proc.once("error", err_ => {
activeTestObject.proc = undefined;
err = err_;
actuallyDone();
});
await new Promise((done, reject) => {
const proc = spawn(bunExe, ["test", resolve(path)], {
stdio: ["ignore", "pipe", "pipe"],
timeout: 1000 * 60 * 3,
env: {
...process.env,
FORCE_COLOR: "1",
BUN_GARBAGE_COLLECTOR_LEVEL: "1",
BUN_JSC_forceRAMSize: force_ram_size,
BUN_RUNTIME_TRANSPILER_CACHE_PATH: "0",
// reproduce CI results locally
GITHUB_ACTIONS: process.env.GITHUB_ACTIONS ?? "true",
BUN_DEBUG_QUIET_LOGS: "1",
},
});
} finally {
activeTests.delete(path);
}
if (!hasInitialMaxFD) {
getMaxFileDescriptor();
} else if (maxFd > 0) {
const prevMaxFd = maxFd;
maxFd = getMaxFileDescriptor();
if (maxFd > prevMaxFd + queue.concurrency * 2) {
process.stderr.write(
`\n\x1b[31mewarn\x1b[0;2m:\x1b[0m file descriptor leak in ${name}, delta: ${
maxFd - prevMaxFd
}, current: ${maxFd}, previous: ${prevMaxFd}\n`,
);
}
}
const chunks = [];
proc.stdout.on("data", chunk => {
chunks.push(chunk);
if (run_concurrency === 1) process.stdout.write(chunk);
});
proc.stderr.on("data", chunk => {
chunks.push(chunk);
if (run_concurrency === 1) process.stderr.write(chunk);
});
proc.on("exit", (code_, signal_) => {
exitCode = code_;
signal = signal_;
output = Buffer.concat(chunks).toString();
done();
});
proc.on("error", err_ => {
err = err_;
done();
});
});
const passed = exitCode === 0 && !err && !signal;
@@ -369,8 +195,7 @@ Starting "${name}"
}
console.log(
`\x1b[2m${formatTime(duration).padStart(6, " ")}\x1b[0m ${
passed ? "\x1b[32m✔" : expected_crash_reason ? "\x1b[33m⚠" : "\x1b[31m✖"
`\x1b[2m${formatTime(duration).padStart(6, " ")}\x1b[0m ${passed ? "\x1b[32m✔" : expected_crash_reason ? "\x1b[33m⚠" : "\x1b[31m✖"
} ${name}\x1b[0m${reason ? ` (${reason})` : ""}`,
);
@@ -392,7 +217,6 @@ Starting "${name}"
}
failing_tests.push({ path: name, reason, output, expected_crash_reason });
process.exitCode = 1;
if (err) console.error(err);
} else {
if (windows && expected_crash_reason !== null) {
@@ -401,11 +225,13 @@ Starting "${name}"
passing_tests.push(name);
}
return passed;
}
var finished = 0;
const queue = [...findTests(resolve(cwd, "test"))];
let running = 0;
let total = queue.length;
let finished = 0;
let on_entry_finish = null;
function writeProgressBar() {
const barWidth = Math.min(process.stdout.columns || 40, 80) - 2;
@@ -415,23 +241,34 @@ function writeProgressBar() {
process.stdout.write(`\r${str1}${" ".repeat(barWidth - str1.length)}]`);
}
const allTests = [...findTests(resolve(cwd, "test"))];
console.log(`Starting ${allTests.length} tests with ${run_concurrency} concurrency...`);
let total = allTests.length;
for (const path of allTests) {
queue.add(
async () =>
await runTest(path).catch(e => {
console.error("Bug in bun-internal-test");
console.error(e);
process.exit(1);
}),
);
while (queue.length > 0) {
if (running >= run_concurrency) {
await new Promise(resolve => (on_entry_finish = resolve));
continue;
}
const path = queue.shift();
running++;
runTest(path)
.catch(e => {
console.error("Bug in bun-internal-test");
console.error(e);
process.exit(1);
})
.finally(() => {
running--;
if (on_entry_finish) {
on_entry_finish();
on_entry_finish = null;
}
});
}
while (running > 0) {
await Promise.race([
new Promise(resolve => (on_entry_finish = resolve)),
new Promise(resolve => setTimeout(resolve, 1000)),
]);
}
await queue.onIdle();
console.log(`
Completed ${total} tests with ${failing_tests.length} failing tests
`);
console.log("\n");
function linkToGH(linkTo) {
@@ -442,13 +279,10 @@ function sectionLink(linkTo) {
return "#" + linkTo.replace(/[^a-zA-Z0-9_-]/g, "").toLowerCase();
}
failing_tests.sort((a, b) => a.path.localeCompare(b.path));
passing_tests.sort((a, b) => a.localeCompare(b));
const failingTestDisplay = failing_tests
.filter(({ reason }) => !regressions.some(({ path }) => path === path))
.map(({ path, reason }) => `- [\`${path}\`](${sectionLink(path)})${reason ? ` ${reason}` : ""}`)
.join("\n");
// const passingTestDisplay = passing_tests.map(path => `- \`${path}\``).join("\n");
rmSync("report.md", { force: true });
@@ -485,10 +319,9 @@ console.log("\n" + "-".repeat(Math.min(process.stdout.columns || 40, 80)) + "\n"
console.log(header);
console.log("\n" + "-".repeat(Math.min(process.stdout.columns || 40, 80)) + "\n");
let report = `# bun test on ${
process.env["GITHUB_REF"] ??
let report = `# bun test on ${process.env["GITHUB_REF"] ??
spawnSync("git", ["rev-parse", "--abbrev-ref", "HEAD"], { encoding: "utf-8" }).stdout.trim()
}
}
\`\`\`
${header}
@@ -512,8 +345,7 @@ if (regressions.length > 0) {
report += regressions
.map(
({ path, reason, expected_crash_reason }) =>
`- [\`${path}\`](${sectionLink(path)}) ${reason}${
expected_crash_reason ? ` (expected: ${expected_crash_reason})` : ""
`- [\`${path}\`](${sectionLink(path)}) ${reason}${expected_crash_reason ? ` (expected: ${expected_crash_reason})` : ""
}`,
)
.join("\n");
@@ -521,7 +353,7 @@ if (regressions.length > 0) {
}
if (failingTestDisplay.length > 0) {
report += `## Failing tests\n\n`;
report += `## ${windows ? "Known " : ""}Failing tests\n\n`;
report += failingTestDisplay;
report += "\n\n";
}
@@ -590,4 +422,4 @@ if (ci) {
}
}
process.exit(failing_tests.length ? 1 : process.exitCode);
process.exit(failing_tests.length ? 1 : 0);

Binary file not shown.

View File

@@ -32,20 +32,20 @@ export class PublishCommand extends BuildCommand {
}
const { layer, region, arch, output, public: isPublic } = flags;
if (region.includes("*")) {
// prettier-ignore
// biome-ignore: format ignore
const result = this.#aws(["ec2", "describe-regions", "--query", "Regions[].RegionName", "--output", "json"]);
region.length = 0;
for (const name of JSON.parse(result)) {
region.push(name);
}
} else if (!region.length) {
// prettier-ignore
// biome-ignore: format ignore
region.push(this.#aws(["configure", "get", "region"]));
}
this.log("Publishing...");
for (const regionName of region) {
for (const layerName of layer) {
// prettier-ignore
// biome-ignore: format ignore
const result = this.#aws([
"lambda",
"publish-layer-version",
@@ -70,7 +70,7 @@ export class PublishCommand extends BuildCommand {
const { LayerVersionArn } = JSON.parse(result);
this.log("Published", LayerVersionArn);
if (isPublic) {
// prettier-ignore
// biome-ignore: format ignore
this.#aws([
"lambda",
"add-layer-version-permission",

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@@ -47,54 +47,6 @@ declare module "bun" {
*/
function which(command: string, options?: { PATH?: string; cwd?: string }): string | null;
/**
* Get the column count of a string as it would be displayed in a terminal.
* Supports ANSI escape codes, emoji, and wide characters.
*
* This is useful for:
* - Aligning text in a terminal
* - Quickly checking if a string contains ANSI escape codes
* - Measuring the width of a string in a terminal
*
* This API is designed to match the popular "string-width" package, so that
* existing code can be easily ported to Bun and vice versa.
*
* @returns The width of the string in columns
*
* ## Examples
* @example
* ```ts
* import { stringWidth } from "bun";
*
* console.log(stringWidth("abc")); // 3
* console.log(stringWidth("👩‍👩‍👧‍👦")); // 1
* console.log(stringWidth("\u001b[31mhello\u001b[39m")); // 5
* console.log(stringWidth("\u001b[31mhello\u001b[39m", { countAnsiEscapeCodes: false })); // 5
* console.log(stringWidth("\u001b[31mhello\u001b[39m", { countAnsiEscapeCodes: true })); // 13
* ```
*
*/
function stringWidth(
/**
* The string to measure
*/
input: string,
options?: {
/**
* If `true`, count ANSI escape codes as part of the string width. If `false`, ANSI escape codes are ignored when calculating the string width.
*
* @default false
*/
countAnsiEscapeCodes?: boolean;
/**
* When it's ambiugous and `true`, count emoji as 1 characters wide. If `false`, emoji are counted as 2 character wide.
*
* @default true
*/
ambiguousIsNarrow?: boolean;
},
): number;
export type ShellFunction = (input: Uint8Array) => Uint8Array;
export type ShellExpression =
@@ -825,7 +777,7 @@ declare module "bun" {
* console.log(path); // "/foo/bar.txt"
* ```
*/
function fileURLToPath(url: URL | string): string;
function fileURLToPath(url: URL): string;
/**
* Fast incremental writer that becomes an `ArrayBuffer` on end().
@@ -1444,14 +1396,6 @@ declare module "bun" {
// origin?: string; // e.g. http://mydomain.com
loader?: { [k in string]: Loader };
sourcemap?: "none" | "inline" | "external"; // default: "none"
/**
* package.json `exports` conditions used when resolving imports
*
* Equivalent to `--conditions` in `bun build` or `bun run`.
*
* https://nodejs.org/api/packages.html#exports
*/
conditions?: Array<string> | string;
minify?:
| boolean
| {
@@ -4132,33 +4076,22 @@ declare module "bun" {
/**
* If true, the subprocess will have a hidden window.
*/
windowsHide?: boolean;
/**
* Path to the executable to run in the subprocess. This defaults to `cmds[0]`.
*
* One use-case for this is for applications which wrap other applications or to simulate a symlink.
*
* @default cmds[0]
*/
argv0?: string;
// windowsHide?: boolean;
}
type OptionsToSubprocess<Opts extends OptionsObject> =
Opts extends OptionsObject<infer In, infer Out, infer Err>
? Subprocess<
// "Writable extends In" means "if In === Writable",
// aka if true that means the user didn't specify anything
Writable extends In ? "ignore" : In,
Readable extends Out ? "pipe" : Out,
Readable extends Err ? "inherit" : Err
>
: Subprocess<Writable, Readable, Readable>;
type OptionsToSubprocess<Opts extends OptionsObject> = Opts extends OptionsObject<infer In, infer Out, infer Err>
? Subprocess<
// "Writable extends In" means "if In === Writable",
// aka if true that means the user didn't specify anything
Writable extends In ? "ignore" : In,
Readable extends Out ? "pipe" : Out,
Readable extends Err ? "inherit" : Err
>
: Subprocess<Writable, Readable, Readable>;
type OptionsToSyncSubprocess<Opts extends OptionsObject> =
Opts extends OptionsObject<any, infer Out, infer Err>
? SyncSubprocess<Readable extends Out ? "pipe" : Out, Readable extends Err ? "pipe" : Err>
: SyncSubprocess<Readable, Readable>;
type OptionsToSyncSubprocess<Opts extends OptionsObject> = Opts extends OptionsObject<any, infer Out, infer Err>
? SyncSubprocess<Readable extends Out ? "pipe" : Out, Readable extends Err ? "pipe" : Err>
: SyncSubprocess<Readable, Readable>;
type ReadableIO = ReadableStream<Uint8Array> | number | undefined;
@@ -4195,19 +4128,19 @@ declare module "bun" {
};
/**
* The amount of CPU time used by the process, in microseconds.
* The amount of CPU time used by the process, in nanoseconds.
*/
cpuTime: {
/**
* User CPU time used by the process, in microseconds.
* User CPU time used by the process, in nanoseconds.
*/
user: number;
/**
* System CPU time used by the process, in microseconds.
* System CPU time used by the process, in nanoseconds.
*/
system: number;
/**
* Total CPU time used by the process, in microseconds.
* Total CPU time used by the process, in nanoseconds.
*/
total: number;
};
@@ -4325,7 +4258,7 @@ declare module "bun" {
* Kill the process
* @param exitCode The exitCode to send to the process
*/
kill(exitCode?: number | NodeJS.Signals): void;
kill(exitCode?: number): void;
/**
* This method will tell Bun to wait for this process to exit after you already
@@ -4385,8 +4318,6 @@ declare module "bun" {
* Get the resource usage information of the process (max RSS, CPU time, etc)
*/
resourceUsage: ResourceUsage;
signalCode?: string;
}
/**
@@ -4482,8 +4413,6 @@ declare module "bun" {
* ```
*/
cmd: string[];
onExit?: never;
},
): SpawnOptions.OptionsToSyncSubprocess<Opts>;

Binary file not shown.

View File

@@ -584,7 +584,7 @@ declare global {
/**
* If set, specifies the initial value of process.env inside the Worker thread. As a special value, worker.SHARE_ENV may be used to specify that the parent thread and the child thread should share their environment variables; in that case, changes to one thread's process.env object affect the other thread as well. Default: process.env.
*/
env?: Record<string, string> | (typeof import("node:worker_threads"))["SHARE_ENV"] | undefined;
env?: Record<string, string> | typeof import("node:worker_threads")["SHARE_ENV"] | undefined;
/**
* In Bun, this does nothing.

View File

@@ -91,7 +91,6 @@ declare module "bun:test" {
interface Jest {
restoreAllMocks(): void;
clearAllMocks(): void;
fn<T extends (...args: any[]) => any>(func?: T): Mock<T>;
setSystemTime(now?: number | Date): void;
}
@@ -187,6 +186,12 @@ declare module "bun:test" {
* @param fn the function that defines the tests
*/
todo(label: string, fn?: () => void): void;
/**
* Marks this group of tests as broken and in need of fixing.
* @param label the label for the tests
* @param fn the function that defines the tests
*/
fixme(label: string, fn?: () => void): void;
/**
* Runs this group of tests, only if `condition` is true.
*
@@ -388,6 +393,21 @@ declare module "bun:test" {
fn?: (() => void | Promise<unknown>) | ((done: (err?: unknown) => void) => void),
options?: number | TestOptions,
): void;
/**
* Marks this test as broken and in need of fixing.
*
* If the test function fails, it will be marked as `fixme` in the test results
* instead of `fail`. This is useful for marking tests that are known to be broken.
*
* @param label the label for the test
* @param fn the test function
* @param options the test timeout or options
*/
fixme(
label: string,
fn?: (() => void | Promise<unknown>) | ((done: (err?: unknown) => void) => void),
options?: number | TestOptions,
): void;
/**
* Runs this test, if `condition` is true.
*
@@ -414,6 +434,30 @@ declare module "bun:test" {
fn: (() => void | Promise<unknown>) | ((done: (err?: unknown) => void) => void),
options?: number | TestOptions,
) => void;
/**
* Marks this test as `todo`, if `condition` is true.
*
* @param condition if the test should be marked as `todo`
*/
todoIf(
condition: boolean,
): (
label: string,
fn?: (() => void | Promise<unknown>) | ((done: (err?: unknown) => void) => void),
options?: number | TestOptions,
) => void;
/**
* Marks this test as `fixme`, if `condition` is true.
*
* @param condition if the test should be marked as `fixme`
*/
fixmeIf(
condition: boolean,
): (
label: string,
fn?: (() => void | Promise<unknown>) | ((done: (err?: unknown) => void) => void),
options?: number | TestOptions,
) => void;
/**
* Returns a function that runs for each item in `table`.
*
@@ -547,16 +591,6 @@ declare module "bun:test" {
* ```
*/
unreachable(msg?: string | Error): never;
/**
* Ensures that an assertion is made
*/
hasAssertions(): void;
/**
* Ensures that a specific number of assertions are made
*/
assertions(neededAssertions: number): void;
}
/**
@@ -884,19 +918,6 @@ declare module "bun:test" {
* @param expected the expected value
*/
toStrictEqual(expected: T): void;
/**
* Asserts that the value is deep equal to an element in the expected array.
*
* The value must be an array or iterable, which includes strings.
*
* @example
* expect(1).toBeOneOf([1,2,3]);
* expect("foo").toBeOneOf(["foo", "bar"]);
* expect(true).toBeOneOf(new Set([true]));
*
* @param expected the expected value
*/
toBeOneOf(expected: Array<unknown> | Iterable<unknown>): void;
/**
* Asserts that a value contains what is expected.
*

View File

@@ -392,12 +392,7 @@ LIBUS_SOCKET_DESCRIPTOR bsd_accept_socket(LIBUS_SOCKET_DESCRIPTOR fd, struct bsd
internal_finalize_bsd_addr(addr);
#if defined(SOCK_CLOEXEC) && defined(SOCK_NONBLOCK)
// skip the extra fcntl calls.
return accepted_fd;
#else
return bsd_set_nonblocking(apple_no_sigpipe(accepted_fd));
#endif
}
int bsd_recv(LIBUS_SOCKET_DESCRIPTOR fd, void *buf, int length, int flags) {
@@ -478,7 +473,7 @@ inline __attribute__((always_inline)) LIBUS_SOCKET_DESCRIPTOR bsd_bind_listen_fd
setsockopt(listenFd, SOL_SOCKET, SO_REUSEADDR, (void *) &optval3, sizeof(optval3));
}
#else
#if /*defined(__linux__) &&*/ defined(SO_REUSEPORT)
#if /*defined(__linux) &&*/ defined(SO_REUSEPORT)
if (!(options & LIBUS_LISTEN_EXCLUSIVE_PORT)) {
int optval = 1;
setsockopt(listenFd, SOL_SOCKET, SO_REUSEPORT, (void *) &optval, sizeof(optval));
@@ -567,91 +562,8 @@ LIBUS_SOCKET_DESCRIPTOR bsd_create_listen_socket(const char *host, int port, int
#endif
#include <sys/stat.h>
#include <stddef.h>
LIBUS_SOCKET_DESCRIPTOR bsd_create_listen_socket_unix(const char *path, int options) {
static int bsd_create_unix_socket_address(const char *path, size_t path_len, int* dirfd_linux_workaround_for_unix_path_len, struct sockaddr_un *server_address, size_t* addrlen) {
memset(server_address, 0, sizeof(struct sockaddr_un));
server_address->sun_family = AF_UNIX;
if (path_len == 0) {
#if defined(_WIN32)
// simulate ENOENT
SetLastError(ERROR_PATH_NOT_FOUND);
#else
errno = ENOENT;
#endif
return LIBUS_SOCKET_ERROR;
}
*addrlen = sizeof(struct sockaddr_un);
#if defined(__linux__)
// Unix socket addresses have a maximum length of 108 bytes on Linux
// As a workaround, we can use /proc/self/fd/ as a directory to shorten the path
if (path_len >= sizeof(server_address->sun_path) && path[0] != '\0') {
size_t dirname_len = path_len;
// get the basename
while (dirname_len > 1 && path[dirname_len - 1] != '/') {
dirname_len--;
}
// if the path is just a single character, or the path is too long, we cannot use this method
if (dirname_len < 2 || (path_len - dirname_len + 1) >= sizeof(server_address->sun_path)) {
errno = ENAMETOOLONG;
return LIBUS_SOCKET_ERROR;
}
char dirname_buf[4096];
if (dirname_len + 1 > sizeof(dirname_buf)) {
errno = ENAMETOOLONG;
return LIBUS_SOCKET_ERROR;
}
memcpy(dirname_buf, path, dirname_len);
dirname_buf[dirname_len] = 0;
int socket_dir_fd = open(dirname_buf, O_CLOEXEC | O_PATH | O_DIRECTORY, 0700);
if (socket_dir_fd == -1) {
errno = ENAMETOOLONG;
return LIBUS_SOCKET_ERROR;
}
int sun_path_len = snprintf(server_address->sun_path, sizeof(server_address->sun_path), "/proc/self/fd/%d/%s", socket_dir_fd, path + dirname_len);
if (sun_path_len >= sizeof(server_address->sun_path) || sun_path_len < 0) {
close(socket_dir_fd);
errno = ENAMETOOLONG;
return LIBUS_SOCKET_ERROR;
}
*dirfd_linux_workaround_for_unix_path_len = socket_dir_fd;
return 0;
} else if (path_len < sizeof(server_address->sun_path)) {
memcpy(server_address->sun_path, path, path_len);
// abstract domain sockets
if (server_address->sun_path[0] == 0) {
*addrlen = offsetof(struct sockaddr_un, sun_path) + path_len;
}
return 0;
}
#endif
if (path_len >= sizeof(server_address->sun_path)) {
#if defined(_WIN32)
// simulate ENAMETOOLONG
SetLastError(ERROR_FILENAME_EXCED_RANGE);
#else
errno = ENAMETOOLONG;
#endif
return LIBUS_SOCKET_ERROR;
}
memcpy(server_address->sun_path, path, path_len);
return 0;
}
static LIBUS_SOCKET_DESCRIPTOR internal_bsd_create_listen_socket_unix(const char* path, int options, struct sockaddr_un* server_address, size_t addrlen) {
LIBUS_SOCKET_DESCRIPTOR listenFd = LIBUS_SOCKET_ERROR;
listenFd = bsd_create_socket(AF_UNIX, SOCK_STREAM, 0);
@@ -667,47 +579,25 @@ static LIBUS_SOCKET_DESCRIPTOR internal_bsd_create_listen_socket_unix(const char
_chmod(path, S_IREAD | S_IWRITE | S_IEXEC);
#endif
struct sockaddr_un server_address;
memset(&server_address, 0, sizeof(server_address));
server_address.sun_family = AF_UNIX;
strcpy(server_address.sun_path, path);
int size = offsetof(struct sockaddr_un, sun_path) + strlen(server_address.sun_path);
#ifdef _WIN32
_unlink(path);
#else
unlink(path);
#endif
if (bind(listenFd, (struct sockaddr *)server_address, addrlen) || listen(listenFd, 512)) {
#if defined(_WIN32)
int shouldSimulateENOENT = WSAGetLastError() == WSAENETDOWN;
#endif
if (bind(listenFd, (struct sockaddr *)&server_address, size) || listen(listenFd, 512)) {
bsd_close_socket(listenFd);
#if defined(_WIN32)
if (shouldSimulateENOENT) {
SetLastError(ERROR_PATH_NOT_FOUND);
}
#endif
return LIBUS_SOCKET_ERROR;
}
return listenFd;
}
LIBUS_SOCKET_DESCRIPTOR bsd_create_listen_socket_unix(const char *path, size_t len, int options) {
int dirfd_linux_workaround_for_unix_path_len = -1;
struct sockaddr_un server_address;
size_t addrlen = 0;
if (bsd_create_unix_socket_address(path, len, &dirfd_linux_workaround_for_unix_path_len, &server_address, &addrlen)) {
return LIBUS_SOCKET_ERROR;
}
LIBUS_SOCKET_DESCRIPTOR listenFd = internal_bsd_create_listen_socket_unix(path, options, &server_address, addrlen);
#if defined(__linux__)
if (dirfd_linux_workaround_for_unix_path_len != -1) {
close(dirfd_linux_workaround_for_unix_path_len);
}
#endif
return listenFd;
}
LIBUS_SOCKET_DESCRIPTOR bsd_create_udp_socket(const char *host, int port) {
struct addrinfo hints, *result;
memset(&hints, 0, sizeof(struct addrinfo));
@@ -968,44 +858,24 @@ LIBUS_SOCKET_DESCRIPTOR bsd_create_connect_socket(const char *host, int port, co
#endif
}
static LIBUS_SOCKET_DESCRIPTOR internal_bsd_create_connect_socket_unix(const char *server_path, size_t len, int options, struct sockaddr_un* server_address, const size_t addrlen) {
LIBUS_SOCKET_DESCRIPTOR bsd_create_connect_socket_unix(const char *server_path, int options) {
struct sockaddr_un server_address;
memset(&server_address, 0, sizeof(server_address));
server_address.sun_family = AF_UNIX;
strcpy(server_address.sun_path, server_path);
int size = offsetof(struct sockaddr_un, sun_path) + strlen(server_address.sun_path);
LIBUS_SOCKET_DESCRIPTOR fd = bsd_create_socket(AF_UNIX, SOCK_STREAM, 0);
if (fd == LIBUS_SOCKET_ERROR) {
return LIBUS_SOCKET_ERROR;
}
if (connect(fd, (struct sockaddr *)server_address, addrlen) != 0 && errno != EINPROGRESS) {
#if defined(_WIN32)
int shouldSimulateENOENT = WSAGetLastError() == WSAENETDOWN;
#endif
if (connect(fd, (struct sockaddr *)&server_address, size) != 0 && errno != EINPROGRESS) {
bsd_close_socket(fd);
#if defined(_WIN32)
if (shouldSimulateENOENT) {
SetLastError(ERROR_PATH_NOT_FOUND);
}
#endif
return LIBUS_SOCKET_ERROR;
}
return fd;
}
LIBUS_SOCKET_DESCRIPTOR bsd_create_connect_socket_unix(const char *server_path, size_t len, int options) {
struct sockaddr_un server_address;
size_t addrlen = 0;
int dirfd_linux_workaround_for_unix_path_len = -1;
if (bsd_create_unix_socket_address(server_path, len, &dirfd_linux_workaround_for_unix_path_len, &server_address, &addrlen)) {
return LIBUS_SOCKET_ERROR;
}
LIBUS_SOCKET_DESCRIPTOR fd = internal_bsd_create_connect_socket_unix(server_path, len, options, &server_address, addrlen);
#if defined(__linux__)
if (dirfd_linux_workaround_for_unix_path_len != -1) {
close(dirfd_linux_workaround_for_unix_path_len);
}
#endif
return fd;
}

View File

@@ -221,10 +221,19 @@ struct us_socket_context_t *us_create_socket_context(int ssl, struct us_loop_t *
/* This path is taken once either way - always BEFORE whatever SSL may do LATER.
* context_ext_size will however be modified larger in case of SSL, to hold SSL extensions */
struct us_socket_context_t *context = us_calloc(1, sizeof(struct us_socket_context_t) + context_ext_size);
struct us_socket_context_t *context = us_malloc(sizeof(struct us_socket_context_t) + context_ext_size);
context->loop = loop;
context->head_sockets = 0;
context->head_listen_sockets = 0;
context->iterator = 0;
context->next = 0;
context->is_low_prio = default_is_low_prio_handler;
/* Begin at 0 */
context->timestamp = 0;
context->long_timestamp = 0;
context->global_tick = 0;
us_internal_loop_link(loop, context);
/* If we are called from within SSL code, SSL code will make further changes to us */
@@ -242,10 +251,19 @@ struct us_socket_context_t *us_create_bun_socket_context(int ssl, struct us_loop
/* This path is taken once either way - always BEFORE whatever SSL may do LATER.
* context_ext_size will however be modified larger in case of SSL, to hold SSL extensions */
struct us_socket_context_t *context = us_calloc(1, sizeof(struct us_socket_context_t) + context_ext_size);
struct us_socket_context_t *context = us_malloc(sizeof(struct us_socket_context_t) + context_ext_size);
context->loop = loop;
context->head_sockets = 0;
context->head_listen_sockets = 0;
context->iterator = 0;
context->next = 0;
context->is_low_prio = default_is_low_prio_handler;
/* Begin at 0 */
context->timestamp = 0;
context->long_timestamp = 0;
context->global_tick = 0;
us_internal_loop_link(loop, context);
/* If we are called from within SSL code, SSL code will make further changes to us */
@@ -312,14 +330,14 @@ struct us_listen_socket_t *us_socket_context_listen(int ssl, struct us_socket_co
return ls;
}
struct us_listen_socket_t *us_socket_context_listen_unix(int ssl, struct us_socket_context_t *context, const char *path, size_t pathlen, int options, int socket_ext_size) {
struct us_listen_socket_t *us_socket_context_listen_unix(int ssl, struct us_socket_context_t *context, const char *path, int options, int socket_ext_size) {
#ifndef LIBUS_NO_SSL
if (ssl) {
return us_internal_ssl_socket_context_listen_unix((struct us_internal_ssl_socket_context_t *) context, path, pathlen, options, socket_ext_size);
return us_internal_ssl_socket_context_listen_unix((struct us_internal_ssl_socket_context_t *) context, path, options, socket_ext_size);
}
#endif
LIBUS_SOCKET_DESCRIPTOR listen_socket_fd = bsd_create_listen_socket_unix(path, pathlen, options);
LIBUS_SOCKET_DESCRIPTOR listen_socket_fd = bsd_create_listen_socket_unix(path, options);
if (listen_socket_fd == LIBUS_SOCKET_ERROR) {
return 0;
@@ -372,14 +390,14 @@ struct us_socket_t *us_socket_context_connect(int ssl, struct us_socket_context_
return connect_socket;
}
struct us_socket_t *us_socket_context_connect_unix(int ssl, struct us_socket_context_t *context, const char *server_path, size_t pathlen, int options, int socket_ext_size) {
struct us_socket_t *us_socket_context_connect_unix(int ssl, struct us_socket_context_t *context, const char *server_path, int options, int socket_ext_size) {
#ifndef LIBUS_NO_SSL
if (ssl) {
return (struct us_socket_t *) us_internal_ssl_socket_context_connect_unix((struct us_internal_ssl_socket_context_t *) context, server_path, pathlen, options, socket_ext_size);
return (struct us_socket_t *) us_internal_ssl_socket_context_connect_unix((struct us_internal_ssl_socket_context_t *) context, server_path, options, socket_ext_size);
}
#endif
LIBUS_SOCKET_DESCRIPTOR connect_socket_fd = bsd_create_connect_socket_unix(server_path, pathlen, options);
LIBUS_SOCKET_DESCRIPTOR connect_socket_fd = bsd_create_connect_socket_unix(server_path, options);
if (connect_socket_fd == LIBUS_SOCKET_ERROR) {
return 0;
}

View File

@@ -560,7 +560,7 @@ ssl_on_writable(struct us_internal_ssl_socket_t *s) {
void us_internal_init_loop_ssl_data(struct us_loop_t *loop) {
if (!loop->data.ssl_data) {
struct loop_ssl_data *loop_ssl_data =
us_calloc(1, sizeof(struct loop_ssl_data));
us_malloc(sizeof(struct loop_ssl_data));
loop_ssl_data->ssl_read_input_length = 0;
loop_ssl_data->ssl_read_input_offset = 0;
loop_ssl_data->last_write_was_msg_more = 0;
@@ -806,14 +806,18 @@ int add_ca_cert_to_ctx_store(SSL_CTX *ctx, const char *content,
X509_STORE *store) {
X509 *x = NULL;
BIO *in;
ERR_clear_error(); // clear error stack for SSL_CTX_use_certificate()
int count = 0;
BIO *in = BIO_new_mem_buf(content, strlen(content));
in = BIO_new_mem_buf(content, strlen(content));
if (in == NULL) {
OPENSSL_PUT_ERROR(SSL, ERR_R_BUF_LIB);
goto end;
}
int count = 0;
while ((x = PEM_read_bio_X509(in, NULL, SSL_CTX_get_default_passwd_cb(ctx),
SSL_CTX_get_default_passwd_cb_userdata(ctx)))) {
@@ -1442,9 +1446,9 @@ struct us_listen_socket_t *us_internal_ssl_socket_context_listen(
}
struct us_listen_socket_t *us_internal_ssl_socket_context_listen_unix(
struct us_internal_ssl_socket_context_t *context, const char *path, size_t pathlen,
struct us_internal_ssl_socket_context_t *context, const char *path,
int options, int socket_ext_size) {
return us_socket_context_listen_unix(0, &context->sc, path, pathlen, options,
return us_socket_context_listen_unix(0, &context->sc, path, options,
sizeof(struct us_internal_ssl_socket_t) -
sizeof(struct us_socket_t) +
socket_ext_size);
@@ -1460,10 +1464,10 @@ struct us_internal_ssl_socket_t *us_internal_ssl_socket_context_connect(
}
struct us_internal_ssl_socket_t *us_internal_ssl_socket_context_connect_unix(
struct us_internal_ssl_socket_context_t *context, const char *server_path, size_t pathlen,
struct us_internal_ssl_socket_context_t *context, const char *server_path,
int options, int socket_ext_size) {
return (struct us_internal_ssl_socket_t *)us_socket_context_connect_unix(
0, &context->sc, server_path, pathlen, options,
0, &context->sc, server_path, options,
sizeof(struct us_internal_ssl_socket_t) - sizeof(struct us_socket_t) +
socket_ext_size);
}

View File

@@ -29,7 +29,6 @@ void Bun__internal_dispatch_ready_poll(void* loop, void* poll);
#include <unistd.h>
#include <stdint.h>
#include <errno.h>
#include <string.h> // memset
#endif
void us_loop_run_bun_tick(struct us_loop_t *loop, int64_t timeoutMs);
@@ -110,7 +109,7 @@ struct us_loop_t *us_timer_loop(struct us_timer_t *t) {
/* Loop */
struct us_loop_t *us_create_loop(void *hint, void (*wakeup_cb)(struct us_loop_t *loop), void (*pre_cb)(struct us_loop_t *loop), void (*post_cb)(struct us_loop_t *loop), unsigned int ext_size) {
struct us_loop_t *loop = (struct us_loop_t *) us_calloc(1, sizeof(struct us_loop_t) + ext_size);
struct us_loop_t *loop = (struct us_loop_t *) us_malloc(sizeof(struct us_loop_t) + ext_size);
loop->num_polls = 0;
/* These could be accessed if we close a poll before starting the loop */
loop->num_ready_polls = 0;
@@ -287,7 +286,7 @@ int kqueue_change(int kqfd, int fd, int old_events, int new_events, void *user_d
EV_SET64(&change_list[change_length++], fd, EVFILT_WRITE, (new_events & LIBUS_SOCKET_WRITABLE) ? EV_ADD : EV_DELETE, 0, 0, (uint64_t)(void*)user_data, 0, 0);
}
int ret = kevent64(kqfd, change_list, change_length, change_list, change_length, KEVENT_FLAG_ERROR_EVENTS, NULL);
int ret = kevent64(kqfd, change_list, change_length, NULL, 0, 0, NULL);
// ret should be 0 in most cases (not guaranteed when removing async)
@@ -381,7 +380,6 @@ unsigned int us_internal_accept_poll_event(struct us_poll_t *p) {
#ifdef LIBUS_USE_EPOLL
struct us_timer_t *us_create_timer(struct us_loop_t *loop, int fallthrough, unsigned int ext_size) {
struct us_poll_t *p = us_create_poll(loop, fallthrough, sizeof(struct us_internal_callback_t) + ext_size);
memset(p, 0, sizeof(struct us_internal_callback_t) + ext_size);
int timerfd = timerfd_create(CLOCK_REALTIME, TFD_NONBLOCK | TFD_CLOEXEC);
if (timerfd == -1) {
return NULL;
@@ -398,7 +396,7 @@ struct us_timer_t *us_create_timer(struct us_loop_t *loop, int fallthrough, unsi
}
#else
struct us_timer_t *us_create_timer(struct us_loop_t *loop, int fallthrough, unsigned int ext_size) {
struct us_internal_callback_t *cb = us_calloc(1, sizeof(struct us_internal_callback_t) + ext_size);
struct us_internal_callback_t *cb = us_malloc(sizeof(struct us_internal_callback_t) + ext_size);
cb->loop = loop;
cb->cb_expects_the_loop = 0;
@@ -458,7 +456,7 @@ void us_timer_close(struct us_timer_t *timer, int fallthrough) {
struct kevent64_s event;
EV_SET64(&event, (uint64_t) (void*) internal_cb, EVFILT_TIMER, EV_DELETE, 0, 0, (uint64_t)internal_cb, 0, 0);
kevent64(internal_cb->loop->fd, &event, 1, &event, 1, KEVENT_FLAG_ERROR_EVENTS, NULL);
kevent64(internal_cb->loop->fd, &event, 1, NULL, 0, 0, NULL);
/* (regular) sockets are the only polls which are not freed immediately */
if(fallthrough){
@@ -477,7 +475,7 @@ void us_timer_set(struct us_timer_t *t, void (*cb)(struct us_timer_t *t), int ms
struct kevent64_s event;
uint64_t ptr = (uint64_t)(void*)internal_cb;
EV_SET64(&event, ptr, EVFILT_TIMER, EV_ADD | (repeat_ms ? 0 : EV_ONESHOT), 0, ms, (uint64_t)internal_cb, 0, 0);
kevent64(internal_cb->loop->fd, &event, 1, &event, 1, KEVENT_FLAG_ERROR_EVENTS, NULL);
kevent64(internal_cb->loop->fd, &event, 1, NULL, 0, 0, NULL);
}
#endif
@@ -485,8 +483,6 @@ void us_timer_set(struct us_timer_t *t, void (*cb)(struct us_timer_t *t), int ms
#ifdef LIBUS_USE_EPOLL
struct us_internal_async *us_internal_create_async(struct us_loop_t *loop, int fallthrough, unsigned int ext_size) {
struct us_poll_t *p = us_create_poll(loop, fallthrough, sizeof(struct us_internal_callback_t) + ext_size);
memset(p, 0, sizeof(struct us_internal_callback_t) + ext_size);
us_poll_init(p, eventfd(0, EFD_NONBLOCK | EFD_CLOEXEC), POLL_TYPE_CALLBACK);
struct us_internal_callback_t *cb = (struct us_internal_callback_t *) p;
@@ -526,7 +522,8 @@ void us_internal_async_wakeup(struct us_internal_async *a) {
#define MACHPORT_BUF_LEN 1024
struct us_internal_async *us_internal_create_async(struct us_loop_t *loop, int fallthrough, unsigned int ext_size) {
struct us_internal_callback_t *cb = us_calloc(1, sizeof(struct us_internal_callback_t) + ext_size);
struct us_internal_callback_t *cb = us_malloc(sizeof(struct us_internal_callback_t) + ext_size);
cb->loop = loop;
cb->cb_expects_the_loop = 1;
cb->leave_poll_ready = 0;
@@ -556,7 +553,7 @@ void us_internal_async_close(struct us_internal_async *a) {
struct kevent64_s event;
uint64_t ptr = (uint64_t)(void*)internal_cb;
EV_SET64(&event, ptr, EVFILT_MACHPORT, EV_DELETE, 0, 0, (uint64_t)(void*)internal_cb, 0,0);
kevent64(internal_cb->loop->fd, &event, 1, &event, 1, KEVENT_FLAG_ERROR_EVENTS, NULL);
kevent64(internal_cb->loop->fd, &event, 1, NULL, 0, 0, NULL);
mach_port_deallocate(mach_task_self(), internal_cb->port);
us_free(internal_cb->machport_buf);
@@ -584,7 +581,7 @@ void us_internal_async_set(struct us_internal_async *a, void (*cb)(struct us_int
event.ext[1] = MACHPORT_BUF_LEN;
event.udata = (uint64_t)(void*)internal_cb;
int ret = kevent64(internal_cb->loop->fd, &event, 1, &event, 1, KEVENT_FLAG_ERROR_EVENTS, NULL);
int ret = kevent64(internal_cb->loop->fd, &event, 1, NULL, 0, 0, NULL);
if (UNLIKELY(ret == -1)) {
abort();

View File

@@ -197,7 +197,6 @@ void us_loop_free(struct us_loop_t *loop) {
void us_loop_run(struct us_loop_t *loop) {
us_loop_integrate(loop);
uv_update_time(loop->uv_loop);
uv_run(loop->uv_loop, UV_RUN_ONCE);
}
@@ -225,7 +224,7 @@ struct us_poll_t *us_poll_resize(struct us_poll_t *p, struct us_loop_t *loop,
// timer
struct us_timer_t *us_create_timer(struct us_loop_t *loop, int fallthrough,
unsigned int ext_size) {
struct us_internal_callback_t *cb = us_calloc(1,
struct us_internal_callback_t *cb = malloc(
sizeof(struct us_internal_callback_t) + sizeof(uv_timer_t) + ext_size);
cb->loop = loop;
@@ -288,7 +287,7 @@ struct us_loop_t *us_timer_loop(struct us_timer_t *t) {
struct us_internal_async *us_internal_create_async(struct us_loop_t *loop,
int fallthrough,
unsigned int ext_size) {
struct us_internal_callback_t *cb = us_calloc(1,
struct us_internal_callback_t *cb = malloc(
sizeof(struct us_internal_callback_t) + sizeof(uv_async_t) + ext_size);
cb->loop = loop;

View File

@@ -289,8 +289,8 @@ struct us_listen_socket_t *us_internal_ssl_socket_context_listen(
int port, int options, int socket_ext_size);
struct us_listen_socket_t *us_internal_ssl_socket_context_listen_unix(
struct us_internal_ssl_socket_context_t *context, const char *path,
size_t pathlen, int options, int socket_ext_size);
struct us_internal_ssl_socket_context_t *context, const char *path,
int options, int socket_ext_size);
struct us_internal_ssl_socket_t *us_internal_ssl_socket_context_connect(
struct us_internal_ssl_socket_context_t *context, const char *host,
@@ -298,7 +298,7 @@ struct us_internal_ssl_socket_t *us_internal_ssl_socket_context_connect(
struct us_internal_ssl_socket_t *us_internal_ssl_socket_context_connect_unix(
struct us_internal_ssl_socket_context_t *context, const char *server_path,
size_t pathlen, int options, int socket_ext_size);
int options, int socket_ext_size);
int us_internal_ssl_socket_write(struct us_internal_ssl_socket_t *s,
const char *data, int length, int msg_more);

View File

@@ -97,14 +97,14 @@ int bsd_would_block();
// listen both on ipv6 and ipv4
LIBUS_SOCKET_DESCRIPTOR bsd_create_listen_socket(const char *host, int port, int options);
LIBUS_SOCKET_DESCRIPTOR bsd_create_listen_socket_unix(const char *path, size_t pathlen, int options);
LIBUS_SOCKET_DESCRIPTOR bsd_create_listen_socket_unix(const char *path, int options);
/* Creates an UDP socket bound to the hostname and port */
LIBUS_SOCKET_DESCRIPTOR bsd_create_udp_socket(const char *host, int port);
LIBUS_SOCKET_DESCRIPTOR bsd_create_connect_socket(const char *host, int port, const char *source_host, int options);
LIBUS_SOCKET_DESCRIPTOR bsd_create_connect_socket_unix(const char *server_path, size_t pathlen, int options);
LIBUS_SOCKET_DESCRIPTOR bsd_create_connect_socket_unix(const char *server_path, int options);
#ifndef MSG_DONTWAIT
#define MSG_DONTWAIT 0

View File

@@ -16,10 +16,6 @@
*/
// clang-format off
#ifndef us_calloc
#define us_calloc calloc
#endif
#ifndef us_malloc
#define us_malloc malloc
#endif
@@ -253,7 +249,7 @@ struct us_listen_socket_t *us_socket_context_listen(int ssl, struct us_socket_co
const char *host, int port, int options, int socket_ext_size);
struct us_listen_socket_t *us_socket_context_listen_unix(int ssl, struct us_socket_context_t *context,
const char *path, size_t pathlen, int options, int socket_ext_size);
const char *path, int options, int socket_ext_size);
/* listen_socket.c/.h */
void us_listen_socket_close(int ssl, struct us_listen_socket_t *ls);
@@ -263,7 +259,7 @@ struct us_socket_t *us_socket_context_connect(int ssl, struct us_socket_context_
const char *host, int port, const char *source_host, int options, int socket_ext_size);
struct us_socket_t *us_socket_context_connect_unix(int ssl, struct us_socket_context_t *context,
const char *server_path, size_t pathlen, int options, int socket_ext_size);
const char *server_path, int options, int socket_ext_size);
/* Is this socket established? Can be used to check if a connecting socket has fired the on_open event yet.
* Can also be used to determine if a socket is a listen_socket or not, but you probably know that already. */

View File

@@ -27,7 +27,7 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@v4
uses: actions/checkout@v3
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL

View File

@@ -20,12 +20,11 @@ jobs:
language: c++
fuzz-seconds: 600
- name: Upload crash
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
if: failure() && steps.build.outcome == 'success'
with:
name: artifacts
path: ./out/artifacts
if-no-files-found: "error"
build_windows:
runs-on: windows-latest

View File

@@ -575,13 +575,13 @@ public:
/* options, callback, path to unix domain socket */
TemplatedApp &&listen(int options, MoveOnlyFunction<void(us_listen_socket_t *)> &&handler, std::string path) {
handler(httpContext ? httpContext->listen_unix(path.data(), path.length(), options) : nullptr);
handler(httpContext ? httpContext->listen(path.c_str(), options) : nullptr);
return std::move(*this);
}
/* callback, path to unix domain socket */
TemplatedApp &&listen(MoveOnlyFunction<void(us_listen_socket_t *)> &&handler, std::string path, int options) {
handler(httpContext ? httpContext->listen_unix(path.data(), path.length(), options) : nullptr);
TemplatedApp &&listen(MoveOnlyFunction<void(us_listen_socket_t *)> &&handler, std::string path) {
handler(httpContext ? httpContext->listen(path.c_str(), 0) : nullptr);
return std::move(*this);
}

View File

@@ -1,4 +1,3 @@
// clang-format off
/*
* Authored by Alex Hultman, 2018-2020.
* Intellectual property of third-party.
@@ -498,23 +497,12 @@ public:
/* Listen to port using this HttpContext */
us_listen_socket_t *listen(const char *host, int port, int options) {
auto socket = us_socket_context_listen(SSL, getSocketContext(), host, port, options, sizeof(HttpResponseData<SSL>));
// we dont depend on libuv ref for keeping it alive
if (socket) {
us_socket_unref(&socket->s);
}
return socket;
return us_socket_context_listen(SSL, getSocketContext(), host, port, options, sizeof(HttpResponseData<SSL>));
}
/* Listen to unix domain socket using this HttpContext */
us_listen_socket_t *listen_unix(const char *path, size_t pathlen, int options) {
auto* socket = us_socket_context_listen_unix(SSL, getSocketContext(), path, pathlen, options, sizeof(HttpResponseData<SSL>));
// we dont depend on libuv ref for keeping it alive
if (socket) {
us_socket_unref(&socket->s);
}
return socket;
us_listen_socket_t *listen(const char *path, int options) {
return us_socket_context_listen_unix(SSL, getSocketContext(), path, options, sizeof(HttpResponseData<SSL>));
}
};

View File

@@ -207,7 +207,7 @@ namespace uWS
/* This guy really has only 30 bits since we reserve two highest bits to chunked encoding parsing state */
uint64_t remainingStreamingBytes = 0;
const size_t MAX_FALLBACK_SIZE = 1024 * 8;
const size_t MAX_FALLBACK_SIZE = 1024 * 4;
/* Returns UINT_MAX on error. Maximum 999999999 is allowed. */
static uint64_t toUnsignedInteger(std::string_view str) {
@@ -227,6 +227,17 @@ namespace uWS
return unsignedIntegerValue;
}
/* RFC 9110 5.6.2. Tokens */
static inline bool isFieldNameByte(unsigned char c)
{
return (c > 32) & (c < 127) & (c != '(') &
(c != ')') & (c != ',') & (c != '/') &
(c != ':') & (c != ';') & (c != '<') &
(c != '=') & (c != '>') & (c != '?') &
(c != '@') & (c != '[') & (c != '\\') &
(c != ']') & (c != '{') & (c != '}');
}
static inline uint64_t hasLess(uint64_t x, uint64_t n)
{
return (((x) - ~0ULL / 255 * (n)) & ~(x) & ~0ULL / 255 * 128);
@@ -251,56 +262,19 @@ namespace uWS
hasMore(x, 'z');
}
/* RFC 9110 5.6.2. Tokens */
/* Hyphen is not checked here as it is very common */
static inline bool isUnlikelyFieldNameByte(unsigned char c)
{
/* Digits and 14 of the 15 non-alphanum characters (lacking hyphen) */
return ((c == '~') | (c == '|') | (c == '`') | (c == '_') | (c == '^') | (c == '.') | (c == '+')
| (c == '*') | (c == '!')) || ((c >= 48) & (c <= 57)) || ((c <= 39) & (c >= 35));
}
static inline bool isFieldNameByteFastLowercased(unsigned char &in) {
/* Most common is lowercase alpha and hyphen */
if (((in >= 97) & (in <= 122)) | (in == '-')) [[likely]] {
return true;
/* Second is upper case alpha */
} else if ((in >= 65) & (in <= 90)) [[unlikely]] {
in |= 32;
return true;
/* These are rarely used but still valid */
} else if (isUnlikelyFieldNameByte(in)) [[unlikely]] {
return true;
}
return false;
}
static inline void *consumeFieldName(char *p) {
/* Best case fast path (particularly useful with clang) */
while (true) {
while ((*p >= 65) & (*p <= 90)) [[likely]] {
*p |= 32;
p++;
//for (; true; p += 8) {
//uint64_t word;
//memcpy(&word, p, sizeof(uint64_t));
//if (notFieldNameWord(word)) {
while (isFieldNameByte(*(unsigned char *)p)) {
*(p++) |= 0x20;
}
while (((*p >= 97) & (*p <= 122))) [[likely]] {
p++;
}
if (*p == ':') {
return (void *)p;
}
if (*p == '-') {
p++;
} else if (!((*p >= 65) & (*p <= 90))) {
/* Exit fast path parsing */
break;
}
}
/* Generic */
while (isFieldNameByteFastLowercased(*(unsigned char *)p)) {
p++;
}
return (void *)p;
return (void *)p;
//}
//word |= 0x2020202020202020ull;
//memcpy(p, &word, sizeof(uint64_t));
//}
}
/* Puts method as key, target as value and returns non-null (or nullptr on error). */
@@ -513,11 +487,6 @@ namespace uWS
length -= consumed;
consumedTotal += consumed;
/* Even if we could parse it, check for length here as well */
if (consumed > MAX_FALLBACK_SIZE) {
return {0, FULLPTR};
}
/* Store HTTP version (ancient 1.0 or 1.1) */
req->ancientHttp = isAncientHttp;

BIN
packages/bun-vscode/bun.lockb Executable file → Normal file

Binary file not shown.

Binary file not shown.

View File

@@ -66,4 +66,4 @@ dep lshpack liblshpack.a
if [ "$BUILT_ANY" -eq 0 ]; then
printf "(run with -f to rebuild)\n"
fi
fi

View File

@@ -18,6 +18,7 @@ if [ -z "$PKG" ]; then
exit 1
fi
url="https://github.com/oven-sh/WebKit/releases/download/autobuild-$TAG/$PKG.tar.gz"
old_tar_dir="$(dirname "$0")/../.webkit-cache"
@@ -53,10 +54,4 @@ fi
tar -xzf "$tar" -C "$(dirname "$OUTDIR")" || (rm "$tar" && exit 1)
# We want to make sure we use the system-version of icucore on macOS
if [ "$(uname)" == "Darwin" ]; then
# delete the unicode folder from include
rm -rf "$OUTDIR/include/unicode"
fi
echo "$TAG-$PKG" >"$OUTDIR/.tag"
echo "$TAG-$PKG" > "$OUTDIR/.tag"

View File

@@ -1,7 +1,7 @@
push-location .cache
try {
git clone https://github.com/zigtools/zls
set-location zls
git checkout a6786e1c324d773f9315f44c0ad976ef192d5493
..\zig\zig.exe build -Doptimize=ReleaseFast
} finally { Pop-Location }
push-location .cache
try {
git clone https://github.com/zigtools/zls
set-location zls
git checkout 62f17abe283bfe0ff2710c380c620a5a6e413996
..\zig\zig.exe build -Doptimize=ReleaseFast
} finally { Pop-Location }

View File

@@ -10,8 +10,6 @@ const Global = bun.Global;
const Environment = bun.Environment;
const Syscall = bun.sys;
const exe_suffix = bun.exe_suffix;
const w = std.os.windows;
pub const StandaloneModuleGraph = struct {
@@ -508,7 +506,7 @@ pub const StandaloneModuleGraph = struct {
bun.C.moveOpenedFileAtLoose(fd, bun.toFD(root_dir.fd), outfile_slice, true).unwrap() catch |err| {
if (err == error.EISDIR) {
Output.errGeneric("{} is a directory. Please choose a different --outfile or delete the directory", .{bun.fmt.utf16(outfile_slice)});
Output.errGeneric("{} is a directory. Please choose a different --outfile or delete the directory", .{std.unicode.fmtUtf16le(outfile_slice)});
} else {
Output.err(err, "failed to move executable to result path", .{});
}
@@ -570,8 +568,7 @@ pub const StandaloneModuleGraph = struct {
}
pub fn fromExecutable(allocator: std.mem.Allocator) !?StandaloneModuleGraph {
// Do not invoke libuv here.
const self_exe = openSelf() catch return null;
const self_exe = bun.toLibUVOwnedFD(openSelf() catch return null);
defer _ = Syscall.close(self_exe);
var trailer_bytes: [4096]u8 = undefined;
@@ -665,6 +662,8 @@ pub const StandaloneModuleGraph = struct {
return try StandaloneModuleGraph.fromBytes(allocator, to_read, offsets);
}
const exe_suffix = if (Environment.isWindows) ".exe" else "";
fn isBuiltInExe(argv0: []const u8) bool {
if (argv0.len == 0) return false;
@@ -747,7 +746,7 @@ pub const StandaloneModuleGraph = struct {
const nt_path = bun.strings.addNTPathPrefix(&nt_path_buf, image_path);
return bun.sys.openFileAtWindows(
bun.FileDescriptor.cwd(),
bun.invalid_fd,
nt_path,
// access_mask
w.SYNCHRONIZE | w.GENERIC_READ,

View File

@@ -258,18 +258,6 @@ fn HashMapMixin(
return self.hasContext(key, undefined);
}
pub fn hasWithHash(self: *Self, key_hash: u64) bool {
assert(key_hash != Self.empty_hash);
for (self.entries[key_hash >> self.shift ..]) |entry| {
if (entry.hash >= key_hash) {
return entry.hash == key_hash;
}
}
return false;
}
pub fn hasContext(self: *Self, key: K, ctx: Context) bool {
const hash = ctx.hash(key);
assert(hash != Self.empty_hash);

View File

@@ -99,9 +99,6 @@ pub fn exitWide(code: u32) noreturn {
runExitCallbacks();
Output.flush();
std.mem.doNotOptimizeAway(&Bun__atexit);
if (Environment.isWindows) {
bun.windows.libuv.uv_library_shutdown();
}
std.c.exit(@bitCast(code));
}

View File

@@ -5,17 +5,12 @@ const Environment = @import("./env.zig");
const FixedBufferAllocator = std.heap.FixedBufferAllocator;
const bun = @import("root").bun;
pub fn isSliceInBufferT(comptime T: type, slice: []const T, buffer: []const T) bool {
/// Checks if a slice's pointer is contained within another slice.
pub inline fn isSliceInBuffer(comptime T: type, slice: []const T, buffer: []const T) bool {
return (@intFromPtr(buffer.ptr) <= @intFromPtr(slice.ptr) and
(@intFromPtr(slice.ptr) + slice.len) <= (@intFromPtr(buffer.ptr) + buffer.len));
}
/// Checks if a slice's pointer is contained within another slice.
/// If you need to make this generic, use isSliceInBufferT.
pub fn isSliceInBuffer(slice: []const u8, buffer: []const u8) bool {
return isSliceInBufferT(u8, slice, buffer);
}
pub fn sliceRange(slice: []const u8, buffer: []const u8) ?[2]u32 {
return if (@intFromPtr(buffer.ptr) <= @intFromPtr(slice.ptr) and
(@intFromPtr(slice.ptr) + slice.len) <= (@intFromPtr(buffer.ptr) + buffer.len))
@@ -314,7 +309,7 @@ pub fn BSSStringList(comptime _count: usize, comptime _item_length: usize) type
}
pub fn exists(self: *const Self, value: ValueType) bool {
return isSliceInBuffer(value, &self.backing_buf);
return isSliceInBuffer(u8, value, &self.backing_buf);
}
pub fn editableSlice(slice: []const u8) []u8 {

View File

@@ -277,8 +277,6 @@ pub const GenerateHeader = struct {
var platform_: ?Analytics.Platform = null;
pub const Platform = Analytics.Platform;
var linux_kernel_version: Semver.Version = undefined;
pub fn forOS() Analytics.Platform {
if (platform_ != null) return platform_.?;
@@ -287,11 +285,6 @@ pub const GenerateHeader = struct {
return platform_.?;
} else if (comptime Environment.isPosix) {
platform_ = forLinux();
const release = bun.sliceTo(&linux_os_name.release, 0);
const sliced_string = Semver.SlicedString.init(release, release);
const result = Semver.Version.parse(sliced_string);
linux_kernel_version = result.version.min();
} else {
platform_ = Platform{
.os = Analytics.OperatingSystem.windows,
@@ -308,9 +301,11 @@ pub const GenerateHeader = struct {
@compileError("This function is only implemented on Linux");
}
_ = forOS();
const release = bun.sliceTo(&linux_os_name.release, 0);
const sliced_string = Semver.SlicedString.init(release, release);
const result = Semver.Version.parse(sliced_string);
// we only care about major, minor, patch so we don't care about the string
return linux_kernel_version;
return result.version.min();
}
pub fn forLinux() Analytics.Platform {

Some files were not shown because too many files have changed in this diff Show More