Compare commits

..

1 Commit

Author SHA1 Message Date
Zack Radisic
8a3154098c Run zig fmt 2024-02-12 17:41:31 -06:00
701 changed files with 32087 additions and 100563 deletions

View File

@@ -1,50 +0,0 @@
name: Setup Bun
description: An internal version of the 'oven-sh/setup-bun' action.
inputs:
bun-version:
type: string
description: "The version of bun to install: 'latest', 'canary', 'bun-v1.0.0', etc."
default: latest
required: false
baseline:
type: boolean
description: "Whether to use the baseline version of bun."
default: false
required: false
download-url:
type: string
description: "The base URL to download bun from."
default: "https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases"
required: false
runs:
using: composite
steps:
- name: Setup Bun
shell: bash
run: |
case "$(uname -s)" in
Linux*) os=linux;;
Darwin*) os=darwin;;
*) os=windows;;
esac
case "$(uname -m)" in
arm64 | aarch64) arch=arm64;;
*) arch=x64;;
esac
case "${{ inputs.baseline }}" in
true | 1) target="bun-${os}-${arch}-baseline";;
*) target="bun-${os}-${arch}";;
esac
case "${{ inputs.bun-version }}" in
latest) release="latest";;
canary) release="canary";;
*) release="bun-v${{ inputs.bun-version }}";;
esac
curl -LO "${{ inputs.download-url }}/${release}/${target}.zip"
unzip ${target}.zip
mkdir -p ${{ runner.temp }}/.bun/bin
mv ${target}/bun* ${{ runner.temp }}/.bun/bin/
chmod +x ${{ runner.temp }}/.bun/bin/*
echo "${{ runner.temp }}/.bun/bin" >> ${GITHUB_PATH}

View File

@@ -9,7 +9,6 @@ on:
branches:
- main
paths:
- ".github/workflows/bun-linux-aarch64.yml"
- "src/**/*"
- "test/**/*"
- "packages/bun-usockets/src/**/*"
@@ -18,11 +17,11 @@ on:
- "build.zig"
- "Makefile"
- "Dockerfile"
- ".github/workflows/bun-linux-aarch64.yml"
pull_request:
branches:
- main
paths:
- ".github/workflows/bun-linux-aarch64.yml"
- "src/**/*"
- "test/**/*"
- "packages/bun-usockets/src/**/*"
@@ -31,6 +30,7 @@ on:
- "build.zig"
- "Makefile"
- "Dockerfile"
- ".github/workflows/bun-linux-aarch64.yml"
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
@@ -51,14 +51,14 @@ jobs:
runner: linux-arm64
build_machine_arch: aarch64
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
with:
submodules: false
ref: ${{github.sha}}
clean: true
- run: |
bash ./scripts/update-submodules.sh
- uses: docker/setup-buildx-action@v3
- uses: docker/setup-buildx-action@v2
id: buildx
with:
install: true
@@ -66,7 +66,7 @@ jobs:
run: |
rm -rf ${{runner.temp}}/release
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
@@ -74,7 +74,7 @@ jobs:
- run: |
mkdir -p /tmp/.buildx-cache-${{matrix.tag}}
- name: Build and push
uses: docker/build-push-action@v5
uses: docker/build-push-action@v3
with:
context: .
push: false
@@ -113,16 +113,14 @@ jobs:
zip -r bun-${{matrix.tag}}-profile.zip bun-${{matrix.tag}}-profile
zip -r bun-${{matrix.tag}}.zip bun-${{matrix.tag}}
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
with:
name: bun-${{matrix.tag}}-profile
path: ${{runner.temp}}/release/bun-${{matrix.tag}}-profile.zip
if-no-files-found: "error"
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
with:
name: bun-${{matrix.tag}}
path: ${{runner.temp}}/release/bun-${{matrix.tag}}.zip
if-no-files-found: "error"
- name: Release
id: release
uses: ncipollo/release-action@v1

View File

@@ -9,7 +9,6 @@ on:
branches:
- main
paths:
- ".github/workflows/bun-linux-build.yml"
- "src/**/*"
- "test/**/*"
- "packages/bun-usockets/src/**/*"
@@ -22,7 +21,6 @@ on:
branches:
- main
paths:
- ".github/workflows/bun-linux-build.yml"
- "src/**/*"
- "test/**/*"
- "packages/bun-usockets/src/**/*"
@@ -49,7 +47,7 @@ jobs:
tag: linux-x64
arch: x86_64
build_arch: amd64
runner: namespace-profile-bun-linux-x64
runner: big-ubuntu
build_machine_arch: x86_64
assertions: "OFF"
zig_optimize: "ReleaseFast"
@@ -58,7 +56,7 @@ jobs:
tag: linux-x64-baseline
arch: x86_64
build_arch: amd64
runner: namespace-profile-bun-linux-x64
runner: big-ubuntu
build_machine_arch: x86_64
assertions: "OFF"
zig_optimize: "ReleaseFast"
@@ -88,20 +86,28 @@ jobs:
submodules: recursive
ref: ${{github.sha}}
clean: true
- uses: docker/setup-buildx-action@v2
id: buildx
with:
install: true
- name: Run
run: |
rm -rf ${{runner.temp}}/release
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- run: |
mkdir -p /tmp/.buildx-cache-${{matrix.tag}}
- name: Build and push
uses: docker/build-push-action@v5
uses: docker/build-push-action@v3
with:
context: .
push: false
cache-from: type=local,src=/tmp/.buildx-cache-${{matrix.tag}}
cache-to: type=local,dest=/tmp/.buildx-cache-${{matrix.tag}}
build-args: |
ARCH=${{matrix.arch}}
BUILDARCH=${{matrix.build_arch}}
@@ -110,6 +116,12 @@ jobs:
GIT_SHA=${{github.sha}}
ASSERTIONS=${{matrix.assertions}}
ZIG_OPTIMIZE=${{matrix.zig_optimize}}
SCCACHE_BUCKET=bun
SCCACHE_REGION=auto
SCCACHE_S3_USE_SSL=true
SCCACHE_ENDPOINT=${{ secrets.CACHE_S3_ENDPOINT }}
AWS_ACCESS_KEY_ID=${{ secrets.CACHE_S3_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY=${{ secrets.CACHE_S3_SECRET_ACCESS_KEY }}
platforms: linux/${{matrix.build_arch}}
target: ${{matrix.target}}
outputs: type=local,dest=${{runner.temp}}/release
@@ -142,16 +154,22 @@ jobs:
zip -r bun-${{matrix.tag}}-profile.zip bun-${{matrix.tag}}-profile
zip -r bun-${{matrix.tag}}.zip bun-${{matrix.tag}}
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
with:
name: bun-${{matrix.tag}}-profile
path: ${{runner.temp}}/release/bun-${{matrix.tag}}-profile.zip
if-no-files-found: "error"
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
with:
name: bun-${{matrix.tag}}
path: ${{runner.temp}}/release/bun-${{matrix.tag}}.zip
if-no-files-found: "error"
- uses: actions/upload-artifact@v3
with:
name: bun-obj-${{matrix.tag}}
path: ${{runner.temp}}/release/bun-obj
- uses: actions/upload-artifact@v3
with:
name: ${{matrix.tag}}-dependencies
path: ${{runner.temp}}/release/bun-dependencies
- name: Release
id: release
uses: ncipollo/release-action@v1
@@ -190,7 +208,7 @@ jobs:
[Commit ${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})
linux-test:
name: Tests ${{matrix.tag}}
runs-on: namespace-profile-bun-linux-x64
runs-on: ubuntu-latest
needs: [linux]
if: github.event_name == 'pull_request'
timeout-minutes: 20
@@ -216,7 +234,7 @@ jobs:
clean: true
- id: download
name: Download
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: bun-${{matrix.tag}}
path: ${{runner.temp}}/release
@@ -257,18 +275,19 @@ jobs:
name: Test (node runner)
env:
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
TMPDIR: ${{runner.temp}}
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
# if: ${{github.event.inputs.use_bun == 'false'}}
run: |
ulimit -c unlimited
ulimit -c
node packages/bun-internal-test/src/runner.node.mjs || true
# - uses: actions/upload-artifact@v4
# if: steps.test.outputs.failing_tests != ''
# with:
# name: cores
# path: /cores
# if-no-files-found: "error"
- uses: actions/upload-artifact@v3
if: steps.test.outputs.failing_tests != ''
with:
name: cores
path: /cores
- uses: sarisia/actions-status-discord@v1
if: always() && steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
with:

View File

@@ -37,7 +37,7 @@ on:
jobs:
macOS-zig:
name: macOS Zig Object
runs-on: namespace-profile-zig-build
runs-on: med-ubuntu
if: github.repository_owner == 'oven-sh'
strategy:
matrix:
@@ -50,15 +50,22 @@ jobs:
# - name: Checkout submodules
# run: git submodule update --init --recursive --depth=1 --progress --force
- name: Setup Docker Buildx
uses: docker/setup-buildx-action@v2
id: buildx
with:
install: true
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Compile Zig Object
uses: docker/build-push-action@v5
uses: docker/build-push-action@v3
if: runner.arch == 'X64'
with:
context: .
push: false
@@ -77,11 +84,10 @@ jobs:
outputs: type=local,dest=${{runner.temp}}/release
- name: Upload Zig Object
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.tag }}
path: ${{runner.temp}}/release/bun-zig.o
if-no-files-found: "error"
macOS-dependencies:
name: macOS Dependencies
@@ -135,7 +141,7 @@ jobs:
- name: Cache submodule dependencies
id: cache-deps-restore
uses: actions/cache/restore@v4
uses: actions/cache/restore@v3
with:
path: ${{runner.temp}}/bun-deps
key: bun-deps-${{ matrix.tag }}-${{ steps.submodule-versions.outputs.sha }}
@@ -153,17 +159,16 @@ jobs:
- name: Cache submodule dependencies
if: ${{ !steps.cache-deps-restore.outputs.cache-hit }}
id: cache-deps-save
uses: actions/cache/save@v4
uses: actions/cache/save@v3
with:
path: ${{runner.temp}}/bun-deps
key: ${{ steps.cache-deps-restore.outputs.cache-primary-key }}
- name: Upload submodule dependencies
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.tag }}-deps
path: ${{runner.temp}}/bun-deps
if-no-files-found: "error"
macOS-cpp:
name: macOS C++
@@ -223,7 +228,6 @@ jobs:
cmake -S $SOURCE_DIR -B $OBJ_DIR \
-G Ninja \
-DUSE_LTO=ON \
-DCMAKE_BUILD_TYPE=Release \
-DBUN_CPP_ONLY=1 \
-DNO_CONFIGURE_DEPENDS=1
@@ -231,11 +235,10 @@ jobs:
bash compile-cpp-only.sh -v
- name: Upload C++
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.tag }}-cpp
path: ${{ runner.temp }}/bun-cpp-obj/bun-cpp-objects.a
if-no-files-found: "error"
macOS-link:
name: macOS Link
runs-on: ${{ matrix.runner }}
@@ -282,19 +285,19 @@ jobs:
echo "${{ runner.temp }}/.bun/bin" >> $GITHUB_PATH
- name: Download C++
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{ matrix.tag }}-cpp
path: ${{ runner.temp }}/bun-cpp-obj
- name: Download Zig Object
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{ matrix.obj }}
path: ${{ runner.temp }}/release
- name: Downloaded submodule dependencies
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{ matrix.tag }}-deps
path: ${{runner.temp}}/bun-deps
@@ -309,7 +312,6 @@ jobs:
cmake $SRC_DIR \
-G Ninja \
-DCMAKE_BUILD_TYPE=Release \
-DUSE_LTO=ON \
-DBUN_LINK_ONLY=1 \
-DBUN_ZIG_OBJ="${{ runner.temp }}/release/bun-zig.o" \
-DBUN_CPP_ARCHIVE="${{ runner.temp }}/bun-cpp-obj/bun-cpp-objects.a" \
@@ -328,16 +330,14 @@ jobs:
zip -r ${{matrix.tag}}-profile.zip ${{matrix.tag}}-profile
zip -r ${{matrix.tag}}.zip ${{matrix.tag}}
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
with:
name: ${{matrix.tag}}-profile
path: ${{runner.temp}}/link-build/${{matrix.tag}}-profile.zip
if-no-files-found: "error"
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
with:
name: ${{matrix.tag}}
path: ${{runner.temp}}/link-build/${{matrix.tag}}.zip
if-no-files-found: "error"
- name: Release
id: release
uses: ncipollo/release-action@v1
@@ -394,12 +394,12 @@ jobs:
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
submodules: false
- id: download
name: Download
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{matrix.tag}}
path: ${{runner.temp}}/release
@@ -426,7 +426,6 @@ jobs:
name: Test (node runner)
env:
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
TMPDIR: ${{runner.temp}}
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
# if: ${{github.event.inputs.use_bun == 'false'}}

View File

@@ -37,7 +37,7 @@ on:
jobs:
macos-object-files:
name: macOS Object
runs-on: namespace-profile-zig-build
runs-on: med-ubuntu
if: github.repository_owner == 'oven-sh'
strategy:
matrix:
@@ -53,17 +53,31 @@ jobs:
# tag: bun-obj-darwin-aarch64
steps:
- uses: actions/checkout@v4
- name: Setup Docker Buildx
uses: docker/setup-buildx-action@v2
id: buildx
with:
install: true
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Compile Zig Object
uses: docker/build-push-action@v5
uses: docker/build-push-action@v3
with:
context: .
push: false
# This doesn't seem to work
# cache-from: type=s3,endpoint_url=${{ secrets.CACHE_S3_ENDPOINT }},blobs_prefix=docker_blobs/,manifests_prefix=docker_manifests/,access_key_id=${{ secrets.CACHE_S3_ACCESS_KEY_ID }},secret_access_key=${{ secrets.CACHE_S3_SECRET_ACCESS_KEY }},bucket=bun,region=auto
# cache-to: type=s3,endpoint_url=${{ secrets.CACHE_S3_ENDPOINT }},blobs_prefix=docker_blobs/,manifests_prefix=docker_manifests/,access_key_id=${{ secrets.CACHE_S3_ACCESS_KEY_ID }},secret_access_key=${{ secrets.CACHE_S3_SECRET_ACCESS_KEY }},bucket=bun,region=auto
# This was used before, but also does not really work
cache-from: type=local,src=/tmp/.buildx-cache-${{matrix.tag}}
cache-to: type=local,dest=/tmp/.buildx-cache-${{matrix.tag}}
build-args: |
BUILDARCH=${{ runner.arch == 'X64' && 'amd64' || 'arm64' }}
BUILD_MACHINE_ARCH=${{ runner.arch == 'X64' && 'x86_64' || 'aarch64' }}
@@ -71,15 +85,22 @@ jobs:
CPU_TARGET=${{ matrix.cpu }}
TRIPLET=${{ matrix.arch }}-macos-none
GIT_SHA=${{ github.sha }}
SCCACHE_BUCKET=bun
SCCACHE_REGION=auto
SCCACHE_S3_USE_SSL=true
SCCACHE_ENDPOINT=${{ secrets.CACHE_S3_ENDPOINT }}
AWS_ACCESS_KEY_ID=${{ secrets.CACHE_S3_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY=${{ secrets.CACHE_S3_SECRET_ACCESS_KEY }}
platforms: linux/${{ runner.arch == 'X64' && 'amd64' || 'arm64' }}
target: build_release_obj
outputs: type=local,dest=${{runner.temp}}/release
- name: Upload Zig Object
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.tag }}
path: ${{runner.temp}}/release/bun-zig.o
if-no-files-found: "error"
macOS-dependencies:
name: macOS Dependencies
runs-on: ${{ matrix.runner }}
@@ -125,7 +146,7 @@ jobs:
- name: Cache submodule dependencies
id: cache-deps-restore
uses: actions/cache/restore@v4
uses: actions/cache/restore@v3
with:
path: ${{runner.temp}}/bun-deps
key: bun-deps-${{ matrix.tag }}-${{ steps.submodule-versions.outputs.sha }}
@@ -143,17 +164,16 @@ jobs:
- name: Cache submodule dependencies
if: ${{ !steps.cache-deps-restore.outputs.cache-hit }}
id: cache-deps-save
uses: actions/cache/save@v4
uses: actions/cache/save@v3
with:
path: ${{runner.temp}}/bun-deps
key: ${{ steps.cache-deps-restore.outputs.cache-primary-key }}
- name: Upload submodule dependencies
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.tag }}-deps
path: ${{runner.temp}}/bun-deps
if-no-files-found: "error"
macOS-cpp:
name: macOS C++
@@ -213,7 +233,6 @@ jobs:
cmake -S $SOURCE_DIR -B $OBJ_DIR \
-G Ninja \
-DUSE_LTO=ON \
-DCMAKE_BUILD_TYPE=Release \
-DBUN_CPP_ONLY=1 \
-DNO_CONFIGURE_DEPENDS=1
@@ -221,11 +240,10 @@ jobs:
bash compile-cpp-only.sh -v
- name: Upload C++
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.tag }}-cpp
path: ${{ runner.temp }}/bun-cpp-obj/bun-cpp-objects.a
if-no-files-found: "error"
macOS:
name: macOS Link
runs-on: ${{ matrix.runner }}
@@ -244,7 +262,7 @@ jobs:
runner: macos-12-large
artifact: bun-obj-darwin-x64-baseline
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Checkout submodules
run: git submodule update --init --recursive --depth=1 --progress --force
@@ -268,19 +286,19 @@ jobs:
echo "${{ runner.temp }}/.bun/bin" >> $GITHUB_PATH
- name: Download C++
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{ matrix.tag }}-cpp
path: ${{ runner.temp }}/bun-cpp-obj
- name: Download Zig Object
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{ matrix.obj }}
path: ${{ runner.temp }}/release
- name: Downloaded submodule dependencies
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{ matrix.tag }}-deps
path: ${{runner.temp}}/bun-deps
@@ -294,7 +312,6 @@ jobs:
cd ${{runner.temp}}/link-build
cmake $SRC_DIR \
-G Ninja \
-DUSE_LTO=ON \
-DCMAKE_BUILD_TYPE=Release \
-DBUN_LINK_ONLY=1 \
-DBUN_ZIG_OBJ="${{ runner.temp }}/release/bun-zig.o" \
@@ -314,16 +331,14 @@ jobs:
zip -r ${{matrix.tag}}-profile.zip ${{matrix.tag}}-profile
zip -r ${{matrix.tag}}.zip ${{matrix.tag}}
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
with:
name: ${{matrix.tag}}-profile
path: ${{runner.temp}}/link-build/${{matrix.tag}}-profile.zip
if-no-files-found: "error"
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
with:
name: ${{matrix.tag}}
path: ${{runner.temp}}/link-build/${{matrix.tag}}.zip
if-no-files-found: "error"
- name: Release
id: release
uses: ncipollo/release-action@v1
@@ -381,12 +396,12 @@ jobs:
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
submodules: false
- id: download
name: Download
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{matrix.tag}}
path: ${{runner.temp}}/release
@@ -413,7 +428,6 @@ jobs:
name: Test (node runner)
env:
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
TMPDIR: ${{runner.temp}}
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
# if: ${{github.event.inputs.use_bun == 'false'}}

View File

@@ -37,7 +37,7 @@ on:
jobs:
macOS-zig:
name: macOS Zig Object
runs-on: namespace-profile-zig-build
runs-on: med-ubuntu
if: github.repository_owner == 'oven-sh'
strategy:
matrix:
@@ -50,17 +50,31 @@ jobs:
tag: bun-obj-darwin-x64
steps:
- uses: actions/checkout@v4
- name: Setup Docker Buildx
uses: docker/setup-buildx-action@v2
id: buildx
with:
install: true
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Compile Zig Object
uses: docker/build-push-action@v5
uses: docker/build-push-action@v3
with:
context: .
push: false
# This doesn't seem to work
# cache-from: type=s3,endpoint_url=${{ secrets.CACHE_S3_ENDPOINT }},blobs_prefix=docker_blobs/,manifests_prefix=docker_manifests/,access_key_id=${{ secrets.CACHE_S3_ACCESS_KEY_ID }},secret_access_key=${{ secrets.CACHE_S3_SECRET_ACCESS_KEY }},bucket=bun,region=auto
# cache-to: type=s3,endpoint_url=${{ secrets.CACHE_S3_ENDPOINT }},blobs_prefix=docker_blobs/,manifests_prefix=docker_manifests/,access_key_id=${{ secrets.CACHE_S3_ACCESS_KEY_ID }},secret_access_key=${{ secrets.CACHE_S3_SECRET_ACCESS_KEY }},bucket=bun,region=auto
# This was used before, but also does not really work
cache-from: type=local,src=/tmp/.buildx-cache-${{matrix.tag}}
cache-to: type=local,dest=/tmp/.buildx-cache-${{matrix.tag}}
build-args: |
BUILDARCH=${{ runner.arch == 'X64' && 'amd64' || 'arm64' }}
BUILD_MACHINE_ARCH=${{ runner.arch == 'X64' && 'x86_64' || 'aarch64' }}
@@ -68,15 +82,22 @@ jobs:
CPU_TARGET=${{ matrix.cpu }}
TRIPLET=${{ matrix.arch }}-macos-none
GIT_SHA=${{ github.sha }}
SCCACHE_BUCKET=bun
SCCACHE_REGION=auto
SCCACHE_S3_USE_SSL=true
SCCACHE_ENDPOINT=${{ secrets.CACHE_S3_ENDPOINT }}
AWS_ACCESS_KEY_ID=${{ secrets.CACHE_S3_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY=${{ secrets.CACHE_S3_SECRET_ACCESS_KEY }}
platforms: linux/${{ runner.arch == 'X64' && 'amd64' || 'arm64' }}
target: build_release_obj
outputs: type=local,dest=${{runner.temp}}/release
- name: Upload Zig Object
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.tag }}
path: ${{runner.temp}}/release/bun-zig.o
if-no-files-found: "error"
macOS-dependencies:
name: macOS Dependencies
@@ -123,7 +144,7 @@ jobs:
- name: Cache submodule dependencies
id: cache-deps-restore
uses: actions/cache/restore@v4
uses: actions/cache/restore@v3
with:
path: ${{runner.temp}}/bun-deps
key: bun-deps-${{ matrix.tag }}-${{ steps.submodule-versions.outputs.sha }}
@@ -141,17 +162,16 @@ jobs:
- name: Cache submodule dependencies
if: ${{ !steps.cache-deps-restore.outputs.cache-hit }}
id: cache-deps-save
uses: actions/cache/save@v4
uses: actions/cache/save@v3
with:
path: ${{runner.temp}}/bun-deps
key: ${{ steps.cache-deps-restore.outputs.cache-primary-key }}
- name: Upload submodule dependencies
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.tag }}-deps
path: ${{runner.temp}}/bun-deps
if-no-files-found: "error"
macOS-cpp:
name: macOS C++
@@ -212,18 +232,16 @@ jobs:
cmake -S $SOURCE_DIR -B $OBJ_DIR \
-G Ninja \
-DCMAKE_BUILD_TYPE=Release \
-DUSE_LTO=ON \
-DBUN_CPP_ONLY=1 \
-DNO_CONFIGURE_DEPENDS=1
bash compile-cpp-only.sh -v
- name: Upload C++
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.tag }}-cpp
path: ${{ runner.temp }}/bun-cpp-obj/bun-cpp-objects.a
if-no-files-found: "error"
macOS:
name: macOS Link
runs-on: ${{ matrix.runner }}
@@ -242,7 +260,7 @@ jobs:
runner: macos-12-large
artifact: bun-obj-darwin-x64
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Checkout submodules
run: git submodule update --init --recursive --depth=1 --progress --force
@@ -266,19 +284,19 @@ jobs:
echo "${{ runner.temp }}/.bun/bin" >> $GITHUB_PATH
- name: Download C++
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{ matrix.tag }}-cpp
path: ${{ runner.temp }}/bun-cpp-obj
- name: Download Zig Object
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{ matrix.obj }}
path: ${{ runner.temp }}/release
- name: Downloaded submodule dependencies
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{ matrix.tag }}-deps
path: ${{runner.temp}}/bun-deps
@@ -293,7 +311,6 @@ jobs:
cmake $SRC_DIR \
-G Ninja \
-DCMAKE_BUILD_TYPE=Release \
-DUSE_LTO=ON \
-DBUN_LINK_ONLY=1 \
-DBUN_ZIG_OBJ="${{ runner.temp }}/release/bun-zig.o" \
-DBUN_CPP_ARCHIVE="${{ runner.temp }}/bun-cpp-obj/bun-cpp-objects.a" \
@@ -312,16 +329,14 @@ jobs:
zip -r ${{matrix.tag}}-profile.zip ${{matrix.tag}}-profile
zip -r ${{matrix.tag}}.zip ${{matrix.tag}}
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
with:
name: ${{matrix.tag}}-profile
path: ${{runner.temp}}/link-build/${{matrix.tag}}-profile.zip
if-no-files-found: "error"
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
with:
name: ${{matrix.tag}}
path: ${{runner.temp}}/link-build/${{matrix.tag}}.zip
if-no-files-found: "error"
- name: Release
id: release
uses: ncipollo/release-action@v1
@@ -378,12 +393,12 @@ jobs:
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
submodules: false
- id: download
name: Download
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{matrix.tag}}
path: ${{runner.temp}}/release
@@ -411,7 +426,6 @@ jobs:
env:
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
TMPDIR: ${{runner.temp}}
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
# if: ${{github.event.inputs.use_bun == 'false'}}
run: |

View File

@@ -51,7 +51,7 @@ jobs:
working-directory: packages/bun-release
steps:
- name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@v3
- name: Setup GPG
uses: crazy-max/ghaction-import-gpg@v5
with:
@@ -81,7 +81,7 @@ jobs:
working-directory: packages/bun-release
steps:
- name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@v3
- name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
@@ -105,7 +105,7 @@ jobs:
working-directory: packages/bun-types
steps:
- name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@v3
- name: Setup Node.js
uses: actions/setup-node@v3
with:
@@ -170,12 +170,12 @@ jobs:
suffix: -distroless
steps:
- name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@v3
- name: Setup Docker emulator
uses: docker/setup-qemu-action@v2
- id: buildx
name: Setup Docker buildx
uses: docker/setup-buildx-action@v3
uses: docker/setup-buildx-action@v2
with:
platforms: linux/amd64,linux/arm64
- id: metadata
@@ -192,12 +192,12 @@ jobs:
type=match,pattern=(bun-v)?(canary|\d+.\d+),group=2,value=${{ env.BUN_VERSION }},suffix=${{ matrix.suffix }}
type=match,pattern=(bun-v)?(canary|\d+),group=2,value=${{ env.BUN_VERSION }},suffix=${{ matrix.suffix }}
- name: Login to Docker
uses: docker/login-action@v3
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Push to Docker
uses: docker/build-push-action@v5
uses: docker/build-push-action@v3
with:
context: ./dockerhub/${{ matrix.dir || matrix.variant }}
platforms: linux/amd64,linux/arm64
@@ -216,7 +216,7 @@ jobs:
if: ${{ github.event_name == 'release' || github.event.inputs.use-homebrew == 'true' }}
steps:
- name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
repository: oven-sh/homebrew-bun
token: ${{ secrets.ROBOBUN_TOKEN }}
@@ -252,7 +252,7 @@ jobs:
working-directory: packages/bun-release
steps:
- name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@v3
- name: Setup Bun
uses: oven-sh/setup-bun@v1
with:

View File

@@ -18,7 +18,7 @@ jobs:
steps:
- name: Checkout repo
uses: actions/checkout@v4
uses: actions/checkout@v3
- name: Install bun
uses: oven-sh/setup-bun@v1

View File

@@ -53,15 +53,20 @@ jobs:
cpu: [haswell, nehalem]
arch: [x86_64]
name: Zig Build
runs-on: namespace-profile-zig-build
runs-on: med-ubuntu
timeout-minutes: 60
if: github.repository_owner == 'oven-sh'
steps:
- run: git config --global core.autocrlf false && git config --global core.eol lf
- uses: actions/checkout@v4
- name: Setup Docker Buildx
uses: docker/setup-buildx-action@v2
id: buildx
with:
install: true
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
@@ -74,7 +79,8 @@ jobs:
echo "canary_revision=$(GITHUB_TOKEN="${{ secrets.GITHUB_TOKEN }}" bash ./scripts/calculate-canary-revision.sh --raw)" >> $GITHUB_OUTPUT
- name: Compile Zig Object
uses: docker/build-push-action@v5
uses: docker/build-push-action@v3
if: runner.arch == 'X64'
with:
context: .
push: false
@@ -96,11 +102,10 @@ jobs:
outputs: type=local,dest=${{runner.temp}}/release
- name: Upload Zig Object
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-zig${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}
path: ${{runner.temp}}/release/bun-zig.o
if-no-files-found: "error"
windows-dependencies:
name: Dependencies
@@ -133,7 +138,7 @@ jobs:
- name: Try fetch dependencies
id: cache-deps-restore
uses: actions/cache/restore@v4
uses: actions/cache/restore@v3
with:
path: bun-deps
key: bun-deps-${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}-${{ steps.submodule-versions.outputs.sha }}
@@ -160,25 +165,19 @@ jobs:
.\scripts\all-dependencies.ps1
- name: Upload Dependencies
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-deps${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}
path: bun-deps/
if-no-files-found: "error"
- name: Cache Dependencies
if: ${{ !steps.cache-deps-restore.outputs.cache-hit }}
id: cache-deps-save
uses: actions/cache/save@v4
uses: actions/cache/save@v3
with:
path: bun-deps
key: ${{ steps.cache-deps-restore.outputs.cache-primary-key }}
# TODO(@paperdave): stop relying on this and use bun.exe to build itself.
# we cant do that now because there isn't a tagged release to use.
#
# and at the time of writing, the minimum canary required to work is not
# yet released as it is the one *this* commit.
windows-codegen:
name: Codegen
runs-on: ubuntu-latest
@@ -200,11 +199,10 @@ jobs:
if: ${{ env.canary == 'true' }}
run: |
echo "canary_revision=$(GITHUB_TOKEN="${{ secrets.GITHUB_TOKEN }}" bash ./scripts/calculate-canary-revision.sh --raw)" > build-codegen-win32-x64/.canary_revision
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
with:
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-codegen
path: build-codegen-win32-x64/
if-no-files-found: "error"
windows-cpp:
name: C++ Build
@@ -225,7 +223,7 @@ jobs:
version: ${{ env.LLVM_VERSION }}
- run: choco install -y ninja
- name: Download Codegen
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-codegen
path: build
@@ -260,11 +258,10 @@ jobs:
if ($LASTEXITCODE -ne 0) { throw "CMake configuration failed" }
.\compile-cpp-only.ps1 -v
if ($LASTEXITCODE -ne 0) { throw "C++ compilation failed" }
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
with:
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-cpp${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}
path: build/bun-cpp-objects.a
if-no-files-found: "error"
windows-link:
strategy:
@@ -286,22 +283,22 @@ jobs:
version: ${{ env.LLVM_VERSION }}
- run: choco install -y ninja
- name: Download Codegen
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-codegen
path: build
- name: Download Dependencies
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-deps${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}
path: bun-deps
- name: Download Zig Object
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-zig${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}
path: bun-zig
- name: Download C++ Objects
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-cpp${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}
path: bun-cpp
@@ -334,16 +331,14 @@ jobs:
cp -r build\bun.pdb "$Dist\bun.pdb"
Compress-Archive "$Dist" "$Dist.zip"
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
with:
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}
path: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}.zip
if-no-files-found: "error"
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
with:
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}-profile
path: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}-profile.zip
if-no-files-found: "error"
- name: Release
id: release
uses: ncipollo/release-action@v1
@@ -398,12 +393,12 @@ jobs:
- run: git config --global core.autocrlf false && git config --global core.eol lf
- id: checkout
name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
submodules: false
- id: download
name: Download Release
uses: actions/download-artifact@v4
uses: actions/download-artifact@v3
with:
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}-profile
path: ${{runner.temp}}/release
@@ -417,9 +412,6 @@ jobs:
uses: actions/setup-node@v4
with:
node-version: 20
- uses: secondlife/setup-cygwin@v1
with:
packages: bash
- name: Install dependencies
run: |
# bun install --verbose
@@ -434,15 +426,14 @@ jobs:
name: Run tests
env:
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
TMPDIR: ${{runner.temp}}
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
SHELLOPTS: igncr
BUN_PATH_BASE: ${{runner.temp}}
BUN_PATH: release/${{env.tag}}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}-profile/bun.exe
run: |
node packages/bun-internal-test/src/runner.node.mjs || true
shell: bash
try {
$ErrorActionPreference = "SilentlyContinue"
$null = node packages/bun-internal-test/src/runner.node.mjs ${{runner.temp}}/release/${{env.tag}}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}-profile/bun.exe || $true
} catch {}
$ErrorActionPreference = "Stop"
- uses: sarisia/actions-status-discord@v1
if: always() && steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
with:

View File

@@ -1,8 +1,5 @@
name: autofix.ci # Must be named this for autofix.ci to work
permissions:
contents: read
on:
workflow_dispatch:
pull_request:
@@ -13,22 +10,24 @@ on:
env:
ZIG_VERSION: 0.12.0-dev.1828+225fe6ddb
permissions:
contents: read
jobs:
format:
name: format
runs-on: ${{ vars.RUNNER_LINUX_X64 || 'ubuntu-latest' }}
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
with:
sparse-checkout: |
.github
src
packages
test
bench
- name: Setup Bun
uses: ./.github/actions/setup-bun
uses: oven-sh/setup-bun@v1
with:
bun-version: "1.0.21"
- name: Setup Zig

4
.gitignore vendored
View File

@@ -5,7 +5,6 @@ packages/*/*.wasm
*.a
profile.json
.env
node_modules
.envrc
.swcrc
@@ -160,10 +159,9 @@ x64
/.cache
/src/deps/libuv
/build-*/
/kcov-out
.vs
**/.verdaccio-db.json
/test-report.md
/test-report.json
/test-report.json

2
.gitmodules vendored
View File

@@ -82,4 +82,4 @@ url = https://github.com/litespeedtech/ls-hpack.git
ignore = dirty
depth = 1
shallow = true
fetchRecurseSubmodules = false
fetchRecurseSubmodules = false

View File

@@ -1,5 +0,0 @@
src/bun.js/WebKit
src/deps
test/snapshots
test/js/deno
src/react-refresh.js

View File

@@ -1,15 +0,0 @@
{
"arrowParens": "avoid",
"printWidth": 120,
"trailingComma": "all",
"useTabs": false,
"quoteProps": "preserve",
"overrides": [
{
"files": ["*.md"],
"options": {
"printWidth": 80
}
}
]
}

View File

@@ -11,7 +11,7 @@
// JavaScript
"oven.bun-vscode",
"esbenp.prettier-vscode",
"biomejs.biome",
// TypeScript
"better-ts-errors.better-ts-errors",
@@ -28,6 +28,6 @@
"tamasfe.even-better-toml",
// Other
"bierner.comment-tagged-templates"
"bierner.comment-tagged-templates",
]
}

182
.vscode/launch.json generated vendored
View File

@@ -18,22 +18,7 @@
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [file] --only",
"program": "${workspaceFolder}/build/bun-debug",
"args": ["test", "--only", "${file}"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "1",
"BUN_DEBUG_FileReader": "1"
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole"
},
@@ -47,7 +32,7 @@
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "0"
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
},
"console": "internalConsole"
},
@@ -61,7 +46,7 @@
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "0",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole"
},
@@ -75,7 +60,7 @@
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole"
},
@@ -89,7 +74,7 @@
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole"
},
@@ -108,7 +93,7 @@
},
"console": "internalConsole",
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"pattern": "https:\/\/debug.bun.sh\/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
"action": "openExternally"
}
@@ -128,7 +113,7 @@
},
"console": "internalConsole",
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"pattern": "https:\/\/debug.bun.sh\/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
"action": "openExternally"
}
@@ -144,9 +129,9 @@
"env": {
"FORCE_COLOR": "0",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole"
"console": "internalConsole",
},
{
"type": "lldb",
@@ -158,7 +143,7 @@
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "0"
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
},
"console": "internalConsole"
},
@@ -172,7 +157,7 @@
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "0",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole"
},
@@ -186,7 +171,7 @@
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole"
},
@@ -200,7 +185,7 @@
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole"
},
@@ -219,10 +204,10 @@
},
"console": "internalConsole",
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"pattern": "https:\/\/debug.bun.sh\/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
"action": "openExternally"
}
},
},
{
"type": "lldb",
@@ -239,10 +224,10 @@
},
"console": "internalConsole",
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"pattern": "https:\/\/debug.bun.sh\/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
"action": "openExternally"
}
},
},
// bun test [...]
{
@@ -255,7 +240,7 @@
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole"
},
@@ -269,7 +254,7 @@
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "0"
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
},
"console": "internalConsole"
},
@@ -283,7 +268,7 @@
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "0",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole"
},
@@ -297,7 +282,7 @@
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole"
},
@@ -311,7 +296,7 @@
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole"
},
@@ -330,7 +315,7 @@
},
"console": "internalConsole",
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"pattern": "https:\/\/debug.bun.sh\/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
"action": "openExternally"
}
@@ -350,7 +335,7 @@
},
"console": "internalConsole",
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"pattern": "https:\/\/debug.bun.sh\/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
"action": "openExternally"
}
@@ -380,7 +365,7 @@
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "0"
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
},
"console": "internalConsole"
},
@@ -399,25 +384,11 @@
},
"console": "internalConsole",
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"pattern": "https:\/\/debug.bun.sh\/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
"action": "openExternally"
}
},
{
"type": "lldb",
"request": "launch",
"name": "bun install [folder]",
"program": "${workspaceFolder}/build/bun-debug",
"args": ["install"],
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
@@ -444,54 +415,11 @@
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1"
},
{
"name": "BUN_DEBUG_jest",
"value": "1"
},
{
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
"value": "1"
}
]
},
{
"type": "cppvsdbg",
"request": "launch",
"name": "Windows: bun test --only [file]",
"program": "${workspaceFolder}/build/bun-debug.exe",
"args": ["test", "--only", "${file}"],
"cwd": "${workspaceFolder}/test",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1"
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1"
},
{
"name": "BUN_DEBUG_EventLoop",
"value": "1"
},
{
"name": "BUN_DEBUG_uv",
"value": "1"
},
{
"name": "BUN_DEBUG_SYS",
"value": "1"
},
{
"name": "BUN_DEBUG_PipeWriter",
"value": "1"
},
{
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
"value": "2"
}
]
],
},
{
"type": "cppvsdbg",
@@ -513,7 +441,7 @@
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
"value": "0"
}
]
],
},
{
"type": "cppvsdbg",
@@ -535,7 +463,7 @@
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
"value": "2"
}
]
],
},
{
"type": "cppvsdbg",
@@ -563,10 +491,10 @@
}
],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"pattern": "https:\/\/debug.bun.sh\/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
"action": "openExternally"
}
},
},
{
"type": "cppvsdbg",
@@ -594,10 +522,10 @@
}
],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"pattern": "https:\/\/debug.bun.sh\/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
"action": "openExternally"
}
},
},
// Windows: bun run [file]
{
@@ -620,7 +548,7 @@
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
"value": "2"
}
]
],
},
{
"type": "cppvsdbg",
@@ -642,7 +570,7 @@
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
"value": "0"
}
]
],
},
{
"type": "cppvsdbg",
@@ -664,7 +592,7 @@
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
"value": "2"
}
]
],
},
{
"type": "cppvsdbg",
@@ -692,10 +620,10 @@
}
],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"pattern": "https:\/\/debug.bun.sh\/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
"action": "openExternally"
}
},
},
{
"type": "cppvsdbg",
@@ -723,10 +651,10 @@
}
],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"pattern": "https:\/\/debug.bun.sh\/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
"action": "openExternally"
}
},
},
// Windows: bun test [...]
{
@@ -749,7 +677,7 @@
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
"value": "2"
}
]
],
},
{
"type": "cppvsdbg",
@@ -771,7 +699,7 @@
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
"value": "0"
}
]
],
},
{
"type": "cppvsdbg",
@@ -793,7 +721,7 @@
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
"value": "2"
}
]
],
},
{
"type": "cppvsdbg",
@@ -815,7 +743,7 @@
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
"value": "2"
}
]
],
},
{
"type": "cppvsdbg",
@@ -837,7 +765,7 @@
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
"value": "2"
}
]
],
},
{
"type": "cppvsdbg",
@@ -865,10 +793,10 @@
}
],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"pattern": "https:\/\/debug.bun.sh\/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
"action": "openExternally"
}
},
},
{
"type": "cppvsdbg",
@@ -896,10 +824,10 @@
}
],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"pattern": "https:\/\/debug.bun.sh\/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
"action": "openExternally"
}
},
},
// Windows: bun test [*]
{
@@ -922,7 +850,7 @@
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
"value": "2"
}
]
],
},
{
"type": "cppvsdbg",
@@ -944,7 +872,7 @@
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
"value": "0"
}
]
],
},
{
"type": "cppvsdbg",
@@ -972,10 +900,10 @@
}
],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"pattern": "https:\/\/debug.bun.sh\/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
"action": "openExternally"
}
},
},
{
"type": "cppvsdbg",
@@ -985,7 +913,7 @@
"args": ["src/runner.node.mjs"],
"cwd": "${workspaceFolder}/packages/bun-internal-test",
"console": "internalConsole"
}
},
],
"inputs": [
{
@@ -997,6 +925,6 @@
"id": "testName",
"type": "promptString",
"description": "Usage: bun test [...]"
}
},
]
}

145
.vscode/settings.json vendored
View File

@@ -52,37 +52,37 @@
},
// JavaScript
"prettier.enable": true,
"prettier.enable": false,
"eslint.workingDirectories": ["${workspaceFolder}/packages/bun-types"],
"[javascript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.defaultFormatter": "biomejs.biome",
},
"[javascriptreact]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
"editor.defaultFormatter": "biomejs.biome"
},
// TypeScript
"typescript.tsdk": "${workspaceFolder}/node_modules/typescript/lib",
"[typescript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
"editor.defaultFormatter": "biomejs.biome"
},
"[typescriptreact]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
"editor.defaultFormatter": "biomejs.biome"
},
// JSON
"[json]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.defaultFormatter": "biomejs.biome",
},
"[jsonc]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.defaultFormatter": "biomejs.biome",
},
// Markdown
"[markdown]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.unicodeHighlight.ambiguousCharacters": true,
"editor.unicodeHighlight.invisibleCharacters": true,
"editor.defaultFormatter": "biomejs.biome",
"editor.unicodeHighlight.ambiguousCharacters": false,
"editor.unicodeHighlight.invisibleCharacters": false,
"diffEditor.ignoreTrimWhitespace": false,
"editor.wordWrap": "on",
"editor.quickSuggestions": {
@@ -94,12 +94,12 @@
// TOML
"[toml]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.defaultFormatter": "biomejs.biome",
},
// YAML
"[yaml]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.defaultFormatter": "biomejs.biome",
},
// Files
@@ -112,6 +112,7 @@
"**/Thumbs.db": true,
"**/*.xcworkspacedata": true,
"**/*.xcscheme": true,
"**/*.pem": true,
"**/*.xcodeproj": true,
"src/bun.js/WebKit": true,
"src/deps/libarchive": true,
@@ -126,10 +127,126 @@
"src/deps/tinycc": true,
"src/deps/zstd": true,
"**/*.i": true,
"packages/bun-uws/fuzzing/seed-corpus": true
"packages/bun-uws/fuzzing/seed-corpus/**/*": true
},
"files.associations": {
"*.idl": "cpp"
"*.lock": "yarnlock",
"*.idl": "cpp",
"memory": "cpp",
"iostream": "cpp",
"algorithm": "cpp",
"random": "cpp",
"ios": "cpp",
"filesystem": "cpp",
"__locale": "cpp",
"type_traits": "cpp",
"__mutex_base": "cpp",
"__string": "cpp",
"string": "cpp",
"string_view": "cpp",
"typeinfo": "cpp",
"__config": "cpp",
"__nullptr": "cpp",
"exception": "cpp",
"__bit_reference": "cpp",
"atomic": "cpp",
"utility": "cpp",
"sstream": "cpp",
"__functional_base": "cpp",
"new": "cpp",
"__debug": "cpp",
"__errc": "cpp",
"__hash_table": "cpp",
"__node_handle": "cpp",
"__split_buffer": "cpp",
"__threading_support": "cpp",
"__tuple": "cpp",
"array": "cpp",
"bit": "cpp",
"bitset": "cpp",
"cctype": "cpp",
"chrono": "cpp",
"clocale": "cpp",
"cmath": "cpp",
"complex": "cpp",
"condition_variable": "cpp",
"cstdarg": "cpp",
"cstddef": "cpp",
"cstdint": "cpp",
"cstdio": "cpp",
"cstdlib": "cpp",
"cstring": "cpp",
"ctime": "cpp",
"cwchar": "cpp",
"cwctype": "cpp",
"deque": "cpp",
"fstream": "cpp",
"functional": "cpp",
"initializer_list": "cpp",
"iomanip": "cpp",
"iosfwd": "cpp",
"istream": "cpp",
"iterator": "cpp",
"limits": "cpp",
"locale": "cpp",
"mutex": "cpp",
"optional": "cpp",
"ostream": "cpp",
"ratio": "cpp",
"stack": "cpp",
"stdexcept": "cpp",
"streambuf": "cpp",
"system_error": "cpp",
"thread": "cpp",
"tuple": "cpp",
"unordered_map": "cpp",
"unordered_set": "cpp",
"vector": "cpp",
"__bits": "cpp",
"__tree": "cpp",
"map": "cpp",
"numeric": "cpp",
"set": "cpp",
"__memory": "cpp",
"memory_resource": "cpp",
"*.tcc": "cpp",
"list": "cpp",
"shared_mutex": "cpp",
"cinttypes": "cpp",
"variant": "cpp",
"sysctl.h": "c",
"queue": "cpp",
"compare": "cpp",
"concepts": "cpp",
"typeindex": "cpp",
"__verbose_abort": "cpp",
"__std_stream": "cpp",
"any": "cpp",
"charconv": "cpp",
"csignal": "cpp",
"format": "cpp",
"forward_list": "cpp",
"future": "cpp",
"regex": "cpp",
"span": "cpp",
"valarray": "cpp",
"codecvt": "cpp",
"xtr1common": "cpp",
"stop_token": "cpp",
"xfacet": "cpp",
"xhash": "cpp",
"xiosbase": "cpp",
"xlocale": "cpp",
"xlocbuf": "cpp",
"xlocinfo": "cpp",
"xlocmes": "cpp",
"xlocmon": "cpp",
"xlocnum": "cpp",
"xloctime": "cpp",
"xmemory": "cpp",
"xstring": "cpp",
"xtree": "cpp",
"xutility": "cpp"
},
"C_Cpp.files.exclude": {
"**/.vscode": true,

View File

@@ -2,8 +2,8 @@ cmake_minimum_required(VERSION 3.22)
cmake_policy(SET CMP0091 NEW)
cmake_policy(SET CMP0067 NEW)
set(Bun_VERSION "1.0.36")
set(WEBKIT_TAG 089023cc9078b3aa173869fd6685f3e7bed2a994)
set(Bun_VERSION "1.0.26")
set(WEBKIT_TAG c3712c13dcdc091cfe4c7cb8f2c1fd16472e6f92)
set(BUN_WORKDIR "${CMAKE_CURRENT_BINARY_DIR}")
message(STATUS "Configuring Bun ${Bun_VERSION} in ${BUN_WORKDIR}")
@@ -41,53 +41,10 @@ elseif(CMAKE_BUILD_TYPE STREQUAL "Release")
# it is enabled for the time being to make sure to catch more bugs in the experimental windows builds
set(DEFAULT_ZIG_OPTIMIZE "ReleaseSafe")
else()
if(ZIG_OPTIMIZE STREQUAL "Debug")
set(bun "bun-debug")
else()
set(bun "bun-profile")
endif()
set(bun "bun-profile")
endif()
endif()
# --- MacOS SDK ---
if(APPLE AND DEFINED ENV{CI})
if(ARCH STREQUAL "x86_64")
set(CMAKE_OSX_DEPLOYMENT_TARGET "10.14")
else()
set(CMAKE_OSX_DEPLOYMENT_TARGET "11.0")
endif()
endif()
if(APPLE AND NOT CMAKE_OSX_DEPLOYMENT_TARGET)
execute_process(COMMAND xcrun --show-sdk-path OUTPUT_VARIABLE SDKROOT)
string(STRIP ${SDKROOT} SDKROOT)
message(STATUS "MacOS SDK path: ${SDKROOT}")
SET(CMAKE_OSX_SYSROOT ${SDKROOT})
execute_process(COMMAND xcrun --sdk macosx --show-sdk-version OUTPUT_VARIABLE MACOSX_DEPLOYMENT_TARGET)
string(STRIP ${MACOSX_DEPLOYMENT_TARGET} MACOSX_DEPLOYMENT_TARGET)
set(CMAKE_OSX_DEPLOYMENT_TARGET ${MACOSX_DEPLOYMENT_TARGET})
# Check if current version of macOS is less than the deployment target and if so, raise an error
execute_process(COMMAND sw_vers -productVersion OUTPUT_VARIABLE MACOS_VERSION)
string(STRIP ${MACOS_VERSION} MACOS_VERSION)
if(MACOS_VERSION VERSION_LESS ${MACOSX_DEPLOYMENT_TARGET})
message(WARNING
"The current version of macOS (${MACOS_VERSION}) is less than the deployment target (${MACOSX_DEPLOYMENT_TARGET}).\n"
"The build will be incompatible with your current device due to mismatches in `icucore` versions.\n"
"To fix this, please either:\n"
" - Upgrade to at least macOS ${MACOSX_DEPLOYMENT_TARGET}\n"
" - Use `xcode-select` to switch to an SDK version <= ${MACOS_VERSION}\n"
" - Set CMAKE_OSX_DEPLOYMENT_TARGET=${MACOS_VERSION} (make sure to build all dependencies with this variable set too)"
)
endif()
endif()
if(APPLE)
message(STATUS "Building for macOS v${CMAKE_OSX_DEPLOYMENT_TARGET}")
endif()
# --- LLVM ---
# This detection is a little overkill, but it ensures that the set LLVM_VERSION matches under
# any case possible. Sorry for the complexity...
@@ -270,13 +227,6 @@ set(DEFAULT_USE_DEBUG_JSC, OFF)
if(CMAKE_BUILD_TYPE STREQUAL "Debug")
set(DEFAULT_USE_DEBUG_JSC ON)
set(DEFAULT_LTO OFF)
elseif(CMAKE_BUILD_TYPE STREQUAL "Release")
if(CI)
set(DEFAULT_LTO ON)
else()
set(DEFAULT_LTO OFF)
endif()
endif()
if(WIN32)
@@ -284,9 +234,9 @@ if(WIN32)
endif()
if(UNIX AND NOT APPLE)
execute_process(COMMAND grep -w "NAME" /etc/os-release OUTPUT_VARIABLE LINUX_DISTRO)
execute_process(COMMAND cat /etc/os-release COMMAND head -n1 OUTPUT_VARIABLE LINUX_DISTRO)
if(${LINUX_DISTRO} MATCHES "NAME=\"(Arch|Manjaro|Artix) Linux\"|NAME=\"openSUSE Tumbleweed\"\n")
if(${LINUX_DISTRO} MATCHES "NAME=\"(Arch|Manjaro|Artix) Linux\"\n")
set(DEFAULT_USE_STATIC_LIBATOMIC OFF)
endif()
endif()
@@ -313,8 +263,6 @@ option(USE_DEBUG_JSC "Enable assertions and use a debug build of JavaScriptCore"
option(USE_UNIFIED_SOURCES "Use unified sources to speed up the build" OFF)
option(USE_STATIC_LIBATOMIC "Statically link libatomic, requires the presence of libatomic.a" ${DEFAULT_USE_STATIC_LIBATOMIC})
option(USE_LTO "Enable Link-Time Optimization" ${DEFAULT_LTO})
if(USE_VALGRIND)
# Disable SIMD
set(USE_BASELINE_BUILD ON)
@@ -337,11 +285,9 @@ endif()
set(ERROR_LIMIT 100 CACHE STRING "Maximum number of errors to show when compiling C++ code")
set(ARCH x86_64)
set(HOMEBREW_PREFIX "/usr/local")
if(CMAKE_SYSTEM_PROCESSOR MATCHES "aarch64|arm64|arm")
set(ARCH aarch64)
set(HOMEBREW_PREFIX "/opt/homebrew")
endif()
if(NOT CPU_TARGET)
@@ -428,11 +374,10 @@ elseif(NOT BUN_CPP_ONLY AND NOT BUN_LINK_ONLY)
endif()
# Bun
find_program(BUN_EXECUTABLE bun ${REQUIRED_IF_NOT_ONLY_CPP_OR_LINK} DOC "Path to an already built release of Bun")
message(STATUS "Found Bun: ${BUN_EXECUTABLE}")
if(WIN32 AND NO_CODEGEN)
# TODO(@paperdave): remove this, see bun-windows.yml's comment.
if(NOT WIN32)
find_program(BUN_EXECUTABLE bun ${REQUIRED_IF_NOT_ONLY_CPP_OR_LINK} DOC "Path to an already built release of Bun")
message(STATUS "Found Bun: ${BUN_EXECUTABLE}")
else()
set(BUN_EXECUTABLE "echo")
endif()
@@ -482,13 +427,7 @@ if(NOT WEBKIT_DIR)
set(BUN_WEBKIT_PACKAGE_NAME_SUFFIX "-debug")
set(ASSERT_ENABLED "1")
elseif(NOT DEBUG AND NOT WIN32)
# Avoid waiting for LTO in local release builds outside of CI
if(USE_LTO)
set(BUN_WEBKIT_PACKAGE_NAME_SUFFIX "-lto")
else()
set(BUN_WEBKIT_PACKAGE_NAME_SUFFIX "")
endif()
set(BUN_WEBKIT_PACKAGE_NAME_SUFFIX "-lto")
set(ASSERT_ENABLED "0")
endif()
@@ -523,13 +462,6 @@ if(NOT WEBKIT_DIR)
endif()
set(WEBKIT_INCLUDE_DIR "${BUN_WORKDIR}/bun-webkit/include")
if(APPLE)
set(ICU_INCLUDE_DIR "")
else()
set(ICU_INCLUDE_DIR "${BUN_WORKDIR}/bun-webkit/include/wtf/unicode")
endif()
set(WEBKIT_LIB_DIR "${BUN_WORKDIR}/bun-webkit/lib")
elseif(WEBKIT_DIR STREQUAL "omit")
message(STATUS "Not using WebKit. This is only valid if you are only trying to build Zig code")
@@ -650,7 +582,7 @@ add_custom_command(
"${BUN_WORKDIR}/codegen/ZigGeneratedClasses+DOMIsoSubspaces.h"
"${BUN_WORKDIR}/codegen/ZigGeneratedClasses+lazyStructureImpl.h"
"${BUN_WORKDIR}/codegen/ZigGeneratedClasses.zig"
COMMAND ${BUN_EXECUTABLE} run "${BUN_CODEGEN_SRC}/generate-classes.ts" ${BUN_CLASSES_TS} "${BUN_WORKDIR}/codegen"
COMMAND ${BUN_EXECUTABLE} "${BUN_CODEGEN_SRC}/generate-classes.ts" ${BUN_CLASSES_TS} "${BUN_WORKDIR}/codegen"
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
MAIN_DEPENDENCY "${BUN_CODEGEN_SRC}/generate-classes.ts"
DEPENDS ${BUN_CLASSES_TS}
@@ -663,12 +595,11 @@ list(APPEND BUN_RAW_SOURCES "${BUN_WORKDIR}/codegen/ZigGeneratedClasses.cpp")
add_custom_command(
OUTPUT "${BUN_WORKDIR}/codegen/JSSink.cpp"
"${BUN_WORKDIR}/codegen/JSSink.h"
COMMAND ${BUN_EXECUTABLE} run "src/codegen/generate-jssink.ts" "${BUN_WORKDIR}/codegen"
COMMAND ${BUN_EXECUTABLE} "src/codegen/generate-jssink.ts" "${BUN_WORKDIR}/codegen"
VERBATIM
MAIN_DEPENDENCY "src/codegen/generate-jssink.ts"
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
COMMENT "Generating JSSink"
USES_TERMINAL
)
list(APPEND BUN_RAW_SOURCES "${BUN_WORKDIR}/codegen/JSSink.cpp")
@@ -691,7 +622,7 @@ if(NOT BUN_LINK_ONLY)
OUTPUT ${_output}
MAIN_DEPENDENCY ${BUN_HASH_LUT_GENERATOR}
DEPENDS ${_input}
COMMAND ${BUN_EXECUTABLE} run ${BUN_HASH_LUT_GENERATOR} ${_input} ${_output}
COMMAND ${BUN_EXECUTABLE} ${BUN_HASH_LUT_GENERATOR} ${_input} ${_output}
VERBATIM
COMMENT "Generating ${_display_name}"
)
@@ -765,7 +696,7 @@ if(NOT NO_CODEGEN)
"${BUN_WORKDIR}/codegen/NativeModuleImpl.h"
"${BUN_WORKDIR}/codegen/ResolvedSourceTag.zig"
"${BUN_WORKDIR}/codegen/SyntheticModuleType.h"
COMMAND ${BUN_EXECUTABLE} run "${BUN_SRC}/codegen/bundle-modules.ts" "--debug=${DEBUG}" "${BUN_WORKDIR}"
COMMAND ${BUN_EXECUTABLE} "${BUN_SRC}/codegen/bundle-modules.ts" "--debug=${DEBUG}" "${BUN_WORKDIR}"
DEPENDS ${BUN_TS_MODULES} ${CODEGEN_FILES}
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
COMMENT "Bundling JS modules"
@@ -780,7 +711,7 @@ WEBKIT_ADD_SOURCE_DEPENDENCIES(
add_custom_command(
OUTPUT "${BUN_WORKDIR}/codegen/WebCoreJSBuiltins.cpp"
"${BUN_WORKDIR}/codegen/WebCoreJSBuiltins.h"
COMMAND ${BUN_EXECUTABLE} run "${BUN_SRC}/codegen/bundle-functions.ts" "--debug=${DEBUG}" "${BUN_WORKDIR}"
COMMAND ${BUN_EXECUTABLE} "${BUN_SRC}/codegen/bundle-functions.ts" "--debug=${DEBUG}" "${BUN_WORKDIR}"
DEPENDS ${BUN_TS_FUNCTIONS} ${CODEGEN_FILES}
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
COMMENT "Bundling JS builtin functions"
@@ -946,10 +877,6 @@ else()
add_compile_definitions("ASSERT_ENABLED=1")
endif()
if(ICU_INCLUDE_DIR)
include_directories(${ICU_INCLUDE_DIR})
endif()
include_directories(
${CMAKE_CURRENT_SOURCE_DIR}/packages/
${CMAKE_CURRENT_SOURCE_DIR}/packages/bun-usockets
@@ -1009,14 +936,6 @@ if(CMAKE_BUILD_TYPE STREQUAL "Debug")
-Werror=return-type
-Werror=return-stack-address
-Werror=implicit-function-declaration
-Werror=uninitialized
-Werror=conditional-uninitialized
-Werror=suspicious-memaccess
-Werror=move
-Werror=sometimes-uninitialized
-Werror=unused
-Wno-unused-function
-Werror
)
else()
target_compile_options(${bun} PUBLIC /Od /Z7)
@@ -1024,36 +943,15 @@ if(CMAKE_BUILD_TYPE STREQUAL "Debug")
add_compile_definitions("BUN_DEBUG=1")
elseif(CMAKE_BUILD_TYPE STREQUAL "Release")
set(LTO_FLAG "")
if(NOT WIN32)
if(USE_LTO)
list(APPEND LTO_FLAG "-flto=full" "-emit-llvm")
endif()
# Leave -Werror=unused off in release builds so we avoid errors from being used in ASSERT
target_compile_options(${bun} PUBLIC -O3 ${LTO_FLAG} -g1
target_compile_options(${bun} PUBLIC -O3 -flto=full -emit-llvm -g1
-Werror=return-type
-Werror=return-stack-address
-Werror=implicit-function-declaration
-Werror=uninitialized
-Werror=conditional-uninitialized
-Werror=suspicious-memaccess
-Werror=move
-Werror=sometimes-uninitialized
-Werror
)
else()
set(LTO_LINK_FLAG "")
if(USE_LTO)
# -emit-llvm seems to not be supported or under a different name on Windows.
list(APPEND LTO_FLAG "-flto=full")
list(APPEND LTO_LINK_FLAG "/LTCG")
endif()
target_compile_options(${bun} PUBLIC /O2 ${LTO_FLAG} /DEBUG /Z7)
target_link_options(${bun} PUBLIC ${LTO_LINK_FLAG} /DEBUG)
target_compile_options(${bun} PUBLIC /O2 -flto=full /DEBUG /Z7)
target_link_options(${bun} PUBLIC /LTCG /DEBUG)
endif()
endif()
@@ -1105,6 +1003,12 @@ else()
endif()
if(APPLE)
if(ARCH STREQUAL "x86_64")
set(CMAKE_OSX_DEPLOYMENT_TARGET "10.14")
else()
set(CMAKE_OSX_DEPLOYMENT_TARGET "11.0")
endif()
target_link_options(${bun} PUBLIC "-dead_strip")
target_link_options(${bun} PUBLIC "-dead_strip_dylibs")
target_link_options(${bun} PUBLIC "-Wl,-stack_size,0x1200000")
@@ -1170,7 +1074,21 @@ endif()
# --- ICU ---
if(APPLE)
# TODO: a much better check can be done to find this path
find_path(
ICU4C_DIR NAMES lib/libicudata.a
PATHS ENV PATH /usr/local/opt/icu4c /opt/homebrew/opt/icu4c
)
find_path(
ICONV_DIR NAMES lib/libiconv.a
PATHS ENV PATH /usr/local/opt/libiconv /opt/homebrew/opt/libiconv
)
target_link_libraries(${bun} PRIVATE "icucore")
target_link_libraries(${bun} PRIVATE "${ICONV_DIR}/lib/libiconv.a")
target_link_libraries(${bun} PRIVATE "${ICU4C_DIR}/lib/libicudata.a")
target_link_libraries(${bun} PRIVATE "${ICU4C_DIR}/lib/libicui18n.a")
target_link_libraries(${bun} PRIVATE "${ICU4C_DIR}/lib/libicuuc.a")
endif()
# --- Stripped Binary "bun"
@@ -1191,10 +1109,9 @@ endif()
if(WIN32)
# Kill all instances of bun before linking.
# This is necessary because the file is locked by the process.
add_custom_command(
TARGET ${bun}
PRE_LINK
PRE_BUILD
COMMAND
"powershell"
"/C"
@@ -1359,7 +1276,6 @@ if(USE_STATIC_SQLITE)
"SQLITE_ENABLE_FTS3_PARENTHESIS=1"
"SQLITE_ENABLE_FTS5=1"
"SQLITE_ENABLE_JSON1=1"
"SQLITE_ENABLE_MATH_FUNCTIONS=1"
)
target_link_libraries(${bun} PRIVATE sqlite3)
message(STATUS "Using static sqlite3")
@@ -1371,7 +1287,6 @@ endif()
if(USE_CUSTOM_LSHPACK)
include_directories(${BUN_DEPS_DIR}/ls-hpack)
if(WIN32)
include_directories(${BUN_DEPS_DIR}/ls-hpack/compat/queue)
target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/lshpack.lib")
@@ -1419,4 +1334,4 @@ endif()
if(NO_CODEGEN)
message(STATUS "NOTE: NO_CODEGEN is ON, this build expects ./codegen to exist")
endif()
endif()

View File

@@ -16,7 +16,7 @@ ARG BUILD_MACHINE_ARCH=x86_64
ARG BUILDARCH=amd64
ARG TRIPLET=${ARCH}-linux-gnu
ARG GIT_SHA=""
ARG BUN_VERSION="bun-v1.0.30"
ARG BUN_VERSION="bun-v1.0.7"
ARG BUN_DOWNLOAD_URL_BASE="https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/${BUN_VERSION}"
ARG CANARY=0
ARG ASSERTIONS=OFF
@@ -372,7 +372,7 @@ ENV CCACHE_DIR=/ccache
RUN --mount=type=cache,target=/ccache mkdir ${BUN_DIR}/build \
&& cd ${BUN_DIR}/build \
&& mkdir -p tmp_modules tmp_functions js codegen \
&& cmake .. -GNinja -DCMAKE_BUILD_TYPE=Release -DUSE_LTO=ON -DUSE_DEBUG_JSC=${ASSERTIONS} -DBUN_CPP_ONLY=1 -DWEBKIT_DIR=/build/bun/bun-webkit -DCANARY=${CANARY} -DZIG_COMPILER=system \
&& cmake .. -GNinja -DCMAKE_BUILD_TYPE=Release -DUSE_DEBUG_JSC=${ASSERTIONS} -DBUN_CPP_ONLY=1 -DWEBKIT_DIR=/build/bun/bun-webkit -DCANARY=${CANARY} -DZIG_COMPILER=system \
&& bash compile-cpp-only.sh -v
FROM bun-base-with-zig as bun-codegen-for-zig
@@ -419,7 +419,6 @@ RUN mkdir -p build \
&& cmake .. \
-G Ninja \
-DCMAKE_BUILD_TYPE=Release \
-DUSE_LTO=ON \
-DZIG_OPTIMIZE="${ZIG_OPTIMIZE}" \
-DCPU_TARGET="${CPU_TARGET}" \
-DZIG_TARGET="${TRIPLET}" \
@@ -477,7 +476,6 @@ RUN cmake .. \
-DCMAKE_BUILD_TYPE=Release \
-DBUN_LINK_ONLY=1 \
-DBUN_ZIG_OBJ="${BUN_DIR}/build/bun-zig.o" \
-DUSE_LTO=ON \
-DUSE_DEBUG_JSC=${ASSERTIONS} \
-DBUN_CPP_ARCHIVE="${BUN_DIR}/build/bun-cpp-objects.a" \
-DWEBKIT_DIR="${BUN_DIR}/bun-webkit" \
@@ -542,7 +540,6 @@ RUN cmake .. \
-DNO_CONFIGURE_DEPENDS=1 \
-DCANARY="${CANARY}" \
-DZIG_COMPILER=system \
-DUSE_LTO=ON \
&& ninja -v \
&& ./bun --revision \
&& mkdir -p /build/out \

View File

@@ -1722,7 +1722,7 @@ sizegen:
# Linux uses bundled SQLite3
ifeq ($(OS_NAME),linux)
sqlite:
$(CC) $(EMIT_LLVM_FOR_RELEASE) $(CFLAGS) $(INCLUDE_DIRS) -DSQLITE_ENABLE_COLUMN_METADATA= -DSQLITE_MAX_VARIABLE_NUMBER=250000 -DSQLITE_ENABLE_RTREE=1 -DSQLITE_ENABLE_FTS3=1 -DSQLITE_ENABLE_FTS3_PARENTHESIS=1 -DSQLITE_ENABLE_FTS5=1 -DSQLITE_ENABLE_JSON1=1 -DSQLITE_ENABLE_MATH_FUNCTIONS=1 $(SRC_DIR)/sqlite/sqlite3.c -c -o $(SQLITE_OBJECT)
$(CC) $(EMIT_LLVM_FOR_RELEASE) $(CFLAGS) $(INCLUDE_DIRS) -DSQLITE_ENABLE_COLUMN_METADATA= -DSQLITE_MAX_VARIABLE_NUMBER=250000 -DSQLITE_ENABLE_RTREE=1 -DSQLITE_ENABLE_FTS3=1 -DSQLITE_ENABLE_FTS3_PARENTHESIS=1 -DSQLITE_ENABLE_FTS5=1 -DSQLITE_ENABLE_JSON1=1 $(SRC_DIR)/sqlite/sqlite3.c -c -o $(SQLITE_OBJECT)
endif
picohttp:

View File

@@ -24,9 +24,9 @@
## What is Bun?
> **Bun is under active development.** Use it to speed up your development workflows or run simpler production code in resource-constrained environments like serverless functions. We're working on more complete Node.js compatibility and integration with existing frameworks. Join the [Discord](https://bun.sh/discord) and watch the [GitHub repository](https://github.com/oven-sh/bun) to keep tabs on future releases.
> **Bun is under active development.** Use it to speed up your development workflows or run simpler production code in resource-constrained environments like serverless functions. We're working on more complete Node.js compatibility and integration with existing frameworks. Join the [Discord](https://bun.sh/discord) and watch the [GitHub repository](https://github.com/oven-sh/bun) to keep tabs on future releases.
Bun is an all-in-one toolkit for JavaScript and TypeScript apps. It ships as a single executable called `bun`.
Bun is an all-in-one toolkit for JavaScript and TypeScript apps. It ships as a single executable called `bun`.
At its core is the _Bun runtime_, a fast JavaScript runtime designed as a drop-in replacement for Node.js. It's written in Zig and powered by JavaScriptCore under the hood, dramatically reducing startup times and memory usage.
@@ -34,12 +34,12 @@ At its core is the _Bun runtime_, a fast JavaScript runtime designed as a drop-i
bun run index.tsx # TS and JSX supported out-of-the-box
```
The `bun` command-line tool also implements a test runner, script runner, and Node.js-compatible package manager. Instead of 1,000 node_modules for development, you only need `bun`. Bun's built-in tools are significantly faster than existing options and usable in existing Node.js projects with little to no changes.
The `bun` command-line tool also implements a test runner, script runner, and Node.js-compatible package manager. Instead of 1,000 node_modules for development, you only need `bun`. Bun's built-in tools are significantly faster than existing options and usable in existing Node.js projects with little to no changes.
```bash
bun test # run tests
bun run start # run the `start` script in `package.json`
bun install <pkg> # install a package
bun install <pkg> # install a package
bunx cowsay 'Hello, world!' # execute a package
```

Binary file not shown.

View File

@@ -12,7 +12,7 @@
"fast-glob": "3.3.1",
"fdir": "^6.1.0",
"mitata": "^0.1.6",
"string-width": "7.1.0",
"string-width": "^7.0.0",
"zx": "^7.2.3"
},
"scripts": {

File diff suppressed because one or more lines are too long

View File

@@ -1,10 +1,9 @@
import { mkdirSync, rmSync, writeFileSync } from "fs";
import { cp } from "fs/promises";
import { tmpdir } from "os";
import { join, resolve } from "path";
import { mkdirSync, writeFileSync } from "fs";
import { bench, run } from "./runner.mjs";
import { cp } from "fs/promises";
import { join } from "path";
import { tmpdir } from "os";
import { fileURLToPath } from "url";
const hugeDirectory = (() => {
const root = join(tmpdir(), "huge");
const base = join(root, "directory", "for", "benchmarks", "1", "2", "3", "4", "5", "6", "7", "8", "9", "10");
@@ -19,21 +18,14 @@ const hugeDirectory = (() => {
const hugeFilePath = join(tmpdir(), "huge-file-0.txt");
const hugeText = "Hello, world!".repeat(1000000);
writeFileSync(hugeFilePath, hugeText);
let base = process.argv.at(-1);
if (resolve(base) === fileURLToPath(import.meta.url)) {
base = tmpdir();
} else {
rmSync(base, { recursive: true, force: true });
mkdirSync(base, { recursive: true });
}
var hugeCopyI = 0;
bench("cp -r (1000 files)", async b => {
await cp(hugeDirectory, join(base, "huge-copy" + hugeCopyI++), { recursive: true });
await cp(hugeDirectory, join(tmpdir(), "huge-copy" + hugeCopyI++), { recursive: true });
});
bench("cp 1 " + ((hugeText.length / 1024) | 0) + " KB file", async b => {
await cp(hugeFilePath, join(base, "huge-file" + hugeCopyI++));
await cp(hugeFilePath, join(tmpdir(), "huge-file" + hugeCopyI++));
});
await run();

View File

@@ -5,11 +5,6 @@ const lazy = globalThis[Symbol.for("Bun.lazy")];
const noop = lazy("noop");
const fn = noop.function;
const regular = noop.functionRegular;
const callback = noop.callback;
bench("C++ callback into JS", () => {
callback(() => {});
});
bench("C++ fn regular", () => {
regular();

View File

@@ -1,73 +0,0 @@
import { tmpdir } from "node:os";
import { bench, group, run } from "./runner.mjs";
import { createReadStream, writeFileSync } from "node:fs";
import { sep } from "node:path";
if (!Promise.withResolvers) {
Promise.withResolvers = function () {
let resolve, reject;
const promise = new Promise((res, rej) => {
resolve = res;
reject = rej;
});
return { promise, resolve, reject };
};
}
const ALLOW_BUN = typeof Bun !== "undefined";
const ALLOW_NODE = true;
const dir = tmpdir() + sep;
var short = (function () {
const text = "Hello World!";
const path = dir + "bun-bench-short.text";
writeFileSync(path, text, "utf8");
return { path, length: text.length };
})();
var shortUTF16 = (function () {
const text = "Hello World 💕💕💕";
const path = dir + "bun-bench-shortUTF16.text";
writeFileSync(path, text, "utf8");
return { path, length: text.length };
})();
var long = (function () {
const text = "Hello World!".repeat(1024);
const path = dir + "bun-bench-long.text";
writeFileSync(path, text, "utf8");
return { path, length: text.length };
})();
var longUTF16 = (function () {
const text = "Hello World 💕💕💕".repeat(15 * 70192);
const path = dir + "bun-bench-longUTF16.text";
writeFileSync(path, text, "utf8");
return { path, length: text.length };
})();
async function bun(path) {
for await (const chunk of Bun.file(path).stream()) {
chunk;
}
}
async function node(path) {
const { promise, resolve } = Promise.withResolvers();
const stream = createReadStream(path);
stream.on("data", chunk => {});
stream.on("end", () => resolve());
await promise;
}
ALLOW_BUN && bench("short - bun", () => bun(short.path));
ALLOW_NODE && bench("short - node", () => node(short.path));
ALLOW_BUN && bench("shortUTF16 - bun", () => bun(shortUTF16.path));
ALLOW_NODE && bench("shortUTF16 - node", () => node(shortUTF16.path));
ALLOW_BUN && bench("long - bun", () => bun(long.path));
ALLOW_NODE && bench("long - node", () => node(long.path));
ALLOW_BUN && bench("longUTF16 - bun", () => bun(longUTF16.path));
ALLOW_NODE && bench("longUTF16 - node", () => node(longUTF16.path));
await run();

View File

@@ -3,38 +3,41 @@ import npmStringWidth from "string-width";
const bunStringWidth = globalThis?.Bun?.stringWidth;
const stringWidth = bunStringWidth || npmStringWidth;
const formatter = new Intl.NumberFormat();
const format = n => {
return formatter.format(n);
};
bench("npm/string-width (ansi + emoji + ascii)", () => {
npmStringWidth("hello there! 😀\u001b[31m😀😀");
});
const inputs = [
["hello", "ascii"],
["[31mhello", "ascii+ansi"],
["hello😀", "ascii+emoji"],
["[31m😀😀", "ansi+emoji"],
["😀hello😀[31m😀😀😀", "ansi+emoji+ascii"],
];
bench("npm/string-width (ansi + emoji)", () => {
npmStringWidth("😀\u001b[31m😀😀");
});
const repeatCounts = [1, 10, 100, 1000, 5000];
bench("npm/string-width (ansi + ascii)", () => {
npmStringWidth("\u001b[31mhello there!");
});
const maxInputLength = Math.max(...inputs.map(([input]) => input.repeat(Math.max(...repeatCounts)).length));
if (bunStringWidth) {
bench("Bun.stringWidth (ansi + emoji + ascii)", () => {
bunStringWidth("hello there! 😀\u001b[31m😀😀");
});
for (const [input, textLabel] of inputs) {
for (let repeatCount of repeatCounts) {
const label = bunStringWidth ? "Bun.stringWidth" : "npm/string-width";
bench("Bun.stringWidth (ansi + emoji)", () => {
bunStringWidth("😀\u001b[31m😀😀");
});
const str = input.repeat(repeatCount);
const name = `${label} ${format(str.length).padStart(format(maxInputLength).length, " ")} chars ${textLabel}`;
bench("Bun.stringWidth (ansi + ascii)", () => {
bunStringWidth("\u001b[31mhello there!");
});
bench(name, () => {
stringWidth(str);
});
if (npmStringWidth("😀\u001b[31m😀😀") !== bunStringWidth("😀\u001b[31m😀😀")) {
console.error("string-width mismatch");
}
if (bunStringWidth && bunStringWidth(str) !== npmStringWidth(str)) {
throw new Error("string-width mismatch");
}
if (npmStringWidth("hello there! 😀\u001b[31m😀😀") !== bunStringWidth("hello there! 😀\u001b[31m😀😀")) {
console.error("string-width mismatch");
}
if (npmStringWidth("\u001b[31mhello there!") !== bunStringWidth("\u001b[31mhello there!")) {
console.error("string-width mismatch");
}
}

View File

@@ -7,8 +7,8 @@
"build": "exit 0",
"bench:bun": "$BUN bun.js",
"bench:node": "$NODE node.mjs",
"deps": "npm install && bash src/download.sh",
"bench:deno": "$DENO run -A --unstable-ffi deno.js",
"deps": "npm install && sh src/download.sh",
"bench:deno": "$DENO run -A --unstable deno.js",
"bench": "bun run bench:bun && bun run bench:node && bun run bench:deno"
}
}

70
biome.json Normal file
View File

@@ -0,0 +1,70 @@
{
"$schema": "./node_modules/@biomejs/biome/configuration_schema.json",
"organizeImports": {
"enabled": true
},
"linter": {
"enabled": false
},
"javascript": {
"parser": {
"unsafeParameterDecoratorsEnabled": true
},
"formatter": {
"arrowParentheses": "asNeeded",
"quoteProperties": "preserve",
"semicolons": "always",
"trailingComma": "all",
"indentStyle": "space",
"quoteStyle": "double"
}
},
"json": {
"formatter": {
"indentStyle": "space"
},
"parser": {
"allowComments": true,
"allowTrailingCommas": true
}
},
"vcs": {
"clientKind": "git",
"enabled": false,
"root": "./"
},
"files": {
"maxSize": 9128312873
},
"formatter": {
"enabled": true,
"indentWidth": 2,
"lineEnding": "lf",
"formatWithErrors": true,
"lineWidth": 120,
"indentStyle": "space",
"ignore": [
"node_modules/**",
"test/snapshots",
"test/fixtures",
".next",
"test/js/deno",
"./src/deps",
"./src/bun.js/WebKit/**",
"packages/bun-polyfills",
"./build-*",
"./build",
".cache",
"out/",
"test/transpiler/property-non-ascii-fixture.js",
"test/transpiler/macro-test.test.ts",
"test/transpiler/decorator-metadata.test.ts",
"src/react-refresh.js",
"bindings-obj/*",
"src/deps/**",
"./bench/react-hello-world/react-hello-world.node.js",
"./test/cli/run/require-cache-bug-leak-fixture-large-ast.js",
"./test/cli/run/esm-leak-fixture-large-ast.mjs"
]
}
}

View File

@@ -1,7 +1,7 @@
const std = @import("std");
const pathRel = std.fs.path.relative;
const builtin = @import("builtin");
const Wyhash11 = @import("./src/wyhash.zig").Wyhash11;
const Wyhash = @import("./src/wyhash.zig").Wyhash;
const zig_version = builtin.zig_version;
@@ -84,7 +84,7 @@ const BunBuildOptions = struct {
pub fn updateRuntime(this: *BunBuildOptions) anyerror!void {
if (std.fs.cwd().openFile("src/runtime.out.js", .{ .mode = .read_only })) |file| {
defer file.close();
const runtime_hash = Wyhash11.hash(
const runtime_hash = Wyhash.hash(
0,
try file.readToEndAlloc(std.heap.page_allocator, try file.getEndPos()),
);
@@ -97,7 +97,7 @@ const BunBuildOptions = struct {
if (std.fs.cwd().openFile("src/fallback.out.js", .{ .mode = .read_only })) |file| {
defer file.close();
const fallback_hash = Wyhash11.hash(
const fallback_hash = Wyhash.hash(
0,
try file.readToEndAlloc(std.heap.page_allocator, try file.getEndPos()),
);

BIN
bun.lockb

Binary file not shown.

View File

@@ -6,4 +6,3 @@
#
# Instead, we can only scan the test directory for Bun's runtime tests
root = "test"
preload = "./test/preload.ts"

View File

@@ -2,7 +2,7 @@
name: bun
appspec: { version: "0.001" }
plugins: [-Meta]
title: A tool for installing and managing JavaScript packages
title: A tool for installing and managing Python packages
options:
- version|V --Show version and exit

View File

@@ -96,16 +96,18 @@ FROM alpine:3.18
ARG BUN_RUNTIME_TRANSPILER_CACHE_PATH=0
ENV BUN_RUNTIME_TRANSPILER_CACHE_PATH=${BUN_RUNTIME_TRANSPILER_CACHE_PATH}
COPY --from=build /tmp/glibc.apk /tmp/
COPY --from=build /tmp/glibc-bin.apk /tmp/
COPY --from=build /usr/local/bin/bun /usr/local/bin/
COPY docker-entrypoint.sh /usr/local/bin/
# Temporarily use the `build`-stage /tmp folder to access the glibc APKs:
RUN --mount=type=bind,from=build,source=/tmp,target=/tmp \
addgroup -g 1000 bun \
RUN addgroup -g 1000 bun \
&& adduser -u 1000 -G bun -s /bin/sh -D bun \
&& apk --no-cache --force-overwrite --allow-untrusted add \
/tmp/glibc.apk \
/tmp/glibc-bin.apk \
&& rm /tmp/glibc.apk \
&& rm /tmp/glibc-bin.apk \
&& ln -s /usr/local/bin/bun /usr/local/bin/bunx \
&& which bun \
&& which bunx \

View File

@@ -58,18 +58,17 @@ Pass a path to the shared library and a map of symbols to import into `dlopen`:
```ts
import { dlopen, FFIType, suffix } from "bun:ffi";
const { i32 } = FFIType;
const path = `libadd.${suffix}`;
const lib = dlopen(path, {
add: {
args: [i32, i32],
returns: i32,
args: [FFIType.i32, FFIType.i32],
returns: FFIType.i32,
},
});
console.log(lib.symbols.add(1, 2));
lib.symbols.add(1, 2);
```
### Rust
@@ -77,7 +76,7 @@ console.log(lib.symbols.add(1, 2));
```rust
// add.rs
#[no_mangle]
pub extern "C" fn add(a: i32, b: i32) -> i32 {
pub extern "C" fn add(a: isize, b: isize) -> isize {
a + b
}
```
@@ -88,22 +87,6 @@ To compile:
$ rustc --crate-type cdylib add.rs
```
### C++
```c
#include <cstdint>
extern "C" int32_t add(int32_t a, int32_t b) {
return a + b;
}
```
To compile:
```bash
$ zig build-lib add.cpp -dynamic -lc -lc++
```
## FFI types
The following `FFIType` values are supported.

View File

@@ -56,45 +56,6 @@ const stream = new ReadableStream({
When using a direct `ReadableStream`, all chunk queueing is handled by the destination. The consumer of the stream receives exactly what is passed to `controller.write()`, without any encoding or modification.
## Async generator streams
Bun also supports async generator functions as a source for `Response` and `Request`. This is an easy way to create a `ReadableStream` that fetches data from an asynchronous source.
```ts
const response = new Response(async function* () {
yield "hello";
yield "world";
}());
await response.text(); // "helloworld"
```
You can also use `[Symbol.asyncIterator]` directly.
```ts
const response = new Response({
[Symbol.asyncIterator]: async function* () {
yield "hello";
yield "world";
},
});
await response.text(); // "helloworld"
```
If you need more granular control over the stream, `yield` will return the direct ReadableStream controller.
```ts
const response = new Response({
[Symbol.asyncIterator]: async function* () {
const controller = yield "hello";
await controller.end();
},
});
await response.text(); // "hello"
```
## `Bun.ArrayBufferSink`
The `Bun.ArrayBufferSink` class is a fast incremental writer for constructing an `ArrayBuffer` of unknown size.

View File

@@ -115,7 +115,7 @@ Use `Bun.connect` to connect to a TCP server. Specify the server to connect to w
```ts
// The client
const socket = await Bun.connect({
const socket = Bun.connect({
hostname: "localhost",
port: 8080,
@@ -138,7 +138,7 @@ To require TLS, specify `tls: true`.
```ts
// The client
const socket = await Bun.connect({
const socket = Bun.connect({
// ... config
tls: true,
});
@@ -164,7 +164,7 @@ server.reload({
```
```ts#Client
const socket = await Bun.connect({ /* config */ })
const socket = Bun.connect({ /* config */ })
socket.reload({
data(){
// new 'data' handler

View File

@@ -261,12 +261,13 @@ This function is optimized for large input. On an M1X, it processes 480 MB/s -
20 GB/s, depending on how much data is being escaped and whether there is non-ascii
text. Non-string types will be converted to a string before escaping.
## `Bun.stringWidth()` ~6,756x faster `string-width` alternative
## `Bun.stringWidth()`
Get the column count of a string as it would be displayed in a terminal.
Supports ANSI escape codes, emoji, and wide characters.
```ts
Bun.stringWidth(input: string, options?: { countAnsiEscapeCodes?: boolean = false }): number
```
Example usage:
Returns the number of columns required to display a string. This is useful for aligning text in a terminal. By default, ANSI escape codes are removed before measuring the string. To include them, pass `{ countAnsiEscapeCodes: true }` as the second argument.
```ts
Bun.stringWidth("hello"); // => 5
@@ -274,131 +275,8 @@ Bun.stringWidth("\u001b[31mhello\u001b[0m"); // => 5
Bun.stringWidth("\u001b[31mhello\u001b[0m", { countAnsiEscapeCodes: true }); // => 12
```
This is useful for:
- Aligning text in a terminal
- Quickly checking if a string contains ANSI escape codes
- Measuring the width of a string in a terminal
Compared with the popular `string-width` npm package, `bun`'s implementation is > [100x faster](https://github.com/oven-sh/bun/blob/8abd1fb088bcf2e78bd5d0d65ba4526872d2ab61/bench/snippets/string-width.mjs#L22)
This API is designed to match the popular "string-width" package, so that
existing code can be easily ported to Bun and vice versa.
[In this benchmark](https://github.com/oven-sh/bun/blob/5147c0ba7379d85d4d1ed0714b84d6544af917eb/bench/snippets/string-width.mjs#L13), `Bun.stringWidth` is a ~6,756x faster than the `string-width` npm package for input larger than about 500 characters. Big thanks to [sindresorhus](https://github.com/sindresorhus) for their work on `string-width`!
```ts
bun string-width.mjs
cpu: 13th Gen Intel(R) Core(TM) i9-13900
runtime: bun 1.0.29 (x64-linux)
benchmark time (avg) (min … max) p75 p99 p995
------------------------------------------------------------------------------------- -----------------------------
Bun.stringWidth 500 chars ascii 37.09 ns/iter (36.77 ns … 41.11 ns) 37.07 ns 38.84 ns 38.99 ns
node string-width.mjs
benchmark time (avg) (min … max) p75 p99 p995
------------------------------------------------------------------------------------- -----------------------------
npm/string-width 500 chars ascii 249,710 ns/iter (239,970 ns … 293,180 ns) 250,930 ns 276,700 ns 281,450 ns
```
To make `Bun.stringWidth` fast, we've implemented it in Zig using optimized SIMD instructions, accounting for Latin1, UTF-16, and UTF-8 encodings. It passes `string-width`'s tests.
{% details summary="View full benchmark" %}
As a reminder, 1 nanosecond (ns) is 1 billionth of a second. Here's a quick reference for converting between units:
| Unit | 1 Millisecond |
| ---- | ------------- |
| ns | 1,000,000 |
| µs | 1,000 |
| ms | 1 |
```js
bun string-width.mjs
cpu: 13th Gen Intel(R) Core(TM) i9-13900
runtime: bun 1.0.29 (x64-linux)
benchmark time (avg) (min … max) p75 p99 p995
------------------------------------------------------------------------------------- -----------------------------
Bun.stringWidth 5 chars ascii 16.45 ns/iter (16.27 ns … 19.71 ns) 16.48 ns 16.93 ns 17.21 ns
Bun.stringWidth 50 chars ascii 19.42 ns/iter (18.61 ns … 27.85 ns) 19.35 ns 21.7 ns 22.31 ns
Bun.stringWidth 500 chars ascii 37.09 ns/iter (36.77 ns … 41.11 ns) 37.07 ns 38.84 ns 38.99 ns
Bun.stringWidth 5,000 chars ascii 216.9 ns/iter (215.8 ns … 228.54 ns) 216.23 ns 228.52 ns 228.53 ns
Bun.stringWidth 25,000 chars ascii 1.01 µs/iter (1.01 µs … 1.01 µs) 1.01 µs 1.01 µs 1.01 µs
Bun.stringWidth 7 chars ascii+emoji 54.2 ns/iter (53.36 ns … 58.19 ns) 54.23 ns 57.55 ns 57.94 ns
Bun.stringWidth 70 chars ascii+emoji 354.26 ns/iter (350.51 ns … 363.96 ns) 355.93 ns 363.11 ns 363.96 ns
Bun.stringWidth 700 chars ascii+emoji 3.3 µs/iter (3.27 µs … 3.4 µs) 3.3 µs 3.4 µs 3.4 µs
Bun.stringWidth 7,000 chars ascii+emoji 32.69 µs/iter (32.22 µs … 45.27 µs) 32.7 µs 34.57 µs 34.68 µs
Bun.stringWidth 35,000 chars ascii+emoji 163.35 µs/iter (161.17 µs … 170.79 µs) 163.82 µs 169.66 µs 169.93 µs
Bun.stringWidth 8 chars ansi+emoji 66.15 ns/iter (65.17 ns … 69.97 ns) 66.12 ns 69.8 ns 69.87 ns
Bun.stringWidth 80 chars ansi+emoji 492.95 ns/iter (488.05 ns … 499.5 ns) 494.8 ns 498.58 ns 499.5 ns
Bun.stringWidth 800 chars ansi+emoji 4.73 µs/iter (4.71 µs … 4.88 µs) 4.72 µs 4.88 µs 4.88 µs
Bun.stringWidth 8,000 chars ansi+emoji 47.02 µs/iter (46.37 µs … 67.44 µs) 46.96 µs 49.57 µs 49.63 µs
Bun.stringWidth 40,000 chars ansi+emoji 234.45 µs/iter (231.78 µs … 240.98 µs) 234.92 µs 236.34 µs 236.62 µs
Bun.stringWidth 19 chars ansi+emoji+ascii 135.46 ns/iter (133.67 ns … 143.26 ns) 135.32 ns 142.55 ns 142.77 ns
Bun.stringWidth 190 chars ansi+emoji+ascii 1.17 µs/iter (1.16 µs … 1.17 µs) 1.17 µs 1.17 µs 1.17 µs
Bun.stringWidth 1,900 chars ansi+emoji+ascii 11.45 µs/iter (11.26 µs … 20.41 µs) 11.45 µs 12.08 µs 12.11 µs
Bun.stringWidth 19,000 chars ansi+emoji+ascii 114.06 µs/iter (112.86 µs … 120.06 µs) 114.25 µs 115.86 µs 116.15 µs
Bun.stringWidth 95,000 chars ansi+emoji+ascii 572.69 µs/iter (565.52 µs … 607.22 µs) 572.45 µs 604.86 µs 605.21 µs
```
```ts
node string-width.mjs
cpu: 13th Gen Intel(R) Core(TM) i9-13900
runtime: node v21.4.0 (x64-linux)
benchmark time (avg) (min … max) p75 p99 p995
-------------------------------------------------------------------------------------- -----------------------------
npm/string-width 5 chars ascii 3.19 µs/iter (3.13 µs … 3.48 µs) 3.25 µs 3.48 µs 3.48 µs
npm/string-width 50 chars ascii 20.09 µs/iter (18.93 µs … 435.06 µs) 19.49 µs 21.89 µs 22.59 µs
npm/string-width 500 chars ascii 249.71 µs/iter (239.97 µs … 293.18 µs) 250.93 µs 276.7 µs 281.45 µs
npm/string-width 5,000 chars ascii 6.69 ms/iter (6.58 ms … 6.76 ms) 6.72 ms 6.76 ms 6.76 ms
npm/string-width 25,000 chars ascii 139.57 ms/iter (137.17 ms … 143.28 ms) 140.49 ms 143.28 ms 143.28 ms
npm/string-width 7 chars ascii+emoji 3.7 µs/iter (3.62 µs … 3.94 µs) 3.73 µs 3.94 µs 3.94 µs
npm/string-width 70 chars ascii+emoji 23.93 µs/iter (22.44 µs … 331.2 µs) 23.15 µs 25.98 µs 30.2 µs
npm/string-width 700 chars ascii+emoji 251.65 µs/iter (237.78 µs … 444.69 µs) 252.92 µs 325.89 µs 354.08 µs
npm/string-width 7,000 chars ascii+emoji 4.95 ms/iter (4.82 ms … 5.19 ms) 5 ms 5.04 ms 5.19 ms
npm/string-width 35,000 chars ascii+emoji 96.93 ms/iter (94.39 ms … 102.58 ms) 97.68 ms 102.58 ms 102.58 ms
npm/string-width 8 chars ansi+emoji 3.92 µs/iter (3.45 µs … 4.57 µs) 4.09 µs 4.57 µs 4.57 µs
npm/string-width 80 chars ansi+emoji 24.46 µs/iter (22.87 µs … 4.2 ms) 23.54 µs 25.89 µs 27.41 µs
npm/string-width 800 chars ansi+emoji 259.62 µs/iter (246.76 µs … 480.12 µs) 258.65 µs 349.84 µs 372.55 µs
npm/string-width 8,000 chars ansi+emoji 5.46 ms/iter (5.41 ms … 5.57 ms) 5.48 ms 5.55 ms 5.57 ms
npm/string-width 40,000 chars ansi+emoji 108.91 ms/iter (107.55 ms … 109.5 ms) 109.25 ms 109.5 ms 109.5 ms
npm/string-width 19 chars ansi+emoji+ascii 6.53 µs/iter (6.35 µs … 6.75 µs) 6.54 µs 6.75 µs 6.75 µs
npm/string-width 190 chars ansi+emoji+ascii 55.52 µs/iter (52.59 µs … 352.73 µs) 54.19 µs 80.77 µs 167.21 µs
npm/string-width 1,900 chars ansi+emoji+ascii 701.71 µs/iter (653.94 µs … 893.78 µs) 715.3 µs 855.37 µs 872.9 µs
npm/string-width 19,000 chars ansi+emoji+ascii 27.19 ms/iter (26.89 ms … 27.41 ms) 27.28 ms 27.41 ms 27.41 ms
npm/string-width 95,000 chars ansi+emoji+ascii 3.68 s/iter (3.66 s … 3.7 s) 3.69 s 3.7 s 3.7 s
```
{% /details %}
TypeScript definition:
```ts
namespace Bun {
export function stringWidth(
/**
* The string to measure
*/
input: string,
options?: {
/**
* If `true`, count ANSI escape codes as part of the string width. If `false`, ANSI escape codes are ignored when calculating the string width.
*
* @default false
*/
countAnsiEscapeCodes?: boolean;
/**
* When it's ambiugous and `true`, count emoji as 1 characters wide. If `false`, emoji are counted as 2 character wide.
*
* @default true
*/
ambiguousIsNarrow?: boolean;
},
): number;
}
```
<!-- ## `Bun.enableANSIColors()` -->

View File

@@ -26,31 +26,12 @@ All imported files and packages are bundled into the executable, along with a co
**Note** — Currently, the `--compile` flag can only accept a single entrypoint at a time and does not support the following flags:
- `--outdir` — use `outfile` instead.
- `--external`
- `--splitting`
- `--public-path`
{% /callout %}
## Deploying to production
Compiled executables reduce memory usage and improve Bun's start time.
Normally, Bun reads and transpiles JavaScript and TypeScript files on `import` and `require`. This is part of what makes so much of Bun "just work", but it's not free. It costs time and memory to read files from disk, resolve file paths, parse, transpile, and print source code.
With compiled executables, you can move that cost from runtime to build-time.
When deploying to production, we recommend the following:
```sh
bun build --compile --minify --sourcemap ./path/to/my/app.ts --outfile myapp
```
**What do these flags do?**
The `--minify` argument optimizes the size of the transpiled output code. If you have a large application, this can save megabytes of space. For smaller applications, it might still improve start time a little.
The `--sourcemap` argument embeds a sourcemap compressed with zstd, so that errors & stacktraces point to their original locations instead of the transpiled location. Bun will automatically decompress & resolve the sourcemap when an error occurs.
## SQLite
You can use `bun:sqlite` imports with `bun build --compile`.

View File

@@ -7,7 +7,7 @@ There are a few behavioral differences to note.
## Performance
With a performance-minded API coupled with the extensively optimized Zig-based JS/TS parser, Bun's bundler is 1.75x faster than esbuild on esbuild's [three.js benchmark](https://github.com/oven-sh/bun/tree/main/bench/bundle).
With an performance-minded API coupled with the extensively optimized Zig-based JS/TS parser, Bun's bundler is 1.75x faster than esbuild on esbuild's [three.js benchmark](https://github.com/oven-sh/bun/tree/main/bench/bundle).
{% image src="/images/bundler-speed.png" caption="Bundling 10 copies of three.js from scratch, with sourcemaps and minification" /%}

View File

@@ -195,7 +195,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout repo
uses: actions/checkout@v4
uses: actions/checkout@v3
- name: Install bun
uses: oven-sh/setup-bun@v1
- name: Install dependencies

View File

@@ -75,6 +75,14 @@ $ bun run dev --watch # ❌ don't do this
Flags that occur at the end of the command will be ignored and passed through to the `"dev"` script itself.
{% /callout %}
### `--smol`
In memory-constrained environments, use the `--smol` flag to reduce memory usage at a cost to performance.
```bash
$ bun --smol run index.tsx
```
## Run a `package.json` script
{% note %}
@@ -87,7 +95,7 @@ $ bun [bun flags] run <script> [script flags]
Your `package.json` can define a number of named `"scripts"` that correspond to shell commands.
```json
```jsonc
{
// ... other fields
"scripts": {
@@ -150,32 +158,3 @@ By default, Bun respects this shebang and executes the script with `node`. Howev
```bash
$ bun run --bun vite
```
## `bun run -` to pipe code from stdin
`bun run -` lets you read JavaScript, TypeScript, TSX, or JSX from stdin and execute it without writing to a temporary file first.
```bash
$ echo "console.log('Hello')" | bun run -
Hello
```
You can also use `bun run -` to redirect files into Bun. For example, to run a `.js` file as if it were a `.ts` file:
```bash
$ echo "console.log!('This is TypeScript!' as any)" > secretly-typescript.js
$ bun run - < secretly-typescript.js
This is TypeScript!
```
For convenience, all code is treated as TypeScript with JSX support when using `bun run -`.
## `bun run --smol`
In memory-constrained environments, use the `--smol` flag to reduce memory usage at a cost to performance.
```bash
$ bun --smol run index.tsx
```
This causes the garbage collector to run more frequently, which can slow down execution. However, it can be useful in environments with limited memory. Bun automatically adjusts the garbage collector's heap size based on the available memory (accounting for cgroups and other memory limits) with and without the `--smol` flag, so this is mostly useful for cases where you want to make the heap size grow more slowly.

View File

@@ -1,33 +0,0 @@
---
name: fetch with unix domain sockets in Bun
---
In Bun, the `unix` option in `fetch()` lets you send HTTP requests over a [unix domain socket](https://en.wikipedia.org/wiki/Unix_domain_socket).
```ts
const unix = "/var/run/docker.sock";
const response = await fetch("http://localhost/info", { unix });
const body = await response.json();
console.log(body); // { ... }
```
---
The `unix` option is a string that specifies the local file path to a unix domain socket. The `fetch()` function will use the socket to send the request to the server instead of using a TCP network connection. `https` is also supported by using the `https://` protocol in the URL instead of `http://`.
To send a `POST` request to an API endpoint over a unix domain socket:
```ts
const response = await fetch("https://hostname/a/path", {
unix: "/var/run/path/to/unix.sock",
method: "POST",
body: JSON.stringify({ message: "Hello from Bun!" }),
headers: {
"Content-Type": "application/json",
},
});
const body = await response.json();
```

View File

@@ -1,46 +0,0 @@
---
name: Common HTTP server usage
---
This starts an HTTP server listening on port `3000`. It demonstrates basic routing with a number of common responses and also handles POST data from standard forms or as JSON.
See [`Bun.serve`](/docs/api/http) for details.
```ts
const server = Bun.serve({
async fetch (req) {
const path = new URL(req.url).pathname;
// respond with text/html
if (path === "/") return new Response("Welcome to Bun!");
// redirect
if (path === "/abc") return Response.redirect("/source", 301);
// send back a file (in this case, *this* file)
if (path === "/source") return new Response(Bun.file(import.meta.file));
// respond with JSON
if (path === "/api") return Response.json({ some: "buns", for: "you" });
// receive JSON data to a POST request
if (req.method === "POST" && path === "/api/post") {
const data = await req.json();
console.log("Received JSON:", data);
return Response.json({ success: true, data });
}
// receive POST data from a form
if (req.method === "POST" && path === "/form") {
const data = await req.formData();
console.log(data.get("someField"));
return new Response("Success");
}
// 404s
return new Response("Page not found", { status: 404 });
}
})
console.log(`Listening on ${server.url}`);
```

View File

@@ -12,7 +12,7 @@ jobs:
runs-on: ubuntu-latest
steps:
# ...
- uses: actions/checkout@v4
- uses: actions/checkout@v3
+ - uses: oven-sh/setup-bun@v1
# run any `bun` or `bunx` command

View File

@@ -1,40 +0,0 @@
---
name: Run a Shell Command
---
Bun Shell is a cross-platform bash-like shell built in to Bun.
It provides a simple way to run shell commands in JavaScript and TypeScript. To get started, import the `$` function from the `bun` package and use it to run shell commands.
```ts#foo.ts
import { $ } from "bun";
await $`echo Hello, world!`; // => "Hello, world!"
```
---
The `$` function is a tagged template literal that runs the command and returns a promise that resolves with the command's output.
```ts#foo.ts
import { $ } from "bun";
const output = await $`ls -l`.text();
console.log(output);
```
---
To get each line of the output as an array, use the `lines` method.
```ts#foo.ts
import { $ } from "bun";
for await (const line of $`ls -l`.lines()) {
console.log(line);
}
```
---
See [Docs > API > Shell](/docs/runtime/shell) for complete documentation.

View File

@@ -15,13 +15,13 @@ Below is the full set of recommended `compilerOptions` for a Bun project. With t
```jsonc
{
"compilerOptions": {
// Enable latest features
"lib": ["ESNext","DOM"],
// enable latest features
"lib": ["ESNext"],
"target": "ESNext",
"module": "ESNext",
"moduleDetection": "force",
"jsx": "react-jsx",
"allowJs": true,
"jsx": "react-jsx", // support JSX
"allowJs": true, // allow importing `.js` from `.ts`
// Bundler mode
"moduleResolution": "bundler",
@@ -32,11 +32,12 @@ Below is the full set of recommended `compilerOptions` for a Bun project. With t
// Best practices
"strict": true,
"skipLibCheck": true,
"noUnusedLocals": true,
"noUnusedParameters": true,
"noFallthroughCasesInSwitch": true,
// Some stricter flags
"noUnusedLocals": true,
"noUnusedParameters": true,
"useUnknownInCatchVariables": true,
"noPropertyAccessFromIndexSignature": true
}
}

View File

@@ -93,7 +93,7 @@ $ bun test --timeout 10000
Many other flags become irrelevant or obsolete when using `bun test`.
- `transform` — Bun supports TypeScript & JSX. Other file types can be configured with [Plugins](/docs/runtime/plugins).
- `transform` — Buns supports TypeScript & JSX. Other file types can be configured with [Plugins](/docs/runtime/plugins).
- `extensionsToTreatAsEsm`
- `haste` — Bun uses it's own internal source maps
- `watchman`, `watchPlugins`, `watchPathIgnorePatterns` — use `--watch` to run tests in watch mode

View File

@@ -1,15 +0,0 @@
---
name: Get the path to an executable bin file
---
`Bun.which` is a utility function to find the absolute path of an executable file. It is similar to the `which` command in Unix-like systems.
```ts#foo.ts
Bun.which("sh"); // => "/bin/sh"
Bun.which("notfound"); // => null
Bun.which("bun"); // => "/home/user/.bun/bin/bun"
```
---
See [Docs > API > Utils](/docs/api/utils#bun-which) for complete documentation.

View File

@@ -0,0 +1,28 @@
---
name: Upgrade an HTTP request to a WebSocket connection
---
Inside `fetch`, use the `server.upgrade()` function to upgrade an incoming `Request` to a WebSocket connection. Bun automatically returns a 101 Switching Protocols response if the upgrade succeeds.
Refer to the [WebSocket docs](/docs/api/websockets) for more information on building WebSocket servers.
```ts
const server = Bun.serve<{ authToken: string }>({
fetch(req, server) {
const success = server.upgrade(req);
if (success) {
// Bun automatically returns a 101 Switching Protocols
// if the upgrade succeeds
return undefined;
}
// handle HTTP request normally
return new Response("Hello world!");
},
websocket: {
// define websocket handlers
},
});
console.log(`Listening on localhost:\${server.port}`);
```

View File

@@ -1,4 +1,4 @@
Bun is an all-in-one toolkit for JavaScript and TypeScript apps. It ships as a single executable called `bun`.
Bun is an all-in-one toolkit for JavaScript and TypeScript apps. It ships as a single executable called `bun`.
At its core is the _Bun runtime_, a fast JavaScript runtime designed as a drop-in replacement for Node.js. It's written in Zig and powered by JavaScriptCore under the hood, dramatically reducing startup times and memory usage.
@@ -6,18 +6,18 @@ At its core is the _Bun runtime_, a fast JavaScript runtime designed as a drop-i
$ bun run index.tsx # TS and JSX supported out of the box
```
The `bun` command-line tool also implements a test runner, script runner, and Node.js-compatible package manager, all significantly faster than existing tools and usable in existing Node.js projects with little to no changes necessary.
The `bun` command-line tool also implements a test runner, script runner, and Node.js-compatible package manager, all significantly faster than existing tools and usable in existing Node.js projects with little to no changes necessary.
```bash
$ bun run start # run the `start` script
$ bun install <pkg> # install a package
$ bun install <pkg> # install a package
$ bun build ./index.tsx # bundle a project for browsers
$ bun test # run tests
$ bunx cowsay 'Hello, world!' # execute a package
```
{% callout type="note" %}
**Bun is still under development.** Use it to speed up your development workflows or run simpler production code in resource-constrained environments like serverless functions. We're working on more complete Node.js compatibility and integration with existing frameworks. Join the [Discord](https://bun.sh/discord) and watch the [GitHub repository](https://github.com/oven-sh/bun) to keep tabs on future releases.
**Bun is still under development.** Use it to speed up your development workflows or run simpler production code in resource-constrained environments like serverless functions. We're working on more complete Node.js compatibility and integration with existing frameworks. Join the [Discord](https://bun.sh/discord) and watch the [GitHub repository](https://github.com/oven-sh/bun) to keep tabs on future releases.
{% /callout %}
Get started with one of the quick links below, or read on to learn more about Bun.

View File

@@ -1,6 +1,6 @@
All packages downloaded from the registry are stored in a global cache at `~/.bun/install/cache`. They are stored in subdirectories named like `${name}@${version}`, so multiple versions of a package can be cached.
{% details summary="Configuring cache behavior" (bunfig.toml) %}
{% details summary="Configuring cache behavior" %}
```toml
[install.cache]

View File

@@ -22,7 +22,8 @@ $ npm install -g bun # the last `npm` command you'll ever need
```
```bash#Homebrew
$ brew install oven-sh/bun/bun # for macOS and Linux
$ brew tap oven-sh/bun # for macOS and Linux
$ brew install bun
```
```bash#Docker
@@ -232,10 +233,6 @@ If you need to remove Bun from your system, use the following commands.
$ rm -rf ~/.bun # for macOS, Linux, and WSL
```
```powershell#Windows
powershell -c ~\.bun\uninstall.ps1
```
```bash#NPM
$ npm uninstall -g bun
```

View File

@@ -1,6 +1,6 @@
This document describes the build process for Windows. If you run into problems, please join the [#windows channel on our Discord](http://bun.sh/discord) for help.
It is strongly recommended to use [PowerShell 7 (`pwsh.exe`)](https://learn.microsoft.com/en-us/powershell/scripting/install/installing-powershell-on-windows?view=powershell-7.4) instead of the default `powershell.exe`.
It is strongly recommended to use [PowerShell 7 (pwsh.exe)](https://learn.microsoft.com/en-us/powershell/scripting/install/installing-powershell-on-windows?view=powershell-7.4) instead of the default `powershell.exe`.
## Prerequisites
@@ -44,12 +44,6 @@ Set-ExecutionPolicy -Scope CurrentUser -ExecutionPolicy Unrestricted
### System Dependencies
- Bun 1.1 or later. We use Bun to run its own code generators.
```ps1
irm bun.sh/install.ps1 | iex
```
- [Visual Studio](https://visualstudio.microsoft.com) with the "Desktop Development with C++" workload.
- Install Git and CMake from this installer, if not already installed.
@@ -63,20 +57,16 @@ After Visual Studio, you need the following:
- Ruby
- Node.js
{% callout %}
The Zig compiler is automatically downloaded, installed, and updated by the building process.
{% /callout %}
[Scoop](https://scoop.sh) can be used to install these remaining tools easily:
[Scoop](https://scoop.sh) can be used to install these easily:
```ps1
irm https://get.scoop.sh | iex
Invoke-RestMethod -Uri https://get.scoop.sh | Invoke-Expression
scoop install nodejs-lts go rust nasm ruby perl
scoop llvm@16.0.4 # scoop bug if you install llvm and the rest at the same time
```
If you intend on building WebKit locally (optional), you should install these packages:
If you intend on building WebKit locally (optional), you should install some more packages:
```ps1
scoop install make cygwin python
@@ -98,51 +88,65 @@ Get-Command mt
It is not recommended to install `ninja` / `cmake` into your global path, because you may run into a situation where you try to build bun without .\scripts\env.ps1 sourced.
{% /callout %}
### Codegen
On Unix platforms, we depend on an existing build of Bun to generate code for itself. Since the Windows build is not stable enough for this to run the code generators, you currently need to use another computer or WSL to generate this:
```bash
$ wsl --install # run twice if it doesn't install
# in the linux environment
$ sudo apt install unzip
$ curl -fsSL https://bun.sh/install | bash
```
Whenever codegen-related things are updated, please re-run
```ps1
$ .\scripts\codegen.ps1
```
(TODO: it probably is stable enough to use `bun.exe` for codegen, but the CMake configuration still has these disabled by default)
## Building
```ps1
bun install
bun install # or npm install
.\scripts\env.ps1
.\scripts\update-submodules.ps1 # this syncs git submodule state
.\scripts\all-dependencies.ps1 # this builds all dependencies
.\scripts\make-old-js.ps1 # runs some old code generators
.\scripts\all-dependencies.ps1 # this builds all dependencies
# Configure build environment
cmake -Bbuild -GNinja -DCMAKE_BUILD_TYPE=Debug
cd build # this was created by the codegen.ps1 script earlier
# Build bun
ninja -Cbuild
cmake .. -G Ninja -DCMAKE_BUILD_TYPE=Debug
ninja
```
If this was successful, you should have a `bun-debug.exe` in the `build` folder.
```ps1
.\build\bun-debug.exe --revision
.\build\bun-debug.exe --version
```
You should add this to `$Env:PATH`. The simplest way to do so is to open the start menu, type "Path", and then navigate the environment variables menu to add `C:\.....\bun\build` to the user environment variable `PATH`. You should then restart your editor (if it does not update still, log out and log back in).
## Extra paths
- WebKit is extracted to `build/bun-webkit`
- Zig is extracted to `.cache/zig/zig.exe`
You should add this to `$Env:PATH`. The simplest way to do so is to open the start menu, type "Path", and then navigate the environment variables menu to add `C:\.....\bun\build` to your path.
## Tests
You can run the test suite either using `bun test`, or by using the wrapper script `packages\bun-internal-test`. The internal test package is a wrapper cli to run every test file in a separate instance of bun.exe, to prevent a crash in the test runner from stopping the entire suite.
You can run the test suite by using `packages\bun-internal-test`
```ps1
# Setup
bun i --cwd packages\bun-internal-test
cd packages\bun-internal-test
bun i
cd ..\..
# Run the entire test suite with reporter
# the package.json script "test" uses "build/bun-debug.exe" by default
bun run test
# Run an individual test file:
bun-debug test node\fs
bun-debug test "C:\bun\test\js\bun\resolve\import-meta.test.js"
bun test node\fs
bun test "C:\bun\test\js\bun\resolve\import-meta.test.js"
```
## Troubleshooting

View File

@@ -13,7 +13,7 @@ $ brew install automake ccache cmake coreutils gnu-sed go icu4c libiconv libtool
```
```bash#Ubuntu/Debian
$ sudo apt install curl wget lsb-release software-properties-common cargo ccache cmake git golang libtool ninja-build pkg-config rustc ruby-full xz-utils
$ sudo apt install cargo ccache cmake git golang libtool ninja-build pkg-config rustc ruby-full xz-utils
```
```bash#Arch
@@ -24,31 +24,34 @@ $ sudo pacman -S base-devel ccache cmake git go libiconv libtool make ninja pkg-
$ sudo dnf install cargo ccache cmake git golang libtool ninja-build pkg-config rustc ruby libatomic-static libstdc++-static sed unzip which libicu-devel 'perl(Math::BigInt)'
```
```bash#openSUSE Tumbleweed
$ sudo zypper install go cmake ninja automake git rustup && rustup toolchain install stable
```
{% /codetabs %}
> **Note**: The Zig compiler is automatically installed and updated by the build scripts. Manual installation is not required.
Before starting, you will need to already have a release build of Bun installed, as we use our bundler to transpile and minify our code, as well as for code generation scripts.
{% codetabs %}
```bash#Native
$ curl -fsSL https://bun.sh/install | bash
$ curl -fsSL https://bun.sh/install | bash # for macOS, Linux, and WSL
```
```bash#npm
$ npm install -g bun
$ npm install -g bun # the last `npm` command you'll ever need
```
```bash#Homebrew
$ brew tap oven-sh/bun
$ brew tap oven-sh/bun # for macOS and Linux
$ brew install bun
```
```bash#Docker
$ docker pull oven/bun
$ docker run --rm --init --ulimit memlock=-1:-1 oven/bun
```
```bash#proto
$ proto install bun
```
{% /codetabs %}
## Install LLVM
@@ -76,10 +79,6 @@ $ sudo dnf copr enable -y @fedora-llvm-team/llvm-snapshots
$ sudo dnf install llvm clang lld
```
```bash#openSUSE Tumbleweed
$ sudo zypper install clang16 lld16 llvm16
```
{% /codetabs %}
If none of the above solutions apply, you will have to install it [manually](https://github.com/llvm/llvm-project/releases/tag/llvmorg-16.0.6).
@@ -137,14 +136,12 @@ $ cmake -S . -B build -G Ninja -DCMAKE_BUILD_TYPE=Debug
$ ninja -C build # 'bun run build' runs just this
```
Advanced users can pass CMake flags to customize the build.
Advanced uses can pass CMake flags to customize the build.
## VSCode
VSCode is the recommended IDE for working on Bun, as it has been configured. Once opening, you can run `Extensions: Show Recommended Extensions` to install the recommended extensions for Zig and C++. ZLS is automatically configured.
If you use a different editor, make sure that you tell ZLS to use the automatically installed Zig compiler, which is located at `./.cache/zig/zig` (`zig.exe` on Windows).
## Code generation scripts
{% callout %}
@@ -307,7 +304,8 @@ $ xcode-select --install
Bun defaults to linking `libatomic` statically, as not all systems have it. If you are building on a distro that does not have a static libatomic available, you can run the following command to enable dynamic linking:
```bash
$ bun setup -DUSE_STATIC_LIBATOMIC=OFF
$ cmake -Bbuild -GNinja -DUSE_STATIC_LIBATOMIC=ON
$ ninja -Cbuild
```
The built version of Bun may not work on other systems if compiled this way.

View File

@@ -30,20 +30,17 @@ Click the link in the right column to jump to the associated documentation.
---
- File I/O
- [`Bun.file`](/docs/api/file-io#reading-files-bun-file)
[`Bun.write`](/docs/api/file-io#writing-files-bun-write)
- [`Bun.file`](/docs/api/file-io#reading-files-bun-file) [`Bun.write`](/docs/api/file-io#writing-files-bun-write)
---
- Child processes
- [`Bun.spawn`](/docs/api/spawn#spawn-a-process-bun-spawn)
[`Bun.spawnSync`](/docs/api/spawn#blocking-api-bun-spawnsync)
- [`Bun.spawn`](/docs/api/spawn#spawn-a-process-bun-spawn) [`Bun.spawnSync`](/docs/api/spawn#blocking-api-bun-spawnsync)
---
- TCP
- [`Bun.listen`](/docs/api/tcp#start-a-server-bun-listen)
[`Bun.connect`](/docs/api/tcp#start-a-server-bun-listen)
- [`Bun.listen`](/docs/api/tcp#start-a-server-bun-listen) [`Bun.connect`](/docs/api/tcp#start-a-server-bun-listen)
---
@@ -63,8 +60,7 @@ Click the link in the right column to jump to the associated documentation.
---
- Hashing
- [`Bun.hash`](/docs/api/hashing#bun-hash)
[`Bun.CryptoHasher`](/docs/api/hashing#bun-cryptohasher)
- [`Bun.hash`](/docs/api/hashing#bun-hash) [`Bun.CryptoHasher`](/docs/api/hashing#bun-cryptohasher)
---
@@ -104,26 +100,6 @@ Click the link in the right column to jump to the associated documentation.
---
- Utilities
- [`Bun.version`](/docs/api/utils#bun-version)
[`Bun.revision`](/docs/api/utils#bun-revision)
[`Bun.env`](/docs/api/utils#bun-env)
[`Bun.main`](/docs/api/utils#bun-main)
[`Bun.sleep()`](/docs/api/utils#bun-sleep)
[`Bun.sleepSync()`](/docs/api/utils#bun-sleepsync)
[`Bun.which()`](/docs/api/utils#bun-which)
[`Bun.peek()`](/docs/api/utils#bun-peek)
[`Bun.openInEditor()`](/docs/api/utils#bun-openineditor)
[`Bun.deepEquals()`](/docs/api/utils#bun-deepequals)
[`Bun.escapeHTML()`](/docs/api/utils#bun-escapehtml)
[`Bun.fileURLToPath()`](/docs/api/utils#bun-fileurltopath)
[`Bun.pathToFileURL()`](/docs/api/utils#bun-pathtofileurl)
[`Bun.gzipSync()`](/docs/api/utils#bun-gzipsync)
[`Bun.gunzipSync()`](/docs/api/utils#bun-gunzipsync)
[`Bun.deflateSync()`](/docs/api/utils#bun-deflatesync)
[`Bun.inflateSync()`](/docs/api/utils#bun-inflatesync)
[`Bun.inspect()`](/docs/api/utils#bun-inspect)
[`Bun.nanoseconds()`](/docs/api/utils#bun-nanoseconds)
[`Bun.readableStreamTo*()`](/docs/api/utils#bun-readablestreamto)
[`Bun.resolveSync()`](/docs/api/utils#bun-resolvesync)
- [`Bun.version`](/docs/api/utils#bun-version) [`Bun.revision`](/docs/api/utils#bun-revision) [`Bun.env`](/docs/api/utils#bun-env) [`Bun.main`](/docs/api/utils#bun-main) [`Bun.sleep()`](/docs/api/utils#bun-sleep) [`Bun.sleepSync()`](/docs/api/utils#bun-sleepsync) [`Bun.which()`](/docs/api/utils#bun-which) [`Bun.peek()`](/docs/api/utils#bun-peek) [`Bun.openInEditor()`](/docs/api/utils#bun-openineditor) [`Bun.deepEquals()`](/docs/api/utils#bun-deepequals) [`Bun.escapeHTML()`](/docs/api/utils#bun-escapehtml) [`Bun.fileURLToPath()`](/docs/api/utils#bun-fileurltopath) [`Bun.pathToFileURL()`](/docs/api/utils#bun-pathtofileurl) [`Bun.gzipSync()`](/docs/api/utils#bun-gzipsync) [`Bun.gunzipSync()`](/docs/api/utils#bun-gunzipsync) [`Bun.deflateSync()`](/docs/api/utils#bun-deflatesync) [`Bun.inflateSync()`](/docs/api/utils#bun-inflatesync) [`Bun.inspect()`](/docs/api/utils#bun-inspect) [`Bun.nanoseconds()`](/docs/api/utils#bun-nanoseconds) [`Bun.readableStreamTo*()`](/docs/api/utils#bun-readablestreamto) [`Bun.resolveSync()`](/docs/api/utils#bun-resolvesync)
{% /table %}

View File

@@ -426,94 +426,4 @@ editor = "code"
# - "nvim", "neovim"
# - "vim","vi"
# - "emacs"
```
-->
## `bun run`
The `bun run` command can be configured under the `[run]` section. These apply to the `bun run` command and the `bun` command when running a file or executable or script.
Currently, `bunfig.toml` isn't always automatically loaded for `bun run` in a local project (it does check for a global `bunfig.toml`), so you might still need to pass `-c` or `-c=bunfig.toml` to use these settings.
### `run.shell` - use the system shell or Bun's shell
The shell to use when running package.json scripts via `bun run` or `bun`. On Windows, this defaults to `"bun"` and on other platforms it defaults to `"system"`.
To always use the system shell instead of Bun's shell (default behavior unless Windows):
```toml
[run]
# default outside of Windows
shell = "system"
```
To always use Bun's shell instead of the system shell:
```toml
[run]
# default on Windows
shell = "bun"
```
### `run.bun` - auto alias `node` to `bun`
When `true`, this prepends `$PATH` with a `node` symlink that points to the `bun` binary for all scripts or executables invoked by `bun run` or `bun`.
This means that if you have a script that runs `node`, it will actually run `bun` instead, without needing to change your script. This works recursively, so if your script runs another script that runs `node`, it will also run `bun` instead. This applies to shebangs as well, so if you have a script with a shebang that points to `node`, it will actually run `bun` instead.
By default, this is enabled if `node` is not already in your `$PATH`.
```toml
[run]
# equivalent to `bun --bun` for all `bun run` commands
bun = true
```
You can test this by running:
```sh
$ bun --bun which node # /path/to/bun
$ bun which node # /path/to/node
```
This option is equivalent to prefixing all `bun run` commands with `--bun`:
```sh
bun --bun run dev
bun --bun dev
bun run --bun dev
```
If set to `false`, this will disable the `node` symlink.
### `run.silent` - suppress reporting the command being run
When `true`, suppresses the output of the command being run by `bun run` or `bun`.
```toml
[run]
silent = true
```
Without this option, the command being run will be printed to the console:
```sh
$ bun run dev
> $ echo "Running \"dev\"..."
Running "dev"...
```
With this option, the command being run will not be printed to the console:
```sh
$ bun run dev
Running "dev"...
```
This is equivalent to passing `--silent` to all `bun run` commands:
```sh
bun --silent run dev
bun --silent dev
bun run --silent dev
```
``` -->

View File

@@ -163,16 +163,6 @@ These environment variables are read by Bun and configure aspects of its behavio
---
- `BUN_CONFIG_MAX_HTTP_REQUESTS`
- Control the maximum number of concurrent HTTP requests sent by fetch and `bun install`. Defaults to `256`. If you are running into rate limits or connection issues, you can reduce this number.
---
- `BUN_CONFIG_NO_CLEAR_TERMINAL_ON_RELOAD`
- If `BUN_CONFIG_NO_CLEAR_TERMINAL_ON_RELOAD=1`, then `bun --watch` will not clear the console on reload
---
- `DO_NOT_TRACK`
- Telemetry is not sent yet as of November 28th, 2023, but we are planning to add telemetry in the coming months. If `DO_NOT_TRACK=1`, then analytics are [disabled](https://do-not-track.dev/). Bun records bundle timings (so we can answer with data, "is Bun getting faster?") and feature usage (e.g., "are people actually using macros?"). The request body size is about 60 bytes, so it's not a lot of data. Equivalent of `telemetry=false` in bunfig.

View File

@@ -56,7 +56,7 @@ Some methods are not optimized yet.
### [`node:fs`](https://nodejs.org/api/fs.html)
🟡 Missing `statfs` `statfsSync`, `opendirSync`. `Dir` is partially implemented.
🟡 Missing `Dir` `openAsBlob` `opendir` `opendirSync` `statfs` `statfsSync`
### [`node:http`](https://nodejs.org/api/http.html)
@@ -116,7 +116,7 @@ Some methods are not optimized yet.
### [`node:stream`](https://nodejs.org/api/stream.html)
🟡 Missing `getDefaultHighWaterMark` `setDefaultHighWaterMark` `toWeb`
🟡 Missing `getDefaultHighWaterMark` `setDefaultHighWaterMark`
### [`node:string_decoder`](https://nodejs.org/api/string_decoder.html)
@@ -148,7 +148,7 @@ Some methods are not optimized yet.
### [`node:url`](https://nodejs.org/api/url.html)
🟢 Fully implemented.
🟡 Missing `domainToASCII` `domainToUnicode`. It's recommended to use `URL` and `URLSearchParams` globals instead.
### [`node:util`](https://nodejs.org/api/util.html)
@@ -432,7 +432,7 @@ The table below lists all globals implemented by Node.js and Bun's current compa
### [`URL`](https://developer.mozilla.org/en-US/docs/Web/API/URL)
🟡 `URL.createObjectURL` is missing. See [Issue #3925](https://github.com/oven-sh/bun/issues/3925)
🟢 Fully implemented.
### [`URLSearchParams`](https://developer.mozilla.org/en-US/docs/Web/API/URLSearchParams)

View File

@@ -63,7 +63,7 @@ Plugins are primarily used to extend Bun with loaders for additional file types.
```ts#yamlPlugin.ts
import { plugin } from "bun";
await plugin({
plugin({
name: "YAML",
async setup(build) {
const { load } = await import("js-yaml");
@@ -179,7 +179,7 @@ Loading a YAML file is useful, but plugins support more than just data loading.
```ts#sveltePlugin.ts
import { plugin } from "bun";
await plugin({
plugin({
name: "svelte loader",
async setup(build) {
const { compile } = await import("svelte/compiler");

View File

@@ -12,7 +12,7 @@ import { $ } from "bun";
const response = await fetch("https://example.com");
// Use Response as stdin.
await $`cat < ${response} | wc -c`; // 1256
await $`echo < ${response} > wc -c`; // 120
```
## Features:
@@ -67,21 +67,9 @@ console.log(exitCode); // 0
## Redirection
A command's _input_ or _output_ may be _redirected_ using the typical Bash operators:
Bun Shell supports redirection with `<`, `>`, and `|` operators.
- `<` redirect stdin
- `>` or `1>` redirect stdout
- `2>` redirect stderr
- `&>` redirect both stdout and stderr
- `>>` or `1>>` redirect stdout, _appending_ to the destination, instead of overwriting
- `2>>` redirect stderr, _appending_ to the destination, instead of overwriting
- `&>>` redirect both stdout and stderr, _appending_ to the destination, instead of overwriting
- `1>&2` redirect stdout to stderr (all writes to stdout will instead be in stderr)
- `2>&1` redirect stderr to stdout (all writes to stderr will instead be in stdout)
Bun Shell also supports redirecting from and to JavaScript objects.
### Example: Redirect output to JavaScript objects (`>`)
### To JavaScript objects (`>`)
To redirect stdout to a JavaScript object, use the `>` operator:
@@ -100,7 +88,7 @@ The following JavaScript objects are supported for redirection to:
- `Buffer`, `Uint8Array`, `Uint16Array`, `Uint32Array`, `Int8Array`, `Int16Array`, `Int32Array`, `Float32Array`, `Float64Array`, `ArrayBuffer`, `SharedArrayBuffer` (writes to the underlying buffer)
- `Bun.file(path)`, `Bun.file(fd)` (writes to the file)
### Example: Redirect input from JavaScript objects (`<`)
### From JavaScript objects (`<`)
To redirect the output from JavaScript objects to stdin, use the `<` operator:
@@ -120,51 +108,7 @@ The following JavaScript objects are supported for redirection from:
- `Bun.file(path)`, `Bun.file(fd)` (reads from the file)
- `Response` (reads from the body)
### Example: Redirect stdin -> file
```js
import { $ } from "bun";
await $`cat < myfile.txt`;
```
### Example: Redirect stdout -> file
```js
import { $ } from "bun";
await $`echo bun! > greeting.txt`;
```
### Example: Redirect stderr -> file
```js
import { $ } from "bun";
await $`bun run index.ts 2> errors.txt`;
```
### Example: Redirect stdout -> stderr
```js
import { $ } from "bun";
// redirects stderr to stdout, so all output
// will be available on stdout
await $`bun run ./index.ts 2>&1`;
```
### Example: Redirect stderr -> stdout
```js
import { $ } from "bun";
// redirects stdout to stderr, so all output
// will be available on stderr
await $`bun run ./index.ts 1>&2`;
```
## Piping (`|`)
### Piping (`|`)
Like in bash, you can pipe the output of one command to another:
@@ -385,7 +329,7 @@ Exposes Bun Shell's escaping logic as a function:
```js
import { $ } from "bun";
console.log($.escape('$(foo) `bar` "baz"'));
console.log($.escape('$(foo) `bar` "baz"'))
// => \$(foo) \`bar\` \"baz\"
```
@@ -394,32 +338,31 @@ If you do not want your string to be escaped, wrap it in a `{ raw: 'str' }` obje
```js
import { $ } from "bun";
await $`echo ${{ raw: '$(foo) `bar` "baz"' }}`;
await $`echo ${{ raw: '$(foo) `bar` "baz"' }}`
// => bun: command not found: foo
// => bun: command not found: bar
// => baz
```
## .sh file loader
## .bun.sh file loader
For simple shell scripts, instead of `/bin/sh`, you can use Bun Shell to run shell scripts.
For simple shell scripts, instead of `sh`, you can use Bun Shell to run shell scripts.
To do so, just run the script with `bun` on a file with the `.bun.sh` extension.
```sh#script.bun.sh
echo "Hello World! pwd=$(pwd)"
```
To do that, run any file with bun that ends with `.bun.sh`:
```sh
$ echo "echo Hello World!" > script.bun.sh
$ bun ./script.bun.sh
Hello World! pwd=/home/demo
> Hello World!
```
Scripts with Bun Shell are cross platform, which means they work on Windows:
On Windows, Bun Shell is used automatically to run `.sh` files when using Bun:
```
PS C:\Users\Demo> bun .\script.bun.sh
Hello World! pwd=C:\Users\Demo
```sh
$ echo "echo Hello World!" > script.sh
# On windows, .bun.sh is not needed, just .sh
$ bun ./script.sh
> Hello World!
```
## Credits

View File

@@ -52,7 +52,7 @@ It is possible to specify a coverage threshold in `bunfig.toml`. If your test su
coverageThreshold = 0.9
# to set different thresholds for lines and functions
coverageThreshold = { lines = 0.9, functions = 0.9 }
coverageThreshold = { line = 0.9, function = 0.9 }
```
### Sourcemaps

View File

@@ -327,7 +327,7 @@ Bun implements the following matchers. Full Jest compatibility is on the roadmap
---
-
-
- [`.assertions()`](https://jestjs.io/docs/expect#expectassertionsnumber)
---
@@ -337,7 +337,7 @@ Bun implements the following matchers. Full Jest compatibility is on the roadmap
---
-
-
- [`.hasAssertions()`](https://jestjs.io/docs/expect#expecthasassertions)
---

View File

@@ -17,13 +17,13 @@ Bun supports things like top-level await, JSX, and extensioned `.ts` imports, wh
```jsonc
{
"compilerOptions": {
// Enable latest features
// enable latest features
"lib": ["ESNext"],
"target": "ESNext",
"module": "ESNext",
"moduleDetection": "force",
"jsx": "react-jsx",
"allowJs": true,
"jsx": "react-jsx", // support JSX
"allowJs": true, // allow importing `.js` from `.ts`
// Bundler mode
"moduleResolution": "bundler",
@@ -34,17 +34,18 @@ Bun supports things like top-level await, JSX, and extensioned `.ts` imports, wh
// Best practices
"strict": true,
"skipLibCheck": true,
"noUnusedLocals": true,
"noUnusedParameters": true,
"noFallthroughCasesInSwitch": true,
// Some stricter flags
"noUnusedLocals": true,
"noUnusedParameters": true,
"useUnknownInCatchVariables": true,
"noPropertyAccessFromIndexSignature": true
}
}
```
If you run `bun init` in a new directory, this `tsconfig.json` will be generated for you. (The stricter flags are disabled by default.)
If you run `bun init` in a new directory, this `tsconfig.json` will be generated for you.
```sh
$ bun init

View File

@@ -21,7 +21,7 @@ const withExtensions = [
return !!json[key]?.extensions?.length;
})
.flatMap(mime => {
return [...new Set(json[mime].extensions)].map(ext => {
return [...new Set([...json[mime].extensions])].map(ext => {
return [`.{.@"${ext}", all.@"${mime}"}`];
});
})

View File

@@ -2,13 +2,14 @@
"private": true,
"name": "bun",
"dependencies": {
"@biomejs/biome": "1.5.3",
"@vscode/debugadapter": "^1.61.0",
"esbuild": "^0.17.15",
"eslint": "^8.20.0",
"eslint-config-prettier": "^8.5.0",
"mitata": "^0.1.3",
"peechy": "0.4.34",
"prettier": "^3.2.5",
"prettier": "3.2.2",
"react": "next",
"react-dom": "next",
"source-map-js": "^1.0.2",
@@ -24,10 +25,9 @@
"build": "if [ ! -e build ]; then bun setup; fi && ninja -C build",
"build:valgrind": "cmake . -DZIG_OPTIMIZE=Debug -DUSE_DEBUG_JSC=ON -DCMAKE_BUILD_TYPE=Debug -GNinja -Bbuild-valgrind && ninja -Cbuild-valgrind",
"build:release": "cmake . -DCMAKE_BUILD_TYPE=Release -GNinja -Bbuild-release && ninja -Cbuild-release",
"build:debug-zig-release": "cmake . -DCMAKE_BUILD_TYPE=Release -DZIG_OPTIMIZE=Debug -GNinja -Bbuild-debug-zig-release && ninja -Cbuild-debug-zig-release",
"build:safe": "cmake . -DZIG_OPTIMIZE=ReleaseSafe -DUSE_DEBUG_JSC=ON -DCMAKE_BUILD_TYPE=Release -GNinja -Bbuild-safe && ninja -Cbuild-safe",
"typecheck": "tsc --noEmit && cd test && bun run typecheck",
"fmt": "prettier --write --cache './{.vscode,src,test,bench,packages/{bun-types,bun-inspector-*,bun-vscode,bun-debug-adapter-protocol}}/**/*.{mjs,ts,tsx,js,jsx}'",
"fmt": "biome format --write {.vscode,src,test,bench,packages/{bun-types,bun-inspector-*,bun-vscode,bun-debug-adapter-protocol}}",
"fmt:zig": "zig fmt src/*.zig src/*/*.zig src/*/*/*.zig src/*/*/*/*.zig",
"lint": "eslint './**/*.d.ts' --cache",
"lint:fix": "eslint './**/*.d.ts' --cache --fix",

Binary file not shown.

Binary file not shown.

View File

@@ -7,11 +7,9 @@
"runners/qunit"
],
"dependencies": {
"@actions/core": "latest",
"p-queue": "^8.0.1"
"@actions/core": "latest"
},
"devDependencies": {
"@types/p-queue": "^3.2.1",
"bun-types": "canary",
"prettier": "^2.8.2"
},

View File

@@ -1,91 +1,49 @@
import * as action from "@actions/core";
import { spawn, spawnSync } from "child_process";
import { rmSync, writeFileSync, readFileSync, mkdirSync, openSync, close, closeSync } from "fs";
import { readFile, rm } from "fs/promises";
import { rmSync, writeFileSync, readFileSync } from "fs";
import { readFile } from "fs/promises";
import { readdirSync } from "node:fs";
import { resolve, basename } from "node:path";
import { constants, cpus, hostname, tmpdir, totalmem, userInfo } from "os";
import { join, normalize } from "path";
import { cpus, hostname, totalmem, userInfo } from "os";
import { fileURLToPath } from "url";
import PQueue from "p-queue";
const run_start = new Date();
const TIMEOUT_DURATION = 1000 * 60 * 5;
const SHORT_TIMEOUT_DURATION = Math.ceil(TIMEOUT_DURATION / 5);
function defaultConcurrency() {
// This causes instability due to the number of open file descriptors / sockets in some tests
// Windows has higher limits
if (process.platform !== "win32") {
return 1;
}
return Math.min(Math.floor((cpus().length - 2) / 2), 2);
}
const windows = process.platform === "win32";
const KEEP_TMPDIR = process.env["BUN_KEEP_TMPDIR"] === "1";
const nativeMemory = totalmem();
const force_ram_size_input = parseInt(process.env["BUN_JSC_forceRAMSize"] || "0", 10);
let force_ram_size = Number(BigInt(nativeMemory) >> BigInt(2)) + "";
if (!(Number.isSafeInteger(force_ram_size_input) && force_ram_size_input > 0)) {
force_ram_size = force_ram_size_input + "";
}
function uncygwinTempDir() {
if (process.platform === "win32") {
for (let key of ["TMPDIR", "TEMP", "TEMPDIR", "TMP"]) {
let TMPDIR = process.env[key] || "";
if (!/^\/[a-zA-Z]\//.test(TMPDIR)) {
continue;
}
const driveLetter = TMPDIR[1];
TMPDIR = path.win32.normalize(`${driveLetter.toUpperCase()}:` + TMPDIR.substring(2));
process.env[key] = TMPDIR;
}
}
}
uncygwinTempDir();
const cwd = resolve(fileURLToPath(import.meta.url), "../../../../");
process.chdir(cwd);
const ci = !!process.env["GITHUB_ACTIONS"];
const enableProgressBar = false;
const enableProgressBar = !ci;
const dirPrefix = "bun-test-tmp-" + ((Math.random() * 100_000_0) | 0).toString(36) + "_";
const run_concurrency = Math.max(Number(process.env["BUN_TEST_CONCURRENCY"] || defaultConcurrency(), 10), 1);
const queue = new PQueue({ concurrency: run_concurrency });
var prevTmpdir = "";
function maketemp() {
prevTmpdir = join(
tmpdir(),
dirPrefix + (Date.now() | 0).toString() + "_" + ((Math.random() * 100_000_0) | 0).toString(36),
);
mkdirSync(prevTmpdir, { recursive: true });
return prevTmpdir;
function defaultConcurrency() {
// Concurrency causes more flaky tests, only enable it by default on windows
// See https://github.com/oven-sh/bun/issues/8071
if (windows) {
return Math.floor((cpus().length - 2) / 2);
}
return 1;
}
const extensions = [".js", ".ts", ".jsx", ".tsx", ".mjs", ".cjs", ".mts", ".cts", ".mjsx", ".cjsx", ".mtsx", ".ctsx"];
const run_concurrency = Math.max(Number(process.env["BUN_TEST_CONCURRENCY"] || defaultConcurrency(), 10), 1);
const extensions = [".js", ".ts", ".jsx", ".tsx"];
const git_sha =
process.env["GITHUB_SHA"] ?? spawnSync("git", ["rev-parse", "HEAD"], { encoding: "utf-8" }).stdout.trim();
const TEST_FILTER = process.env.BUN_TEST_FILTER;
function isTest(path) {
if (!basename(path).includes(".test.") || !extensions.some(ext => path.endsWith(ext))) {
return false;
}
if (TEST_FILTER) {
if (!path.includes(TEST_FILTER)) {
return false;
}
}
return true;
}
@@ -100,15 +58,8 @@ function* findTests(dir, query) {
}
}
let bunExe = "bun";
if (process.argv.length > 2) {
bunExe = resolve(process.argv.at(-1));
} else if (process.env.BUN_PATH) {
const { BUN_PATH_BASE, BUN_PATH } = process.env;
bunExe = resolve(normalize(BUN_PATH_BASE), normalize(BUN_PATH));
}
// pick the last one, kind of a hack to allow 'bun run test bun-release' to test the release build
let bunExe = (process.argv.length > 2 ? process.argv[process.argv.length - 1] : null) ?? "bun";
const { error, stdout: revision_stdout } = spawnSync(bunExe, ["--revision"], {
env: { ...process.env, BUN_DEBUG_QUIET_LOGS: 1 },
});
@@ -149,182 +100,57 @@ const failing_tests = [];
const passing_tests = [];
const fixes = [];
const regressions = [];
let maxFd = -1;
function getMaxFileDescriptor(path) {
if (process.platform === "win32") {
return -1;
}
hasInitialMaxFD = true;
if (process.platform === "linux") {
try {
readdirSync("/proc/self/fd").forEach(name => {
const fd = parseInt(name.trim(), 10);
if (Number.isSafeInteger(fd) && fd >= 0) {
maxFd = Math.max(maxFd, fd);
}
});
return maxFd;
} catch {}
}
const devnullfd = openSync("/dev/null", "r");
closeSync(devnullfd);
maxFd = devnullfd + 1;
return maxFd;
}
let hasInitialMaxFD = false;
const activeTests = new Map();
let slowTestCount = 0;
function checkSlowTests() {
const now = Date.now();
const prevSlowTestCount = slowTestCount;
slowTestCount = 0;
for (const [path, { start, proc }] of activeTests) {
if (proc && now - start >= TIMEOUT_DURATION) {
console.error(
`\x1b[31merror\x1b[0;2m:\x1b[0m Killing test ${JSON.stringify(path)} after ${Math.ceil((now - start) / 1000)}s`,
);
proc?.stdout?.destroy?.();
proc?.stderr?.destroy?.();
proc?.kill?.();
} else if (now - start > SHORT_TIMEOUT_DURATION) {
console.error(
`\x1b[33mwarning\x1b[0;2m:\x1b[0m Test ${JSON.stringify(path)} has been running for ${Math.ceil(
(now - start) / 1000,
)}s`,
);
slowTestCount++;
}
}
if (slowTestCount > prevSlowTestCount && queue.concurrency > 1) {
queue.concurrency += 1;
}
}
setInterval(checkSlowTests, SHORT_TIMEOUT_DURATION).unref();
var currentTestNumber = 0;
async function runTest(path) {
const thisTestNumber = currentTestNumber++;
const name = path.replace(cwd, "").slice(1);
let exitCode, signal, err, output;
const expected_crash_reason = windows
? await readFile(resolve(path), "utf-8").then(data => {
const match = data.match(/@known-failing-on-windows:(.*)\n/);
return match ? match[1].trim() : null;
})
const match = data.match(/@known-failing-on-windows:(.*)\n/);
return match ? match[1].trim() : null;
})
: null;
const start = Date.now();
const activeTestObject = { start, proc: undefined };
activeTests.set(path, activeTestObject);
try {
await new Promise((finish, reject) => {
const chunks = [];
process.stderr.write(
`
at ${((start - run_start.getTime()) / 1000).toFixed(2)}s, file ${thisTestNumber
.toString()
.padStart(total.toString().length, "0")}/${total}, ${failing_tests.length} failing files
Starting "${name}"
`,
);
const TMPDIR = maketemp();
const proc = spawn(bunExe, ["test", resolve(path)], {
stdio: ["ignore", "pipe", "pipe"],
env: {
...process.env,
FORCE_COLOR: "1",
BUN_GARBAGE_COLLECTOR_LEVEL: "1",
BUN_JSC_forceRAMSize: force_ram_size,
BUN_RUNTIME_TRANSPILER_CACHE_PATH: "0",
GITHUB_ACTIONS: process.env.GITHUB_ACTIONS ?? "true",
BUN_DEBUG_QUIET_LOGS: "1",
[windows ? "TEMP" : "TMPDIR"]: TMPDIR,
},
});
activeTestObject.proc = proc;
proc.stdout.once("end", () => {
done();
});
let doneCalls = 0;
var done = () => {
// TODO: wait for stderr as well
// spawn.test currently causes it to hang
if (doneCalls++ === 1) {
actuallyDone();
}
};
var actuallyDone = function () {
actuallyDone = done = () => {};
proc?.stderr?.unref?.();
proc?.stdout?.unref?.();
proc?.unref?.();
output = Buffer.concat(chunks).toString();
finish();
};
// if (!KEEP_TMPDIR)
// proc.once("close", () => {
// rm(TMPDIR, { recursive: true, force: true }).catch(() => {});
// });
proc.stdout.on("data", chunk => {
chunks.push(chunk);
if (run_concurrency === 1) process.stdout.write(chunk);
});
proc.stderr.on("data", chunk => {
chunks.push(chunk);
if (run_concurrency === 1) process.stderr.write(chunk);
});
proc.once("close", () => {
activeTestObject.proc = undefined;
});
proc.once("exit", (code_, signal_) => {
activeTestObject.proc = undefined;
exitCode = code_;
signal = signal_;
if (signal || exitCode !== 0) {
actuallyDone();
} else {
done();
}
});
proc.once("error", err_ => {
activeTestObject.proc = undefined;
err = err_;
actuallyDone();
});
await new Promise((done, reject) => {
const proc = spawn(bunExe, ["test", resolve(path)], {
stdio: ["ignore", "pipe", "pipe"],
timeout: 1000 * 60 * 3,
env: {
...process.env,
FORCE_COLOR: "1",
BUN_GARBAGE_COLLECTOR_LEVEL: "1",
BUN_JSC_forceRAMSize: force_ram_size,
BUN_RUNTIME_TRANSPILER_CACHE_PATH: "0",
// reproduce CI results locally
GITHUB_ACTIONS: process.env.GITHUB_ACTIONS ?? "true",
BUN_DEBUG_QUIET_LOGS: "1",
},
});
} finally {
activeTests.delete(path);
}
if (!hasInitialMaxFD) {
getMaxFileDescriptor();
} else if (maxFd > 0) {
const prevMaxFd = maxFd;
maxFd = getMaxFileDescriptor();
if (maxFd > prevMaxFd + queue.concurrency * 2) {
process.stderr.write(
`\n\x1b[31mewarn\x1b[0;2m:\x1b[0m file descriptor leak in ${name}, delta: ${
maxFd - prevMaxFd
}, current: ${maxFd}, previous: ${prevMaxFd}\n`,
);
}
}
const chunks = [];
proc.stdout.on("data", chunk => {
chunks.push(chunk);
if (run_concurrency === 1) process.stdout.write(chunk);
});
proc.stderr.on("data", chunk => {
chunks.push(chunk);
if (run_concurrency === 1) process.stderr.write(chunk);
});
proc.on("exit", (code_, signal_) => {
exitCode = code_;
signal = signal_;
output = Buffer.concat(chunks).toString();
done();
});
proc.on("error", err_ => {
err = err_;
done();
});
});
const passed = exitCode === 0 && !err && !signal;
@@ -369,8 +195,7 @@ Starting "${name}"
}
console.log(
`\x1b[2m${formatTime(duration).padStart(6, " ")}\x1b[0m ${
passed ? "\x1b[32m✔" : expected_crash_reason ? "\x1b[33m⚠" : "\x1b[31m✖"
`\x1b[2m${formatTime(duration).padStart(6, " ")}\x1b[0m ${passed ? "\x1b[32m✔" : expected_crash_reason ? "\x1b[33m⚠" : "\x1b[31m✖"
} ${name}\x1b[0m${reason ? ` (${reason})` : ""}`,
);
@@ -392,7 +217,6 @@ Starting "${name}"
}
failing_tests.push({ path: name, reason, output, expected_crash_reason });
process.exitCode = 1;
if (err) console.error(err);
} else {
if (windows && expected_crash_reason !== null) {
@@ -401,11 +225,13 @@ Starting "${name}"
passing_tests.push(name);
}
return passed;
}
var finished = 0;
const queue = [...findTests(resolve(cwd, "test"))];
let running = 0;
let total = queue.length;
let finished = 0;
let on_entry_finish = null;
function writeProgressBar() {
const barWidth = Math.min(process.stdout.columns || 40, 80) - 2;
@@ -415,23 +241,34 @@ function writeProgressBar() {
process.stdout.write(`\r${str1}${" ".repeat(barWidth - str1.length)}]`);
}
const allTests = [...findTests(resolve(cwd, "test"))];
console.log(`Starting ${allTests.length} tests with ${run_concurrency} concurrency...`);
let total = allTests.length;
for (const path of allTests) {
queue.add(
async () =>
await runTest(path).catch(e => {
console.error("Bug in bun-internal-test");
console.error(e);
process.exit(1);
}),
);
while (queue.length > 0) {
if (running >= run_concurrency) {
await new Promise(resolve => (on_entry_finish = resolve));
continue;
}
const path = queue.shift();
running++;
runTest(path)
.catch(e => {
console.error("Bug in bun-internal-test");
console.error(e);
process.exit(1);
})
.finally(() => {
running--;
if (on_entry_finish) {
on_entry_finish();
on_entry_finish = null;
}
});
}
while (running > 0) {
await Promise.race([
new Promise(resolve => (on_entry_finish = resolve)),
new Promise(resolve => setTimeout(resolve, 1000)),
]);
}
await queue.onIdle();
console.log(`
Completed ${total} tests with ${failing_tests.length} failing tests
`);
console.log("\n");
function linkToGH(linkTo) {
@@ -442,13 +279,10 @@ function sectionLink(linkTo) {
return "#" + linkTo.replace(/[^a-zA-Z0-9_-]/g, "").toLowerCase();
}
failing_tests.sort((a, b) => a.path.localeCompare(b.path));
passing_tests.sort((a, b) => a.localeCompare(b));
const failingTestDisplay = failing_tests
.filter(({ reason }) => !regressions.some(({ path }) => path === path))
.map(({ path, reason }) => `- [\`${path}\`](${sectionLink(path)})${reason ? ` ${reason}` : ""}`)
.join("\n");
// const passingTestDisplay = passing_tests.map(path => `- \`${path}\``).join("\n");
rmSync("report.md", { force: true });
@@ -485,10 +319,9 @@ console.log("\n" + "-".repeat(Math.min(process.stdout.columns || 40, 80)) + "\n"
console.log(header);
console.log("\n" + "-".repeat(Math.min(process.stdout.columns || 40, 80)) + "\n");
let report = `# bun test on ${
process.env["GITHUB_REF"] ??
let report = `# bun test on ${process.env["GITHUB_REF"] ??
spawnSync("git", ["rev-parse", "--abbrev-ref", "HEAD"], { encoding: "utf-8" }).stdout.trim()
}
}
\`\`\`
${header}
@@ -512,8 +345,7 @@ if (regressions.length > 0) {
report += regressions
.map(
({ path, reason, expected_crash_reason }) =>
`- [\`${path}\`](${sectionLink(path)}) ${reason}${
expected_crash_reason ? ` (expected: ${expected_crash_reason})` : ""
`- [\`${path}\`](${sectionLink(path)}) ${reason}${expected_crash_reason ? ` (expected: ${expected_crash_reason})` : ""
}`,
)
.join("\n");
@@ -521,7 +353,7 @@ if (regressions.length > 0) {
}
if (failingTestDisplay.length > 0) {
report += `## Failing tests\n\n`;
report += `## ${windows ? "Known " : ""}Failing tests\n\n`;
report += failingTestDisplay;
report += "\n\n";
}
@@ -590,4 +422,4 @@ if (ci) {
}
}
process.exit(failing_tests.length ? 1 : process.exitCode);
process.exit(failing_tests.length ? 1 : 0);

Binary file not shown.

View File

@@ -32,20 +32,20 @@ export class PublishCommand extends BuildCommand {
}
const { layer, region, arch, output, public: isPublic } = flags;
if (region.includes("*")) {
// prettier-ignore
// biome-ignore: format ignore
const result = this.#aws(["ec2", "describe-regions", "--query", "Regions[].RegionName", "--output", "json"]);
region.length = 0;
for (const name of JSON.parse(result)) {
region.push(name);
}
} else if (!region.length) {
// prettier-ignore
// biome-ignore: format ignore
region.push(this.#aws(["configure", "get", "region"]));
}
this.log("Publishing...");
for (const regionName of region) {
for (const layerName of layer) {
// prettier-ignore
// biome-ignore: format ignore
const result = this.#aws([
"lambda",
"publish-layer-version",
@@ -70,7 +70,7 @@ export class PublishCommand extends BuildCommand {
const { LayerVersionArn } = JSON.parse(result);
this.log("Published", LayerVersionArn);
if (isPublic) {
// prettier-ignore
// biome-ignore: format ignore
this.#aws([
"lambda",
"add-layer-version-permission",

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@@ -47,59 +47,9 @@ declare module "bun" {
*/
function which(command: string, options?: { PATH?: string; cwd?: string }): string | null;
/**
* Get the column count of a string as it would be displayed in a terminal.
* Supports ANSI escape codes, emoji, and wide characters.
*
* This is useful for:
* - Aligning text in a terminal
* - Quickly checking if a string contains ANSI escape codes
* - Measuring the width of a string in a terminal
*
* This API is designed to match the popular "string-width" package, so that
* existing code can be easily ported to Bun and vice versa.
*
* @returns The width of the string in columns
*
* ## Examples
* @example
* ```ts
* import { stringWidth } from "bun";
*
* console.log(stringWidth("abc")); // 3
* console.log(stringWidth("👩‍👩‍👧‍👦")); // 1
* console.log(stringWidth("\u001b[31mhello\u001b[39m")); // 5
* console.log(stringWidth("\u001b[31mhello\u001b[39m", { countAnsiEscapeCodes: false })); // 5
* console.log(stringWidth("\u001b[31mhello\u001b[39m", { countAnsiEscapeCodes: true })); // 13
* ```
*
*/
function stringWidth(
/**
* The string to measure
*/
input: string,
options?: {
/**
* If `true`, count ANSI escape codes as part of the string width. If `false`, ANSI escape codes are ignored when calculating the string width.
*
* @default false
*/
countAnsiEscapeCodes?: boolean;
/**
* When it's ambiugous and `true`, count emoji as 1 characters wide. If `false`, emoji are counted as 2 character wide.
*
* @default true
*/
ambiguousIsNarrow?: boolean;
},
): number;
export type ShellFunction = (input: Uint8Array) => Uint8Array;
export type ShellExpression =
| { toString(): string }
| Array<ShellExpression>
| string
| { raw: string }
| Subprocess
@@ -107,75 +57,6 @@ declare module "bun" {
| SpawnOptions.Writable
| ReadableStream;
class ShellError extends Error implements ShellOutput {
readonly stdout: Buffer;
readonly stderr: Buffer;
readonly exitCode: number;
/**
* Read from stdout as a string
*
* @param encoding - The encoding to use when decoding the output
* @returns Stdout as a string with the given encoding
* @example
*
* ## Read as UTF-8 string
*
* ```ts
* const output = await $`echo hello`;
* console.log(output.text()); // "hello\n"
* ```
*
* ## Read as base64 string
*
* ```ts
* const output = await $`echo ${atob("hello")}`;
* console.log(output.text("base64")); // "hello\n"
* ```
*
*/
text(encoding?: BufferEncoding): string;
/**
* Read from stdout as a JSON object
*
* @returns Stdout as a JSON object
* @example
*
* ```ts
* const output = await $`echo '{"hello": 123}'`;
* console.log(output.json()); // { hello: 123 }
* ```
*
*/
json(): any;
/**
* Read from stdout as an ArrayBuffer
*
* @returns Stdout as an ArrayBuffer
* @example
*
* ```ts
* const output = await $`echo hello`;
* console.log(output.arrayBuffer()); // ArrayBuffer { byteLength: 6 }
* ```
*/
arrayBuffer(): ArrayBuffer;
/**
* Read from stdout as a Blob
*
* @returns Stdout as a blob
* @example
* ```ts
* const output = await $`echo hello`;
* console.log(output.blob()); // Blob { size: 6, type: "" }
* ```
*/
blob(): Blob;
}
class ShellPromise extends Promise<ShellOutput> {
get stdin(): WritableStream;
/**
@@ -275,16 +156,6 @@ declare module "bun" {
* ```
*/
blob(): Promise<Blob>;
/**
* Configure the shell to not throw an exception on non-zero exit codes.
*/
nothrow(): this;
/**
* Configure whether or not the shell should throw an exception on non-zero exit codes.
*/
throws(shouldThrow: boolean): this;
}
interface ShellConstructor {
@@ -336,16 +207,6 @@ declare module "bun" {
*/
cwd(newCwd?: string): this;
/**
* Configure the shell to not throw an exception on non-zero exit codes.
*/
nothrow(): this;
/**
* Configure whether or not the shell should throw an exception on non-zero exit codes.
*/
throws(shouldThrow: boolean): this;
readonly ShellPromise: typeof ShellPromise;
readonly Shell: ShellConstructor;
}
@@ -354,69 +215,6 @@ declare module "bun" {
readonly stdout: Buffer;
readonly stderr: Buffer;
readonly exitCode: number;
/**
* Read from stdout as a string
*
* @param encoding - The encoding to use when decoding the output
* @returns Stdout as a string with the given encoding
* @example
*
* ## Read as UTF-8 string
*
* ```ts
* const output = await $`echo hello`;
* console.log(output.text()); // "hello\n"
* ```
*
* ## Read as base64 string
*
* ```ts
* const output = await $`echo ${atob("hello")}`;
* console.log(output.text("base64")); // "hello\n"
* ```
*
*/
text(encoding?: BufferEncoding): string;
/**
* Read from stdout as a JSON object
*
* @returns Stdout as a JSON object
* @example
*
* ```ts
* const output = await $`echo '{"hello": 123}'`;
* console.log(output.json()); // { hello: 123 }
* ```
*
*/
json(): any;
/**
* Read from stdout as an ArrayBuffer
*
* @returns Stdout as an ArrayBuffer
* @example
*
* ```ts
* const output = await $`echo hello`;
* console.log(output.arrayBuffer()); // ArrayBuffer { byteLength: 6 }
* ```
*/
arrayBuffer(): ArrayBuffer;
/**
* Read from stdout as a Blob
*
* @returns Stdout as a blob
* @example
* ```ts
* const output = await $`echo hello`;
* console.log(output.blob()); // Blob { size: 6, type: "" }
* ```
*/
blob(): Blob;
}
export const $: Shell;
@@ -825,7 +623,7 @@ declare module "bun" {
* console.log(path); // "/foo/bar.txt"
* ```
*/
function fileURLToPath(url: URL | string): string;
function fileURLToPath(url: URL): string;
/**
* Fast incremental writer that becomes an `ArrayBuffer` on end().
@@ -1444,14 +1242,6 @@ declare module "bun" {
// origin?: string; // e.g. http://mydomain.com
loader?: { [k in string]: Loader };
sourcemap?: "none" | "inline" | "external"; // default: "none"
/**
* package.json `exports` conditions used when resolving imports
*
* Equivalent to `--conditions` in `bun build` or `bun run`.
*
* https://nodejs.org/api/packages.html#exports
*/
conditions?: Array<string> | string;
minify?:
| boolean
| {
@@ -2041,7 +1831,6 @@ declare module "bun" {
* return new Response("Hello World");
* },
* });
* ```
*/
interface WebSocketHandler<T = undefined> {
/**
@@ -4132,33 +3921,22 @@ declare module "bun" {
/**
* If true, the subprocess will have a hidden window.
*/
windowsHide?: boolean;
/**
* Path to the executable to run in the subprocess. This defaults to `cmds[0]`.
*
* One use-case for this is for applications which wrap other applications or to simulate a symlink.
*
* @default cmds[0]
*/
argv0?: string;
// windowsHide?: boolean;
}
type OptionsToSubprocess<Opts extends OptionsObject> =
Opts extends OptionsObject<infer In, infer Out, infer Err>
? Subprocess<
// "Writable extends In" means "if In === Writable",
// aka if true that means the user didn't specify anything
Writable extends In ? "ignore" : In,
Readable extends Out ? "pipe" : Out,
Readable extends Err ? "inherit" : Err
>
: Subprocess<Writable, Readable, Readable>;
type OptionsToSubprocess<Opts extends OptionsObject> = Opts extends OptionsObject<infer In, infer Out, infer Err>
? Subprocess<
// "Writable extends In" means "if In === Writable",
// aka if true that means the user didn't specify anything
Writable extends In ? "ignore" : In,
Readable extends Out ? "pipe" : Out,
Readable extends Err ? "inherit" : Err
>
: Subprocess<Writable, Readable, Readable>;
type OptionsToSyncSubprocess<Opts extends OptionsObject> =
Opts extends OptionsObject<any, infer Out, infer Err>
? SyncSubprocess<Readable extends Out ? "pipe" : Out, Readable extends Err ? "pipe" : Err>
: SyncSubprocess<Readable, Readable>;
type OptionsToSyncSubprocess<Opts extends OptionsObject> = Opts extends OptionsObject<any, infer Out, infer Err>
? SyncSubprocess<Readable extends Out ? "pipe" : Out, Readable extends Err ? "pipe" : Err>
: SyncSubprocess<Readable, Readable>;
type ReadableIO = ReadableStream<Uint8Array> | number | undefined;
@@ -4195,19 +3973,19 @@ declare module "bun" {
};
/**
* The amount of CPU time used by the process, in microseconds.
* The amount of CPU time used by the process, in nanoseconds.
*/
cpuTime: {
/**
* User CPU time used by the process, in microseconds.
* User CPU time used by the process, in nanoseconds.
*/
user: number;
/**
* System CPU time used by the process, in microseconds.
* System CPU time used by the process, in nanoseconds.
*/
system: number;
/**
* Total CPU time used by the process, in microseconds.
* Total CPU time used by the process, in nanoseconds.
*/
total: number;
};
@@ -4325,7 +4103,7 @@ declare module "bun" {
* Kill the process
* @param exitCode The exitCode to send to the process
*/
kill(exitCode?: number | NodeJS.Signals): void;
kill(exitCode?: number): void;
/**
* This method will tell Bun to wait for this process to exit after you already
@@ -4385,8 +4163,6 @@ declare module "bun" {
* Get the resource usage information of the process (max RSS, CPU time, etc)
*/
resourceUsage: ResourceUsage;
signalCode?: string;
}
/**
@@ -4482,8 +4258,6 @@ declare module "bun" {
* ```
*/
cmd: string[];
onExit?: never;
},
): SpawnOptions.OptionsToSyncSubprocess<Opts>;

Binary file not shown.

View File

@@ -584,7 +584,7 @@ declare global {
/**
* If set, specifies the initial value of process.env inside the Worker thread. As a special value, worker.SHARE_ENV may be used to specify that the parent thread and the child thread should share their environment variables; in that case, changes to one thread's process.env object affect the other thread as well. Default: process.env.
*/
env?: Record<string, string> | (typeof import("node:worker_threads"))["SHARE_ENV"] | undefined;
env?: Record<string, string> | typeof import("node:worker_threads")["SHARE_ENV"] | undefined;
/**
* In Bun, this does nothing.

View File

@@ -91,9 +91,7 @@ declare module "bun:test" {
interface Jest {
restoreAllMocks(): void;
clearAllMocks(): void;
fn<T extends (...args: any[]) => any>(func?: T): Mock<T>;
setSystemTime(now?: number | Date): void;
}
export const jest: Jest;
export namespace jest {
@@ -547,16 +545,6 @@ declare module "bun:test" {
* ```
*/
unreachable(msg?: string | Error): never;
/**
* Ensures that an assertion is made
*/
hasAssertions(): void;
/**
* Ensures that a specific number of assertions are made
*/
assertions(neededAssertions: number): void;
}
/**
@@ -884,19 +872,6 @@ declare module "bun:test" {
* @param expected the expected value
*/
toStrictEqual(expected: T): void;
/**
* Asserts that the value is deep equal to an element in the expected array.
*
* The value must be an array or iterable, which includes strings.
*
* @example
* expect(1).toBeOneOf([1,2,3]);
* expect("foo").toBeOneOf(["foo", "bar"]);
* expect(true).toBeOneOf(new Set([true]));
*
* @param expected the expected value
*/
toBeOneOf(expected: Array<unknown> | Iterable<unknown>): void;
/**
* Asserts that a value contains what is expected.
*
@@ -1127,27 +1102,6 @@ declare module "bun:test" {
* @param expected the expected error, error message, or error pattern
*/
toThrow(expected?: unknown): void;
/**
* Asserts that a function throws an error.
*
* - If expected is a `string` or `RegExp`, it will check the `message` property.
* - If expected is an `Error` object, it will check the `name` and `message` properties.
* - If expected is an `Error` constructor, it will check the class of the `Error`.
* - If expected is not provided, it will check if anything as thrown.
*
* @example
* function fail() {
* throw new Error("Oops!");
* }
* expect(fail).toThrowError("Oops!");
* expect(fail).toThrowError(/oops/i);
* expect(fail).toThrowError(Error);
* expect(fail).toThrowError();
*
* @param expected the expected error, error message, or error pattern
* @alias toThrow
*/
toThrowError(expected?: unknown): void;
/**
* Asserts that a value matches a regular expression or includes a substring.
*
@@ -1431,47 +1385,22 @@ declare module "bun:test" {
* Ensures that a mock function is called.
*/
toHaveBeenCalled(): void;
/**
* Ensures that a mock function is called an exact number of times.
* @alias toHaveBeenCalled
*/
toBeCalled(): void;
/**
* Ensures that a mock function is called an exact number of times.
*/
toHaveBeenCalledTimes(expected: number): void;
/**
* Ensure that a mock function is called with specific arguments.
* @alias toHaveBeenCalledTimes
*/
toBeCalledTimes(expected: number): void;
/**
* Ensure that a mock function is called with specific arguments.
*/
toHaveBeenCalledWith(...expected: unknown[]): void;
/**
* Ensure that a mock function is called with specific arguments.
* @alias toHaveBeenCalledWith
*/
toBeCalledWith(...expected: unknown[]): void;
/**
* Ensure that a mock function is called with specific arguments for the last call.
*/
toHaveBeenLastCalledWith(...expected: unknown[]): void;
/**
* Ensure that a mock function is called with specific arguments for the nth call.
* @alias toHaveBeenCalledWith
*/
lastCalledWith(...expected: unknown[]): void;
/**
* Ensure that a mock function is called with specific arguments for the nth call.
*/
toHaveBeenNthCalledWith(n: number, ...expected: unknown[]): void;
/**
* Ensure that a mock function is called with specific arguments for the nth call.
* @alias toHaveBeenCalledWith
*/
nthCalledWith(n: number, ...expected: unknown[]): void;
}
/**

View File

@@ -392,12 +392,7 @@ LIBUS_SOCKET_DESCRIPTOR bsd_accept_socket(LIBUS_SOCKET_DESCRIPTOR fd, struct bsd
internal_finalize_bsd_addr(addr);
#if defined(SOCK_CLOEXEC) && defined(SOCK_NONBLOCK)
// skip the extra fcntl calls.
return accepted_fd;
#else
return bsd_set_nonblocking(apple_no_sigpipe(accepted_fd));
#endif
}
int bsd_recv(LIBUS_SOCKET_DESCRIPTOR fd, void *buf, int length, int flags) {
@@ -478,7 +473,7 @@ inline __attribute__((always_inline)) LIBUS_SOCKET_DESCRIPTOR bsd_bind_listen_fd
setsockopt(listenFd, SOL_SOCKET, SO_REUSEADDR, (void *) &optval3, sizeof(optval3));
}
#else
#if /*defined(__linux__) &&*/ defined(SO_REUSEPORT)
#if /*defined(__linux) &&*/ defined(SO_REUSEPORT)
if (!(options & LIBUS_LISTEN_EXCLUSIVE_PORT)) {
int optval = 1;
setsockopt(listenFd, SOL_SOCKET, SO_REUSEPORT, (void *) &optval, sizeof(optval));
@@ -567,91 +562,8 @@ LIBUS_SOCKET_DESCRIPTOR bsd_create_listen_socket(const char *host, int port, int
#endif
#include <sys/stat.h>
#include <stddef.h>
LIBUS_SOCKET_DESCRIPTOR bsd_create_listen_socket_unix(const char *path, int options) {
static int bsd_create_unix_socket_address(const char *path, size_t path_len, int* dirfd_linux_workaround_for_unix_path_len, struct sockaddr_un *server_address, size_t* addrlen) {
memset(server_address, 0, sizeof(struct sockaddr_un));
server_address->sun_family = AF_UNIX;
if (path_len == 0) {
#if defined(_WIN32)
// simulate ENOENT
SetLastError(ERROR_PATH_NOT_FOUND);
#else
errno = ENOENT;
#endif
return LIBUS_SOCKET_ERROR;
}
*addrlen = sizeof(struct sockaddr_un);
#if defined(__linux__)
// Unix socket addresses have a maximum length of 108 bytes on Linux
// As a workaround, we can use /proc/self/fd/ as a directory to shorten the path
if (path_len >= sizeof(server_address->sun_path) && path[0] != '\0') {
size_t dirname_len = path_len;
// get the basename
while (dirname_len > 1 && path[dirname_len - 1] != '/') {
dirname_len--;
}
// if the path is just a single character, or the path is too long, we cannot use this method
if (dirname_len < 2 || (path_len - dirname_len + 1) >= sizeof(server_address->sun_path)) {
errno = ENAMETOOLONG;
return LIBUS_SOCKET_ERROR;
}
char dirname_buf[4096];
if (dirname_len + 1 > sizeof(dirname_buf)) {
errno = ENAMETOOLONG;
return LIBUS_SOCKET_ERROR;
}
memcpy(dirname_buf, path, dirname_len);
dirname_buf[dirname_len] = 0;
int socket_dir_fd = open(dirname_buf, O_CLOEXEC | O_PATH | O_DIRECTORY, 0700);
if (socket_dir_fd == -1) {
errno = ENAMETOOLONG;
return LIBUS_SOCKET_ERROR;
}
int sun_path_len = snprintf(server_address->sun_path, sizeof(server_address->sun_path), "/proc/self/fd/%d/%s", socket_dir_fd, path + dirname_len);
if (sun_path_len >= sizeof(server_address->sun_path) || sun_path_len < 0) {
close(socket_dir_fd);
errno = ENAMETOOLONG;
return LIBUS_SOCKET_ERROR;
}
*dirfd_linux_workaround_for_unix_path_len = socket_dir_fd;
return 0;
} else if (path_len < sizeof(server_address->sun_path)) {
memcpy(server_address->sun_path, path, path_len);
// abstract domain sockets
if (server_address->sun_path[0] == 0) {
*addrlen = offsetof(struct sockaddr_un, sun_path) + path_len;
}
return 0;
}
#endif
if (path_len >= sizeof(server_address->sun_path)) {
#if defined(_WIN32)
// simulate ENAMETOOLONG
SetLastError(ERROR_FILENAME_EXCED_RANGE);
#else
errno = ENAMETOOLONG;
#endif
return LIBUS_SOCKET_ERROR;
}
memcpy(server_address->sun_path, path, path_len);
return 0;
}
static LIBUS_SOCKET_DESCRIPTOR internal_bsd_create_listen_socket_unix(const char* path, int options, struct sockaddr_un* server_address, size_t addrlen) {
LIBUS_SOCKET_DESCRIPTOR listenFd = LIBUS_SOCKET_ERROR;
listenFd = bsd_create_socket(AF_UNIX, SOCK_STREAM, 0);
@@ -667,47 +579,25 @@ static LIBUS_SOCKET_DESCRIPTOR internal_bsd_create_listen_socket_unix(const char
_chmod(path, S_IREAD | S_IWRITE | S_IEXEC);
#endif
struct sockaddr_un server_address;
memset(&server_address, 0, sizeof(server_address));
server_address.sun_family = AF_UNIX;
strcpy(server_address.sun_path, path);
int size = offsetof(struct sockaddr_un, sun_path) + strlen(server_address.sun_path);
#ifdef _WIN32
_unlink(path);
#else
unlink(path);
#endif
if (bind(listenFd, (struct sockaddr *)server_address, addrlen) || listen(listenFd, 512)) {
#if defined(_WIN32)
int shouldSimulateENOENT = WSAGetLastError() == WSAENETDOWN;
#endif
if (bind(listenFd, (struct sockaddr *)&server_address, size) || listen(listenFd, 512)) {
bsd_close_socket(listenFd);
#if defined(_WIN32)
if (shouldSimulateENOENT) {
SetLastError(ERROR_PATH_NOT_FOUND);
}
#endif
return LIBUS_SOCKET_ERROR;
}
return listenFd;
}
LIBUS_SOCKET_DESCRIPTOR bsd_create_listen_socket_unix(const char *path, size_t len, int options) {
int dirfd_linux_workaround_for_unix_path_len = -1;
struct sockaddr_un server_address;
size_t addrlen = 0;
if (bsd_create_unix_socket_address(path, len, &dirfd_linux_workaround_for_unix_path_len, &server_address, &addrlen)) {
return LIBUS_SOCKET_ERROR;
}
LIBUS_SOCKET_DESCRIPTOR listenFd = internal_bsd_create_listen_socket_unix(path, options, &server_address, addrlen);
#if defined(__linux__)
if (dirfd_linux_workaround_for_unix_path_len != -1) {
close(dirfd_linux_workaround_for_unix_path_len);
}
#endif
return listenFd;
}
LIBUS_SOCKET_DESCRIPTOR bsd_create_udp_socket(const char *host, int port) {
struct addrinfo hints, *result;
memset(&hints, 0, sizeof(struct addrinfo));
@@ -724,7 +614,7 @@ LIBUS_SOCKET_DESCRIPTOR bsd_create_udp_socket(const char *host, int port) {
}
LIBUS_SOCKET_DESCRIPTOR listenFd = LIBUS_SOCKET_ERROR;
struct addrinfo *listenAddr = NULL;
struct addrinfo *listenAddr;
for (struct addrinfo *a = result; a && listenFd == LIBUS_SOCKET_ERROR; a = a->ai_next) {
if (a->ai_family == AF_INET6) {
listenFd = bsd_create_socket(a->ai_family, a->ai_socktype, a->ai_protocol);
@@ -968,44 +858,24 @@ LIBUS_SOCKET_DESCRIPTOR bsd_create_connect_socket(const char *host, int port, co
#endif
}
static LIBUS_SOCKET_DESCRIPTOR internal_bsd_create_connect_socket_unix(const char *server_path, size_t len, int options, struct sockaddr_un* server_address, const size_t addrlen) {
LIBUS_SOCKET_DESCRIPTOR bsd_create_connect_socket_unix(const char *server_path, int options) {
struct sockaddr_un server_address;
memset(&server_address, 0, sizeof(server_address));
server_address.sun_family = AF_UNIX;
strcpy(server_address.sun_path, server_path);
int size = offsetof(struct sockaddr_un, sun_path) + strlen(server_address.sun_path);
LIBUS_SOCKET_DESCRIPTOR fd = bsd_create_socket(AF_UNIX, SOCK_STREAM, 0);
if (fd == LIBUS_SOCKET_ERROR) {
return LIBUS_SOCKET_ERROR;
}
if (connect(fd, (struct sockaddr *)server_address, addrlen) != 0 && errno != EINPROGRESS) {
#if defined(_WIN32)
int shouldSimulateENOENT = WSAGetLastError() == WSAENETDOWN;
#endif
if (connect(fd, (struct sockaddr *)&server_address, size) != 0 && errno != EINPROGRESS) {
bsd_close_socket(fd);
#if defined(_WIN32)
if (shouldSimulateENOENT) {
SetLastError(ERROR_PATH_NOT_FOUND);
}
#endif
return LIBUS_SOCKET_ERROR;
}
return fd;
}
LIBUS_SOCKET_DESCRIPTOR bsd_create_connect_socket_unix(const char *server_path, size_t len, int options) {
struct sockaddr_un server_address;
size_t addrlen = 0;
int dirfd_linux_workaround_for_unix_path_len = -1;
if (bsd_create_unix_socket_address(server_path, len, &dirfd_linux_workaround_for_unix_path_len, &server_address, &addrlen)) {
return LIBUS_SOCKET_ERROR;
}
LIBUS_SOCKET_DESCRIPTOR fd = internal_bsd_create_connect_socket_unix(server_path, len, options, &server_address, addrlen);
#if defined(__linux__)
if (dirfd_linux_workaround_for_unix_path_len != -1) {
close(dirfd_linux_workaround_for_unix_path_len);
}
#endif
return fd;
}

View File

@@ -221,10 +221,19 @@ struct us_socket_context_t *us_create_socket_context(int ssl, struct us_loop_t *
/* This path is taken once either way - always BEFORE whatever SSL may do LATER.
* context_ext_size will however be modified larger in case of SSL, to hold SSL extensions */
struct us_socket_context_t *context = us_calloc(1, sizeof(struct us_socket_context_t) + context_ext_size);
struct us_socket_context_t *context = us_malloc(sizeof(struct us_socket_context_t) + context_ext_size);
context->loop = loop;
context->head_sockets = 0;
context->head_listen_sockets = 0;
context->iterator = 0;
context->next = 0;
context->is_low_prio = default_is_low_prio_handler;
/* Begin at 0 */
context->timestamp = 0;
context->long_timestamp = 0;
context->global_tick = 0;
us_internal_loop_link(loop, context);
/* If we are called from within SSL code, SSL code will make further changes to us */
@@ -242,10 +251,19 @@ struct us_socket_context_t *us_create_bun_socket_context(int ssl, struct us_loop
/* This path is taken once either way - always BEFORE whatever SSL may do LATER.
* context_ext_size will however be modified larger in case of SSL, to hold SSL extensions */
struct us_socket_context_t *context = us_calloc(1, sizeof(struct us_socket_context_t) + context_ext_size);
struct us_socket_context_t *context = us_malloc(sizeof(struct us_socket_context_t) + context_ext_size);
context->loop = loop;
context->head_sockets = 0;
context->head_listen_sockets = 0;
context->iterator = 0;
context->next = 0;
context->is_low_prio = default_is_low_prio_handler;
/* Begin at 0 */
context->timestamp = 0;
context->long_timestamp = 0;
context->global_tick = 0;
us_internal_loop_link(loop, context);
/* If we are called from within SSL code, SSL code will make further changes to us */
@@ -312,14 +330,14 @@ struct us_listen_socket_t *us_socket_context_listen(int ssl, struct us_socket_co
return ls;
}
struct us_listen_socket_t *us_socket_context_listen_unix(int ssl, struct us_socket_context_t *context, const char *path, size_t pathlen, int options, int socket_ext_size) {
struct us_listen_socket_t *us_socket_context_listen_unix(int ssl, struct us_socket_context_t *context, const char *path, int options, int socket_ext_size) {
#ifndef LIBUS_NO_SSL
if (ssl) {
return us_internal_ssl_socket_context_listen_unix((struct us_internal_ssl_socket_context_t *) context, path, pathlen, options, socket_ext_size);
return us_internal_ssl_socket_context_listen_unix((struct us_internal_ssl_socket_context_t *) context, path, options, socket_ext_size);
}
#endif
LIBUS_SOCKET_DESCRIPTOR listen_socket_fd = bsd_create_listen_socket_unix(path, pathlen, options);
LIBUS_SOCKET_DESCRIPTOR listen_socket_fd = bsd_create_listen_socket_unix(path, options);
if (listen_socket_fd == LIBUS_SOCKET_ERROR) {
return 0;
@@ -372,14 +390,14 @@ struct us_socket_t *us_socket_context_connect(int ssl, struct us_socket_context_
return connect_socket;
}
struct us_socket_t *us_socket_context_connect_unix(int ssl, struct us_socket_context_t *context, const char *server_path, size_t pathlen, int options, int socket_ext_size) {
struct us_socket_t *us_socket_context_connect_unix(int ssl, struct us_socket_context_t *context, const char *server_path, int options, int socket_ext_size) {
#ifndef LIBUS_NO_SSL
if (ssl) {
return (struct us_socket_t *) us_internal_ssl_socket_context_connect_unix((struct us_internal_ssl_socket_context_t *) context, server_path, pathlen, options, socket_ext_size);
return (struct us_socket_t *) us_internal_ssl_socket_context_connect_unix((struct us_internal_ssl_socket_context_t *) context, server_path, options, socket_ext_size);
}
#endif
LIBUS_SOCKET_DESCRIPTOR connect_socket_fd = bsd_create_connect_socket_unix(server_path, pathlen, options);
LIBUS_SOCKET_DESCRIPTOR connect_socket_fd = bsd_create_connect_socket_unix(server_path, options);
if (connect_socket_fd == LIBUS_SOCKET_ERROR) {
return 0;
}

View File

@@ -44,6 +44,8 @@ void *sni_find(void *sni, const char *hostname);
#endif
#include "./root_certs.h"
static X509 *root_cert_instances[sizeof(root_certs) / sizeof(root_certs[0])] = {
NULL};
/* These are in root_certs.cpp */
extern X509_STORE *us_get_default_ca_store();
@@ -542,6 +544,7 @@ ssl_on_writable(struct us_internal_ssl_socket_t *s) {
0); // cast here!
}
// Do not call on_writable if the socket is closed.
// on close means the socket data is no longer accessible
if (!s || us_socket_is_closed(0, &s->s)) {
@@ -557,7 +560,7 @@ ssl_on_writable(struct us_internal_ssl_socket_t *s) {
void us_internal_init_loop_ssl_data(struct us_loop_t *loop) {
if (!loop->data.ssl_data) {
struct loop_ssl_data *loop_ssl_data =
us_calloc(1, sizeof(struct loop_ssl_data));
us_malloc(sizeof(struct loop_ssl_data));
loop_ssl_data->ssl_read_input_length = 0;
loop_ssl_data->ssl_read_input_offset = 0;
loop_ssl_data->last_write_was_msg_more = 0;
@@ -803,14 +806,18 @@ int add_ca_cert_to_ctx_store(SSL_CTX *ctx, const char *content,
X509_STORE *store) {
X509 *x = NULL;
BIO *in;
ERR_clear_error(); // clear error stack for SSL_CTX_use_certificate()
int count = 0;
BIO *in = BIO_new_mem_buf(content, strlen(content));
in = BIO_new_mem_buf(content, strlen(content));
if (in == NULL) {
OPENSSL_PUT_ERROR(SSL, ERR_R_BUF_LIB);
goto end;
}
int count = 0;
while ((x = PEM_read_bio_X509(in, NULL, SSL_CTX_get_default_passwd_cb(ctx),
SSL_CTX_get_default_passwd_cb_userdata(ctx)))) {
@@ -1216,10 +1223,10 @@ void us_internal_ssl_socket_context_add_server_name(
if (ssl_context) {
/* Attach the user data to this context */
if (1 != SSL_CTX_set_ex_data(ssl_context, 0, user)) {
#if BUN_DEBUG
printf("CANNOT SET EX DATA!\n");
abort();
#endif
#if BUN_DEBUG
printf("CANNOT SET EX DATA!\n");
abort();
#endif
}
/* * We do not want to hold any nullptr's in our SNI tree */
@@ -1440,8 +1447,8 @@ struct us_listen_socket_t *us_internal_ssl_socket_context_listen(
struct us_listen_socket_t *us_internal_ssl_socket_context_listen_unix(
struct us_internal_ssl_socket_context_t *context, const char *path,
size_t pathlen, int options, int socket_ext_size) {
return us_socket_context_listen_unix(0, &context->sc, path, pathlen, options,
int options, int socket_ext_size) {
return us_socket_context_listen_unix(0, &context->sc, path, options,
sizeof(struct us_internal_ssl_socket_t) -
sizeof(struct us_socket_t) +
socket_ext_size);
@@ -1458,9 +1465,9 @@ struct us_internal_ssl_socket_t *us_internal_ssl_socket_context_connect(
struct us_internal_ssl_socket_t *us_internal_ssl_socket_context_connect_unix(
struct us_internal_ssl_socket_context_t *context, const char *server_path,
size_t pathlen, int options, int socket_ext_size) {
int options, int socket_ext_size) {
return (struct us_internal_ssl_socket_t *)us_socket_context_connect_unix(
0, &context->sc, server_path, pathlen, options,
0, &context->sc, server_path, options,
sizeof(struct us_internal_ssl_socket_t) - sizeof(struct us_socket_t) +
socket_ext_size);
}

View File

@@ -29,10 +29,9 @@ void Bun__internal_dispatch_ready_poll(void* loop, void* poll);
#include <unistd.h>
#include <stdint.h>
#include <errno.h>
#include <string.h> // memset
#endif
void us_loop_run_bun_tick(struct us_loop_t *loop, int64_t timeoutMs);
void us_loop_run_bun_tick(struct us_loop_t *loop, int64_t timeoutMs, void*);
/* Pointer tags are used to indicate a Bun pointer versus a uSockets pointer */
#define UNSET_BITS_49_UNTIL_64 0x0000FFFFFFFFFFFF
@@ -110,7 +109,7 @@ struct us_loop_t *us_timer_loop(struct us_timer_t *t) {
/* Loop */
struct us_loop_t *us_create_loop(void *hint, void (*wakeup_cb)(struct us_loop_t *loop), void (*pre_cb)(struct us_loop_t *loop), void (*post_cb)(struct us_loop_t *loop), unsigned int ext_size) {
struct us_loop_t *loop = (struct us_loop_t *) us_calloc(1, sizeof(struct us_loop_t) + ext_size);
struct us_loop_t *loop = (struct us_loop_t *) us_malloc(sizeof(struct us_loop_t) + ext_size);
loop->num_polls = 0;
/* These could be accessed if we close a poll before starting the loop */
loop->num_ready_polls = 0;
@@ -176,7 +175,11 @@ void us_loop_run(struct us_loop_t *loop) {
}
}
void us_loop_run_bun_tick(struct us_loop_t *loop, int64_t timeoutMs) {
void bun_on_tick_before(void* ctx);
void bun_on_tick_after(void* ctx);
void us_loop_run_bun_tick(struct us_loop_t *loop, int64_t timeoutMs, void* tickCallbackContext) {
if (loop->num_polls == 0)
return;
@@ -188,6 +191,10 @@ void us_loop_run_bun_tick(struct us_loop_t *loop, int64_t timeoutMs) {
us_loop_integrate(loop);
}
if (tickCallbackContext) {
bun_on_tick_before(tickCallbackContext);
}
/* Emit pre callback */
us_internal_loop_pre(loop);
@@ -214,6 +221,10 @@ void us_loop_run_bun_tick(struct us_loop_t *loop, int64_t timeoutMs) {
}
#endif
if (tickCallbackContext) {
bun_on_tick_after(tickCallbackContext);
}
/* Iterate ready polls, dispatching them by type */
for (loop->current_ready_poll = 0; loop->current_ready_poll < loop->num_ready_polls; loop->current_ready_poll++) {
struct us_poll_t *poll = GET_READY_POLL(loop, loop->current_ready_poll);
@@ -287,7 +298,7 @@ int kqueue_change(int kqfd, int fd, int old_events, int new_events, void *user_d
EV_SET64(&change_list[change_length++], fd, EVFILT_WRITE, (new_events & LIBUS_SOCKET_WRITABLE) ? EV_ADD : EV_DELETE, 0, 0, (uint64_t)(void*)user_data, 0, 0);
}
int ret = kevent64(kqfd, change_list, change_length, change_list, change_length, KEVENT_FLAG_ERROR_EVENTS, NULL);
int ret = kevent64(kqfd, change_list, change_length, NULL, 0, 0, NULL);
// ret should be 0 in most cases (not guaranteed when removing async)
@@ -381,7 +392,6 @@ unsigned int us_internal_accept_poll_event(struct us_poll_t *p) {
#ifdef LIBUS_USE_EPOLL
struct us_timer_t *us_create_timer(struct us_loop_t *loop, int fallthrough, unsigned int ext_size) {
struct us_poll_t *p = us_create_poll(loop, fallthrough, sizeof(struct us_internal_callback_t) + ext_size);
memset(p, 0, sizeof(struct us_internal_callback_t) + ext_size);
int timerfd = timerfd_create(CLOCK_REALTIME, TFD_NONBLOCK | TFD_CLOEXEC);
if (timerfd == -1) {
return NULL;
@@ -398,7 +408,7 @@ struct us_timer_t *us_create_timer(struct us_loop_t *loop, int fallthrough, unsi
}
#else
struct us_timer_t *us_create_timer(struct us_loop_t *loop, int fallthrough, unsigned int ext_size) {
struct us_internal_callback_t *cb = us_calloc(1, sizeof(struct us_internal_callback_t) + ext_size);
struct us_internal_callback_t *cb = us_malloc(sizeof(struct us_internal_callback_t) + ext_size);
cb->loop = loop;
cb->cb_expects_the_loop = 0;
@@ -458,7 +468,7 @@ void us_timer_close(struct us_timer_t *timer, int fallthrough) {
struct kevent64_s event;
EV_SET64(&event, (uint64_t) (void*) internal_cb, EVFILT_TIMER, EV_DELETE, 0, 0, (uint64_t)internal_cb, 0, 0);
kevent64(internal_cb->loop->fd, &event, 1, &event, 1, KEVENT_FLAG_ERROR_EVENTS, NULL);
kevent64(internal_cb->loop->fd, &event, 1, NULL, 0, 0, NULL);
/* (regular) sockets are the only polls which are not freed immediately */
if(fallthrough){
@@ -477,7 +487,7 @@ void us_timer_set(struct us_timer_t *t, void (*cb)(struct us_timer_t *t), int ms
struct kevent64_s event;
uint64_t ptr = (uint64_t)(void*)internal_cb;
EV_SET64(&event, ptr, EVFILT_TIMER, EV_ADD | (repeat_ms ? 0 : EV_ONESHOT), 0, ms, (uint64_t)internal_cb, 0, 0);
kevent64(internal_cb->loop->fd, &event, 1, &event, 1, KEVENT_FLAG_ERROR_EVENTS, NULL);
kevent64(internal_cb->loop->fd, &event, 1, NULL, 0, 0, NULL);
}
#endif
@@ -485,8 +495,6 @@ void us_timer_set(struct us_timer_t *t, void (*cb)(struct us_timer_t *t), int ms
#ifdef LIBUS_USE_EPOLL
struct us_internal_async *us_internal_create_async(struct us_loop_t *loop, int fallthrough, unsigned int ext_size) {
struct us_poll_t *p = us_create_poll(loop, fallthrough, sizeof(struct us_internal_callback_t) + ext_size);
memset(p, 0, sizeof(struct us_internal_callback_t) + ext_size);
us_poll_init(p, eventfd(0, EFD_NONBLOCK | EFD_CLOEXEC), POLL_TYPE_CALLBACK);
struct us_internal_callback_t *cb = (struct us_internal_callback_t *) p;
@@ -526,7 +534,8 @@ void us_internal_async_wakeup(struct us_internal_async *a) {
#define MACHPORT_BUF_LEN 1024
struct us_internal_async *us_internal_create_async(struct us_loop_t *loop, int fallthrough, unsigned int ext_size) {
struct us_internal_callback_t *cb = us_calloc(1, sizeof(struct us_internal_callback_t) + ext_size);
struct us_internal_callback_t *cb = us_malloc(sizeof(struct us_internal_callback_t) + ext_size);
cb->loop = loop;
cb->cb_expects_the_loop = 1;
cb->leave_poll_ready = 0;
@@ -556,7 +565,7 @@ void us_internal_async_close(struct us_internal_async *a) {
struct kevent64_s event;
uint64_t ptr = (uint64_t)(void*)internal_cb;
EV_SET64(&event, ptr, EVFILT_MACHPORT, EV_DELETE, 0, 0, (uint64_t)(void*)internal_cb, 0,0);
kevent64(internal_cb->loop->fd, &event, 1, &event, 1, KEVENT_FLAG_ERROR_EVENTS, NULL);
kevent64(internal_cb->loop->fd, &event, 1, NULL, 0, 0, NULL);
mach_port_deallocate(mach_task_self(), internal_cb->port);
us_free(internal_cb->machport_buf);
@@ -584,7 +593,7 @@ void us_internal_async_set(struct us_internal_async *a, void (*cb)(struct us_int
event.ext[1] = MACHPORT_BUF_LEN;
event.udata = (uint64_t)(void*)internal_cb;
int ret = kevent64(internal_cb->loop->fd, &event, 1, &event, 1, KEVENT_FLAG_ERROR_EVENTS, NULL);
int ret = kevent64(internal_cb->loop->fd, &event, 1, NULL, 0, 0, NULL);
if (UNLIKELY(ret == -1)) {
abort();

View File

@@ -91,10 +91,6 @@ void us_poll_start(struct us_poll_t *p, struct us_loop_t *loop, int events) {
((events & LIBUS_SOCKET_WRITABLE) ? POLL_TYPE_POLLING_OUT : 0);
uv_poll_init_socket(loop->uv_loop, p->uv_p, p->fd);
// This unref is okay in the context of Bun's event loop, because sockets have
// a `Async.KeepAlive` associated with them, which is used instead of the
// usockets internals. usockets doesnt have a notion of ref-counted handles.
uv_unref((uv_handle_t *)p->uv_p);
uv_poll_start(p->uv_p, events, poll_cb);
}
@@ -201,7 +197,6 @@ void us_loop_free(struct us_loop_t *loop) {
void us_loop_run(struct us_loop_t *loop) {
us_loop_integrate(loop);
uv_update_time(loop->uv_loop);
uv_run(loop->uv_loop, UV_RUN_ONCE);
}
@@ -215,7 +210,7 @@ struct us_poll_t *us_create_poll(struct us_loop_t *loop, int fallthrough,
return p;
}
/* If we update our block position we have to update the uv_poll data to point
/* If we update our block position we have to updarte the uv_poll data to point
* to us */
struct us_poll_t *us_poll_resize(struct us_poll_t *p, struct us_loop_t *loop,
unsigned int ext_size) {
@@ -229,8 +224,8 @@ struct us_poll_t *us_poll_resize(struct us_poll_t *p, struct us_loop_t *loop,
// timer
struct us_timer_t *us_create_timer(struct us_loop_t *loop, int fallthrough,
unsigned int ext_size) {
struct us_internal_callback_t *cb = us_calloc(
1, sizeof(struct us_internal_callback_t) + sizeof(uv_timer_t) + ext_size);
struct us_internal_callback_t *cb = malloc(
sizeof(struct us_internal_callback_t) + sizeof(uv_timer_t) + ext_size);
cb->loop = loop;
cb->cb_expects_the_loop = 0; // never read?
@@ -292,8 +287,8 @@ struct us_loop_t *us_timer_loop(struct us_timer_t *t) {
struct us_internal_async *us_internal_create_async(struct us_loop_t *loop,
int fallthrough,
unsigned int ext_size) {
struct us_internal_callback_t *cb = us_calloc(
1, sizeof(struct us_internal_callback_t) + sizeof(uv_async_t) + ext_size);
struct us_internal_callback_t *cb = malloc(
sizeof(struct us_internal_callback_t) + sizeof(uv_async_t) + ext_size);
cb->loop = loop;
return (struct us_internal_async *)cb;

View File

@@ -289,8 +289,8 @@ struct us_listen_socket_t *us_internal_ssl_socket_context_listen(
int port, int options, int socket_ext_size);
struct us_listen_socket_t *us_internal_ssl_socket_context_listen_unix(
struct us_internal_ssl_socket_context_t *context, const char *path,
size_t pathlen, int options, int socket_ext_size);
struct us_internal_ssl_socket_context_t *context, const char *path,
int options, int socket_ext_size);
struct us_internal_ssl_socket_t *us_internal_ssl_socket_context_connect(
struct us_internal_ssl_socket_context_t *context, const char *host,
@@ -298,7 +298,7 @@ struct us_internal_ssl_socket_t *us_internal_ssl_socket_context_connect(
struct us_internal_ssl_socket_t *us_internal_ssl_socket_context_connect_unix(
struct us_internal_ssl_socket_context_t *context, const char *server_path,
size_t pathlen, int options, int socket_ext_size);
int options, int socket_ext_size);
int us_internal_ssl_socket_write(struct us_internal_ssl_socket_t *s,
const char *data, int length, int msg_more);

View File

@@ -97,14 +97,14 @@ int bsd_would_block();
// listen both on ipv6 and ipv4
LIBUS_SOCKET_DESCRIPTOR bsd_create_listen_socket(const char *host, int port, int options);
LIBUS_SOCKET_DESCRIPTOR bsd_create_listen_socket_unix(const char *path, size_t pathlen, int options);
LIBUS_SOCKET_DESCRIPTOR bsd_create_listen_socket_unix(const char *path, int options);
/* Creates an UDP socket bound to the hostname and port */
LIBUS_SOCKET_DESCRIPTOR bsd_create_udp_socket(const char *host, int port);
LIBUS_SOCKET_DESCRIPTOR bsd_create_connect_socket(const char *host, int port, const char *source_host, int options);
LIBUS_SOCKET_DESCRIPTOR bsd_create_connect_socket_unix(const char *server_path, size_t pathlen, int options);
LIBUS_SOCKET_DESCRIPTOR bsd_create_connect_socket_unix(const char *server_path, int options);
#ifndef MSG_DONTWAIT
#define MSG_DONTWAIT 0

View File

@@ -16,10 +16,6 @@
*/
// clang-format off
#ifndef us_calloc
#define us_calloc calloc
#endif
#ifndef us_malloc
#define us_malloc malloc
#endif
@@ -253,7 +249,7 @@ struct us_listen_socket_t *us_socket_context_listen(int ssl, struct us_socket_co
const char *host, int port, int options, int socket_ext_size);
struct us_listen_socket_t *us_socket_context_listen_unix(int ssl, struct us_socket_context_t *context,
const char *path, size_t pathlen, int options, int socket_ext_size);
const char *path, int options, int socket_ext_size);
/* listen_socket.c/.h */
void us_listen_socket_close(int ssl, struct us_listen_socket_t *ls);
@@ -263,7 +259,7 @@ struct us_socket_t *us_socket_context_connect(int ssl, struct us_socket_context_
const char *host, int port, const char *source_host, int options, int socket_ext_size);
struct us_socket_t *us_socket_context_connect_unix(int ssl, struct us_socket_context_t *context,
const char *server_path, size_t pathlen, int options, int socket_ext_size);
const char *server_path, int options, int socket_ext_size);
/* Is this socket established? Can be used to check if a connecting socket has fired the on_open event yet.
* Can also be used to determine if a socket is a listen_socket or not, but you probably know that already. */
@@ -404,9 +400,6 @@ int us_raw_root_certs(struct us_cert_string_t**out);
unsigned int us_get_remote_address_info(char *buf, struct us_socket_t *s, const char **dest, int *port, int *is_ipv6);
int us_socket_get_error(int ssl, struct us_socket_t *s);
void us_socket_ref(struct us_socket_t *s);
void us_socket_unref(struct us_socket_t *s);
#ifdef __cplusplus
}
#endif

View File

@@ -387,17 +387,3 @@ unsigned int us_get_remote_address_info(char *buf, struct us_socket_t *s, const
return length;
}
void us_socket_ref(struct us_socket_t *s) {
#ifdef LIBUS_USE_LIBUV
uv_ref((uv_handle_t*)s->p.uv_p);
#endif
// do nothing if not using libuv
}
void us_socket_unref(struct us_socket_t *s) {
#ifdef LIBUS_USE_LIBUV
uv_unref((uv_handle_t*)s->p.uv_p);
#endif
// do nothing if not using libuv
}

View File

@@ -27,7 +27,7 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@v4
uses: actions/checkout@v3
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL

View File

@@ -20,12 +20,11 @@ jobs:
language: c++
fuzz-seconds: 600
- name: Upload crash
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
if: failure() && steps.build.outcome == 'success'
with:
name: artifacts
path: ./out/artifacts
if-no-files-found: "error"
build_windows:
runs-on: windows-latest

View File

@@ -575,13 +575,13 @@ public:
/* options, callback, path to unix domain socket */
TemplatedApp &&listen(int options, MoveOnlyFunction<void(us_listen_socket_t *)> &&handler, std::string path) {
handler(httpContext ? httpContext->listen_unix(path.data(), path.length(), options) : nullptr);
handler(httpContext ? httpContext->listen(path.c_str(), options) : nullptr);
return std::move(*this);
}
/* callback, path to unix domain socket */
TemplatedApp &&listen(MoveOnlyFunction<void(us_listen_socket_t *)> &&handler, std::string path, int options) {
handler(httpContext ? httpContext->listen_unix(path.data(), path.length(), options) : nullptr);
TemplatedApp &&listen(MoveOnlyFunction<void(us_listen_socket_t *)> &&handler, std::string path) {
handler(httpContext ? httpContext->listen(path.c_str(), 0) : nullptr);
return std::move(*this);
}

View File

@@ -1,4 +1,3 @@
// clang-format off
/*
* Authored by Alex Hultman, 2018-2020.
* Intellectual property of third-party.
@@ -135,8 +134,6 @@ private:
/* Handle HTTP data streams */
us_socket_context_on_data(SSL, getSocketContext(), [](us_socket_t *s, char *data, int length) {
// ref the socket to make sure we process it entirely before it is closed
us_socket_ref(s);
// total overhead is about 210k down to 180k
// ~210k req/sec is the original perf with write in data
@@ -296,10 +293,6 @@ private:
/* We need to uncork in all cases, except for nullptr (closed socket, or upgraded socket) */
if (returnedSocket != nullptr) {
us_socket_t* returnedSocketPtr = (us_socket_t*) returnedSocket;
/* We don't want open sockets to keep the event loop alive between HTTP requests */
us_socket_unref(returnedSocketPtr);
/* Timeout on uncork failure */
auto [written, failed] = ((AsyncSocket<SSL> *) returnedSocket)->uncork();
if (failed) {
@@ -319,7 +312,8 @@ private:
}
}
}
return returnedSocketPtr;
return (us_socket_t *) returnedSocket;
}
/* If we upgraded, check here (differ between nullptr close and nullptr upgrade) */
@@ -498,23 +492,12 @@ public:
/* Listen to port using this HttpContext */
us_listen_socket_t *listen(const char *host, int port, int options) {
auto socket = us_socket_context_listen(SSL, getSocketContext(), host, port, options, sizeof(HttpResponseData<SSL>));
// we dont depend on libuv ref for keeping it alive
if (socket) {
us_socket_unref(&socket->s);
}
return socket;
return us_socket_context_listen(SSL, getSocketContext(), host, port, options, sizeof(HttpResponseData<SSL>));
}
/* Listen to unix domain socket using this HttpContext */
us_listen_socket_t *listen_unix(const char *path, size_t pathlen, int options) {
auto* socket = us_socket_context_listen_unix(SSL, getSocketContext(), path, pathlen, options, sizeof(HttpResponseData<SSL>));
// we dont depend on libuv ref for keeping it alive
if (socket) {
us_socket_unref(&socket->s);
}
return socket;
us_listen_socket_t *listen(const char *path, int options) {
return us_socket_context_listen_unix(SSL, getSocketContext(), path, options, sizeof(HttpResponseData<SSL>));
}
};

View File

@@ -207,7 +207,7 @@ namespace uWS
/* This guy really has only 30 bits since we reserve two highest bits to chunked encoding parsing state */
uint64_t remainingStreamingBytes = 0;
const size_t MAX_FALLBACK_SIZE = 1024 * 8;
const size_t MAX_FALLBACK_SIZE = 1024 * 4;
/* Returns UINT_MAX on error. Maximum 999999999 is allowed. */
static uint64_t toUnsignedInteger(std::string_view str) {
@@ -227,6 +227,18 @@ namespace uWS
return unsignedIntegerValue;
}
/* RFC 9110 16.3.1 Field Name Registry (TLDR; alnum + hyphen is allowed)
* [...] It MUST conform to the field-name syntax defined in Section 5.1,
* and it SHOULD be restricted to just letters, digits,
* and hyphen ('-') characters, with the first character being a letter. */
static inline bool isFieldNameByte(unsigned char x)
{
return (x == '-') |
((x > '/') & (x < ':')) |
((x > '@') & (x < '[')) |
((x > 96) & (x < '{'));
}
static inline uint64_t hasLess(uint64_t x, uint64_t n)
{
return (((x) - ~0ULL / 255 * (n)) & ~(x) & ~0ULL / 255 * 128);
@@ -251,56 +263,23 @@ namespace uWS
hasMore(x, 'z');
}
/* RFC 9110 5.6.2. Tokens */
/* Hyphen is not checked here as it is very common */
static inline bool isUnlikelyFieldNameByte(unsigned char c)
static inline void *consumeFieldName(char *p)
{
/* Digits and 14 of the 15 non-alphanum characters (lacking hyphen) */
return ((c == '~') | (c == '|') | (c == '`') | (c == '_') | (c == '^') | (c == '.') | (c == '+')
| (c == '*') | (c == '!')) || ((c >= 48) & (c <= 57)) || ((c <= 39) & (c >= 35));
}
static inline bool isFieldNameByteFastLowercased(unsigned char &in) {
/* Most common is lowercase alpha and hyphen */
if (((in >= 97) & (in <= 122)) | (in == '-')) [[likely]] {
return true;
/* Second is upper case alpha */
} else if ((in >= 65) & (in <= 90)) [[unlikely]] {
in |= 32;
return true;
/* These are rarely used but still valid */
} else if (isUnlikelyFieldNameByte(in)) [[unlikely]] {
return true;
}
return false;
}
static inline void *consumeFieldName(char *p) {
/* Best case fast path (particularly useful with clang) */
while (true) {
while ((*p >= 65) & (*p <= 90)) [[likely]] {
*p |= 32;
p++;
}
while (((*p >= 97) & (*p <= 122))) [[likely]] {
p++;
}
if (*p == ':') {
for (; true; p += 8)
{
uint64_t word;
memcpy(&word, p, sizeof(uint64_t));
if (notFieldNameWord(word))
{
while (isFieldNameByte(*(unsigned char *)p))
{
*(p++) |= 0x20;
}
return (void *)p;
}
if (*p == '-') {
p++;
} else if (!((*p >= 65) & (*p <= 90))) {
/* Exit fast path parsing */
break;
}
word |= 0x2020202020202020ull;
memcpy(p, &word, sizeof(uint64_t));
}
/* Generic */
while (isFieldNameByteFastLowercased(*(unsigned char *)p)) {
p++;
}
return (void *)p;
}
/* Puts method as key, target as value and returns non-null (or nullptr on error). */
@@ -513,11 +492,6 @@ namespace uWS
length -= consumed;
consumedTotal += consumed;
/* Even if we could parse it, check for length here as well */
if (consumed > MAX_FALLBACK_SIZE) {
return {0, FULLPTR};
}
/* Store HTTP version (ancient 1.0 or 1.1) */
req->ancientHttp = isAncientHttp;

BIN
packages/bun-vscode/bun.lockb Executable file → Normal file

Binary file not shown.

Binary file not shown.

Some files were not shown because too many files have changed in this diff Show More