Compare commits

..

1 Commit

| Author | SHA1 | Message | Date |
| --- | --- | --- | --- |
| Dylan Conway | 4505be7ff9 | only if dir exists | 2023-04-17 17:13:01 -07:00 |
884 changed files with 67619 additions and 165668 deletions

23
.gitattributes vendored

@@ -8,26 +8,3 @@ src/bun.js/bindings/sqlite/sqlite3_local.h linguist-vendored
*.zig text eol=lf
src/bun.js/bindings/simdutf.cpp linguist-vendored
src/bun.js/bindings/simdutf.h linguist-vendored
src/js/out/WebCoreJSBuiltins.cpp linguist-generated
src/js/out/WebCoreJSBuiltins.h linguist-generated
src/js/out/WebCoreJSBuiltins.d.ts linguist-generated
src/bun.js/bindings/ZigGeneratedClasses.h linguist-generated
src/bun.js/bindings/ZigGeneratedClasses.cpp linguist-generated
src/bun.js/bindings/ZigGeneratedCode.h linguist-generated
src/bun.js/bindings/ZigGeneratedCode.cpp linguist-generated
src/bun.js/bindings/headers.h linguist-generated
src/bun.js/bindings/headers.zig linguist-generated
src/bun.js/bindings/JSSink.h linguist-generated
src/bun.js/bindings/JSSink.zig linguist-generated
src/bun.js/bindings/ZigGeneratedClasses+DOMClientIsoSubspaces.h linguist-generated
src/bun.js/bindings/ZigGeneratedClasses+DOMIsoSubspaces.h linguist-generated
src/bun.js/bindings/ZigGeneratedClasses+lazyStructureHeader.h linguist-generated
src/bun.js/bindings/ZigGeneratedClasses+lazyStructureImpl.h linguist-generated
docs/**/* linguist-documentation


@@ -12,7 +12,6 @@ jobs:
deploy:
name: Deploy site
runs-on: ubuntu-latest
if: github.repository_owner == 'oven-sh'
steps:
- name: Trigger Vercel build
run: curl ${{ secrets.VERCEL_DEPLOY_HOOK }}
run: curl ${{ secrets.VERCEL_DEPLOY_HOOK }}


@@ -1,50 +0,0 @@
name: bun-ecosystem-test
on:
schedule:
- cron: "0 15 * * *" # every day at 7am PST
workflow_dispatch:
inputs:
version:
description: "The version of Bun to run"
required: true
default: "canary"
type: string
jobs:
test:
name: ${{ matrix.tag }}
runs-on: ${{ matrix.os }}
if: github.repository_owner == 'oven-sh'
timeout-minutes: 10
strategy:
fail-fast: false
matrix:
include:
- os: ubuntu-latest
tag: linux-x64
url: linux/x64?avx2=true
- os: ubuntu-latest
tag: linux-x64-baseline
url: linux/x64?baseline=true
# FIXME: runner fails with "No tests found"?
#- os: macos-latest
# tag: darwin-x64
# url: darwin/x64?avx2=true
- os: macos-latest
tag: darwin-x64-baseline
url: darwin/x64?baseline=true
steps:
- id: checkout
name: Checkout
uses: Bhacaz/checkout-files@v2
with:
files: packages/bun-internal-test
- id: setup
name: Setup
uses: oven-sh/setup-bun@v1
with:
bun-download-url: https://bun.sh/download/${{ github.event.inputs.version }}/${{ matrix.url }}
- id: test
name: Test
working-directory: packages/bun-internal-test
run: bun run test:ecosystem


@@ -1,9 +1,4 @@
name: bun-linux
concurrency:
group: bun-linux-aarch64-${{ github.ref }}
cancel-in-progress: true
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }}
@@ -26,7 +21,6 @@ jobs:
linux:
name: ${{matrix.tag}}
runs-on: ${{matrix.runner}}
if: github.repository_owner == 'oven-sh'
timeout-minutes: 90
strategy:
matrix:
@@ -36,7 +30,7 @@ jobs:
arch: aarch64
build_arch: arm64
runner: linux-arm64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-linux-arm64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-linux-arm64-lto.tar.gz"
webkit_basename: "bun-webkit-linux-arm64-lto"
build_machine_arch: aarch64


@@ -1,9 +1,4 @@
name: bun-linux
concurrency:
group: bun-linux-build-${{ github.ref }}
cancel-in-progress: true
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }}
@@ -35,7 +30,6 @@ jobs:
linux:
name: ${{matrix.tag}}
runs-on: ${{matrix.runner}}
if: github.repository_owner == 'oven-sh'
timeout-minutes: 90
strategy:
fail-fast: false
@@ -46,7 +40,7 @@ jobs:
arch: x86_64
build_arch: amd64
runner: big-ubuntu
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-linux-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-linux-amd64-lto.tar.gz"
webkit_basename: "bun-webkit-linux-amd64-lto"
build_machine_arch: x86_64
- cpu: nehalem
@@ -54,7 +48,7 @@ jobs:
arch: x86_64
build_arch: amd64
runner: big-ubuntu
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-linux-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-linux-amd64-lto.tar.gz"
webkit_basename: "bun-webkit-linux-amd64-lto"
build_machine_arch: x86_64


@@ -1,9 +1,4 @@
name: bun-macOS-aarch64
concurrency:
group: bun-macOS-aarch64-${{ github.ref }}
cancel-in-progress: true
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }}
@@ -33,7 +28,6 @@ jobs:
macos-object-files:
name: macOS Object
runs-on: med-ubuntu
if: github.repository_owner == 'oven-sh'
strategy:
matrix:
include:
@@ -106,7 +100,6 @@ jobs:
macOS-cpp:
name: macOS C++
runs-on: ${{ matrix.runner }}
if: github.repository_owner == 'oven-sh'
timeout-minutes: 90
strategy:
matrix:
@@ -117,7 +110,7 @@ jobs:
# obj: bun-obj-darwin-x64-baseline
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
# - cpu: haswell
@@ -126,7 +119,7 @@ jobs:
# obj: bun-obj-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
# - cpu: nehalem
@@ -135,7 +128,7 @@ jobs:
# obj: bun-obj-darwin-x64-baseline
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
# - cpu: haswell
@@ -144,7 +137,7 @@ jobs:
# obj: bun-obj-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
- cpu: native
@@ -152,7 +145,7 @@ jobs:
tag: bun-darwin-aarch64
obj: bun-obj-darwin-aarch64
artifact: bun-obj-darwin-aarch64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-arm64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-macos-arm64-lto.tar.gz"
runner: macos-arm64
dependencies: true
compile_obj: true
@@ -244,7 +237,6 @@ jobs:
macOS:
name: macOS Link
runs-on: ${{ matrix.runner }}
if: github.repository_owner == 'oven-sh'
needs: [macOS-cpp, macos-object-files]
timeout-minutes: 90
strategy:
@@ -257,7 +249,7 @@ jobs:
# package: bun-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: haswell
# arch: x86_64
# tag: bun-darwin-x64
@@ -265,14 +257,14 @@ jobs:
# package: bun-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-macos-amd64-lto.tar.gz"
- cpu: native
arch: aarch64
tag: bun-darwin-aarch64
obj: bun-obj-darwin-aarch64
package: bun-darwin-aarch64
artifact: bun-obj-darwin-aarch64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-arm64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-macos-arm64-lto.tar.gz"
runner: macos-arm64
steps:
- uses: actions/checkout@v3
@@ -396,7 +388,7 @@ jobs:
name: Tests ${{matrix.tag}}
runs-on: ${{ matrix.runner }}
needs: [macOS]
if: github.event_name == 'pull_request' && github.repository_owner == 'oven-sh'
if: github.event_name == 'pull_request'
timeout-minutes: 10
outputs:
failing_tests: ${{ steps.test.outputs.failing_tests }}


@@ -1,9 +1,4 @@
name: bun-macOS-x64-baseline
concurrency:
group: bun-macOS-x64-baseline-${{ github.ref }}
cancel-in-progress: true
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }}
@@ -33,7 +28,6 @@ jobs:
macos-object-files:
name: macOS Object
runs-on: med-ubuntu
if: github.repository_owner == 'oven-sh'
strategy:
matrix:
include:
@@ -106,7 +100,6 @@ jobs:
macOS-cpp:
name: macOS C++
runs-on: ${{ matrix.runner }}
if: github.repository_owner == 'oven-sh'
timeout-minutes: 90
strategy:
matrix:
@@ -117,7 +110,7 @@ jobs:
obj: bun-obj-darwin-x64-baseline
runner: macos-11
artifact: bun-obj-darwin-x64-baseline
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: true
compile_obj: false
# - cpu: haswell
@@ -126,7 +119,7 @@ jobs:
# obj: bun-obj-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
- cpu: nehalem
@@ -135,7 +128,7 @@ jobs:
obj: bun-obj-darwin-x64-baseline
runner: macos-11
artifact: bun-obj-darwin-x64-baseline
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: false
compile_obj: true
# - cpu: haswell
@@ -144,7 +137,7 @@ jobs:
# obj: bun-obj-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
# - cpu: native
@@ -152,7 +145,7 @@ jobs:
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-macos-amd64-lto.tar.gz"
# runner: macos-arm64
# dependencies: true
# compile_obj: true
@@ -245,7 +238,6 @@ jobs:
macOS:
name: macOS Link
runs-on: ${{ matrix.runner }}
if: github.repository_owner == 'oven-sh'
needs: [macOS-cpp, macos-object-files]
timeout-minutes: 90
strategy:
@@ -258,7 +250,7 @@ jobs:
package: bun-darwin-x64
runner: macos-11
artifact: bun-obj-darwin-x64-baseline
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: haswell
# arch: x86_64
# tag: bun-darwin-x64
@@ -266,14 +258,14 @@ jobs:
# package: bun-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: native
# arch: aarch64
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# package: bun-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-macos-amd64-lto.tar.gz"
# runner: macos-arm64
steps:
- uses: actions/checkout@v3
@@ -400,8 +392,8 @@ jobs:
name: Tests ${{matrix.tag}}
runs-on: ${{ matrix.runner }}
needs: [macOS]
if: github.event_name == 'pull_request' && github.repository_owner == 'oven-sh'
timeout-minutes: 30
if: github.event_name == 'pull_request'
timeout-minutes: 10
outputs:
failing_tests: ${{ steps.test.outputs.failing_tests }}
failing_tests_count: ${{ steps.test.outputs.failing_tests_count }}


@@ -1,9 +1,4 @@
name: bun-macOS-x64
concurrency:
group: bun-macOS-x64-${{ github.ref }}
cancel-in-progress: true
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }}
@@ -33,7 +28,6 @@ jobs:
macos-object-files:
name: macOS Object
runs-on: med-ubuntu
if: github.repository_owner == 'oven-sh'
strategy:
matrix:
include:
@@ -106,7 +100,6 @@ jobs:
macOS-cpp:
name: macOS C++
runs-on: ${{ matrix.runner }}
if: github.repository_owner == 'oven-sh'
timeout-minutes: 90
strategy:
matrix:
@@ -117,7 +110,7 @@ jobs:
# obj: bun-obj-darwin-x64-baseline
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
- cpu: haswell
@@ -126,7 +119,7 @@ jobs:
obj: bun-obj-darwin-x64
runner: macos-11
artifact: bun-obj-darwin-x64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: true
compile_obj: false
# - cpu: nehalem
@@ -135,7 +128,7 @@ jobs:
# obj: bun-obj-darwin-x64-baseline
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
- cpu: haswell
@@ -144,7 +137,7 @@ jobs:
obj: bun-obj-darwin-x64
runner: macos-11
artifact: bun-obj-darwin-x64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: false
compile_obj: true
# - cpu: native
@@ -152,7 +145,7 @@ jobs:
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-arm64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-macos-arm64-lto.tar.gz"
# runner: macos-arm64
# dependencies: true
# compile_obj: true
@@ -247,7 +240,6 @@ jobs:
macOS:
name: macOS Link
runs-on: ${{ matrix.runner }}
if: github.repository_owner == 'oven-sh'
needs: [macOS-cpp, macos-object-files]
timeout-minutes: 90
strategy:
@@ -260,7 +252,7 @@ jobs:
# package: bun-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-macos-amd64-lto.tar.gz"
- cpu: haswell
arch: x86_64
tag: bun-darwin-x64
@@ -268,14 +260,14 @@ jobs:
package: bun-darwin-x64
runner: macos-11
artifact: bun-obj-darwin-x64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: native
# arch: aarch64
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# package: bun-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-arm64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-macos-arm64-lto.tar.gz"
# runner: macos-arm64
steps:
- uses: actions/checkout@v3
@@ -402,8 +394,8 @@ jobs:
name: Tests ${{matrix.tag}}
runs-on: ${{ matrix.runner }}
needs: [macOS]
if: github.event_name == 'pull_request' && github.repository_owner == 'oven-sh'
timeout-minutes: 30
if: github.event_name == 'pull_request'
timeout-minutes: 10
outputs:
failing_tests: ${{ steps.test.outputs.failing_tests }}
failing_tests_count: ${{ steps.test.outputs.failing_tests_count }}


@@ -8,7 +8,6 @@ jobs:
sign:
name: Sign Release
runs-on: ubuntu-latest
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-release
@@ -41,7 +40,6 @@ jobs:
name: Release to NPM
runs-on: ubuntu-latest
needs: sign
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-release
@@ -110,7 +108,6 @@ jobs:
name: Release to Dockerhub
runs-on: ubuntu-latest
needs: sign
if: github.repository_owner == 'oven-sh'
steps:
- id: checkout
name: Checkout
@@ -152,7 +149,6 @@ jobs:
name: Upload to S3
runs-on: ubuntu-latest
needs: sign
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-release


@@ -11,7 +11,6 @@ jobs:
npm-types:
name: Release types to NPM
runs-on: ubuntu-latest
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-types


@@ -14,7 +14,6 @@ jobs:
sign:
name: Sign Release
runs-on: ubuntu-latest
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-release
@@ -54,7 +53,6 @@ jobs:
name: Release to NPM
runs-on: ubuntu-latest
needs: sign
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-release
@@ -87,7 +85,6 @@ jobs:
name: Release types to NPM
runs-on: ubuntu-latest
needs: sign
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-types
@@ -130,7 +127,6 @@ jobs:
name: Release to Dockerhub
runs-on: ubuntu-latest
needs: sign
if: github.repository_owner == 'oven-sh'
steps:
- id: checkout
name: Checkout
@@ -181,7 +177,6 @@ jobs:
name: Release to Homebrew
runs-on: ubuntu-latest
needs: sign
if: github.repository_owner == 'oven-sh'
steps:
- id: checkout
name: Checkout
@@ -223,7 +218,6 @@ jobs:
name: Upload to S3
runs-on: ubuntu-latest
needs: sign
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-release

34
.github/workflows/bun-typecheck.yml vendored Normal file

@@ -0,0 +1,34 @@
name: typecheck
on:
push:
branches: [main]
paths:
- "packages/bun-types/**"
- "test/**"
pull_request:
branches:
- main
paths:
- "packages/bun-types/**"
- "test/**"
jobs:
tests:
name: check-tests
runs-on: ubuntu-latest
steps:
- name: Checkout repo
uses: actions/checkout@v3
- name: Install bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- name: Install dependencies
run: bun install
- name: Install dependencies
run: bun install
working-directory: test
- name: Typecheck tests
run: bun run typecheck

59
.github/workflows/run-test-manually.yml vendored Normal file

@@ -0,0 +1,59 @@
name: Run Tests Manually
on:
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
inputs:
version:
description: "Version"
required: true
default: "canary"
type: string
use_bun:
description: "Use Bun?"
required: true
default: true
type: boolean
jobs:
linux-test:
name: Tests ${{matrix.tag}} ${{github.event.inputs.version}}
runs-on: ubuntu-latest
timeout-minutes: 10
strategy:
fail-fast: false
matrix:
include:
- tag: linux-x64
- tag: linux-x64-baseline
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
with:
submodules: false
- id: install-npm
name: Install (npm)
run: |
npm install @oven/bun-${{matrix.tag}}@${{github.event.inputs.version}}
chmod +x node_modules/@oven/bun-${{matrix.tag}}/bin/bun
sudo cp node_modules/@oven/bun-${{matrix.tag}}/bin/bun /usr/bin/bun
- id: test
name: Test
if: ${{github.event.inputs.use_bun == 'true'}}
run: |
bun install
bun install --cwd test/bun.js
bun install --cwd test/bun.js/third-party/body-parser-test
cd packages/bun-internal-test
bun install
bun run test
- id: test-node-runner
name: Test (node runner)
if: ${{github.event.inputs.use_bun == 'false'}}
run: |
bun install
bun install --cwd test/bun.js
bun install --cwd test/bun.js/third-party/body-parser-test
cd packages/bun-internal-test
bun install
node src/runner.node.mjs

7
.gitignore vendored

@@ -13,7 +13,7 @@ dist
*.log
*.out.js
*.out.refresh.js
**/package-lock.json
/package-lock.json
build
*.wat
zig-out
@@ -116,8 +116,3 @@ src/bun.js/debug-bindings-obj
failing-tests.txt
test.txt
myscript.sh
cold-jsc-start
cold-jsc-start.d
/test.ts

4
.gitmodules vendored

@@ -65,7 +65,3 @@ fetchRecurseSubmodules = false
[submodule "src/deps/c-ares"]
path = src/deps/c-ares
url = https://github.com/c-ares/c-ares.git
[submodule "src/deps/zstd"]
path = src/deps/zstd
url = https://github.com/facebook/zstd.git
ignore = dirty


@@ -1,6 +1,6 @@
src/fallback.html
src/bun.js/WebKit
src/js/out
src/bun.js/builtins/js
src/*.out.js
src/*out.*.js
src/deps
@@ -10,4 +10,3 @@ test/snapshots
test/snapshots-no-hmr
test/js/deno/*.test.ts
test/js/deno/**/*.test.ts
bench/react-hello-world/react-hello-world.node.js


@@ -15,15 +15,15 @@
"${workspaceFolder}/src/bun.js/bindings/webcore/",
"${workspaceFolder}/src/bun.js/bindings/sqlite/",
"${workspaceFolder}/src/bun.js/bindings/webcrypto/",
"${workspaceFolder}/src/js/builtins/",
"${workspaceFolder}/src/js/out",
"${workspaceFolder}/src/bun.js/builtins/",
"${workspaceFolder}/src/bun.js/builtins/cpp",
"${workspaceFolder}/src/deps/boringssl/include/",
"${workspaceFolder}/src/deps",
"${workspaceFolder}/src/deps/uws/uSockets/src",
"${workspaceFolder}/src/deps/uws/src"
"${workspaceFolder}/src/deps/uws/uSockets/src"
],
"browse": {
"path": [
"${workspaceFolder}/../webkit-build/include/",
"${workspaceFolder}/bun-webkit/include/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/ICU/Headers/",
@@ -33,14 +33,12 @@
"${workspaceFolder}/src/bun.js/bindings/*",
"${workspaceFolder}/src/bun.js/bindings/sqlite/",
"${workspaceFolder}/src/bun.js/bindings/webcrypto/",
"${workspaceFolder}/src/bun.js/bindings/webcore/",
"${workspaceFolder}/src/js/builtins/*",
"${workspaceFolder}/src/js/out/*",
"${workspaceFolder}/src/bun.js/builtins/*",
"${workspaceFolder}/src/bun.js/builtins/cpp/*",
"${workspaceFolder}/src/bun.js/modules/*",
"${workspaceFolder}/src/deps",
"${workspaceFolder}/src/deps/boringssl/include/",
"${workspaceFolder}/src/deps/uws/uSockets/src",
"${workspaceFolder}/src/deps/uws/src"
"${workspaceFolder}/src/deps/uws/uSockets/src"
],
"limitSymbolsToIncludedHeaders": true,
"databaseFilename": ".vscode/cppdb"

244
.vscode/launch.json generated vendored

@@ -1,209 +1,129 @@
{
// BUN_GARBAGE_COLLECTOR_LEVEL=2 is important for debugging:
// it forces the garbage collector to run after every test and every call to expect(),
// which makes our tests very slow but helps catch memory bugs.
// SIGHUP must be ignored or the debugger will pause when a spawned subprocess exits:
// { "initCommands": ["process handle -p false -s false -n false SIGHUP"] }
"version": "0.2.0",
"configurations": [
{
"type": "lldb",
"request": "launch",
"name": "bun test [file]",
"name": "bun test",
"program": "bun-debug",
"args": ["test", "${file}"],
"cwd": "${fileDirname}",
"args": ["wiptest", "${file}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
"FORCE_COLOR": "1"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [file] (fast)",
"name": "bun test --watch",
"program": "bun-debug",
"args": ["test", "${file}"],
"cwd": "${fileDirname}",
"args": ["--watch", "test", "${file}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1"
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun test (all)",
"program": "bun-debug",
"args": ["wiptest"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1"
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun run current file",
"program": "bun-debug",
"args": ["${file}"],
"cwd": "${file}/../../",
"env": {
"FORCE_COLOR": "1"
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun run (watch)",
"program": "bun-debug",
"args": ["--watch", "${file}"],
"cwd": "${file}/../../",
"env": {
"FORCE_COLOR": "1"
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun run (hot)",
"program": "bun-debug",
"args": ["--hot", "${file}"],
"cwd": "${file}/../../",
"env": {
"FORCE_COLOR": "1"
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun run",
"program": "bun-debug",
"args": ["check.tsx", "-c"],
"cwd": "${env:HOME}/Build/react-ssr",
"env": {
"FORCE_COLOR": "1"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [file] (verbose)",
"name": "bun http example",
"program": "bun-debug",
"args": ["test", "${file}"],
"cwd": "${fileDirname}",
"args": ["run", "examples/http.ts"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [file] --watch",
"name": "bun http file example",
"program": "bun-debug",
"args": ["test", "--watch", "${file}"],
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [file] --only",
"program": "bun-debug",
"args": ["test", "--only", "${file}"],
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [*]",
"program": "bun-debug",
"args": ["test"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [*] (fast)",
"program": "bun-debug",
"args": ["test"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [*] --only",
"program": "bun-debug",
"args": ["test", "--only"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun run [file]",
"program": "bun-debug",
"args": ["run", "${file}"],
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun run [Inspect]",
"program": "bun-debug",
"args": ["--inspect-brk", "${file}"],
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun run [file] (gc)",
"program": "bun-debug",
"args": ["run", "${file}"],
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun run [file] (verbose)",
"program": "bun-debug",
"args": ["run", "${file}"],
"cwd": "${fileDirname}",
"args": ["run", "examples/bun/http-file.ts"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun run [file] --watch",
"name": "bun html-rewriter example",
"program": "bun-debug",
"args": ["run", "--watch", "${file}"],
"cwd": "${fileDirname}",
"args": ["run", "examples/bun/html-rewriter.ts"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun run [file] --hot",
"program": "bun-debug",
"args": ["run", "--hot", "${file}"],
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
@@ -218,14 +138,12 @@
"request": "launch",
"name": "bun build debug",
"program": "bun-debug",
"args": ["bun", "${file}"],
"args": ["bun", "./test/fixtures/bundle/trivial/index.js"],
"cwd": "${workspaceFolder}",
"console": "internalConsole",
"env": {
"BUN_CONFIG_MINIFY_WHITESPACE": "1"
},
// SIGHUP must be ignored or the debugger will pause when a spawned subprocess exits.
"initCommands": ["process handle -p false -s false -n false SIGHUP"]
}
},
{
"type": "lldb",
@@ -283,7 +201,7 @@
"--splitting",
"bun",
"/Users/jarred/Code/bun-rsc/components/Message.tsx",
"/Users/jarred/Code/bun-rsc/components/Button.tsx"
"/Users/jarred/Code/bun-rsc/components/Button.tsx",
],
"cwd": "/Users/jarred/Code/bun-rsc",
"console": "internalConsole",

19
.vscode/settings.json vendored

@@ -37,6 +37,8 @@
"[yaml]": {
"editor.formatOnSave": true
},
"zig.beforeDebugCmd": "make build-unit ${file} ${filter} ${bin}",
"zig.testCmd": "make test ${file} ${filter} ${bin}",
"[markdown]": {
"editor.unicodeHighlight.ambiguousCharacters": false,
"editor.unicodeHighlight.invisibleCharacters": false,
@@ -75,9 +77,7 @@
"src/deps/lol-html": true,
"src/deps/c-ares": true,
"src/deps/tinycc": true,
"src/deps/zstd": true,
"test/snippets/package-json-exports/_node_modules_copy": true,
"src/js/out": true
"test/snippets/package-json-exports/_node_modules_copy": true
},
"C_Cpp.files.exclude": {
"**/.vscode": true,
@@ -204,18 +204,7 @@
"compare": "cpp",
"concepts": "cpp",
"typeindex": "cpp",
"__verbose_abort": "cpp",
"__std_stream": "cpp",
"any": "cpp",
"charconv": "cpp",
"csignal": "cpp",
"format": "cpp",
"forward_list": "cpp",
"future": "cpp",
"regex": "cpp",
"span": "cpp",
"valarray": "cpp",
"codecvt": "cpp"
"__verbose_abort": "cpp"
},
"cmake.configureOnOpen": false,
"C_Cpp.errorSquiggles": "enabled",


@@ -59,9 +59,7 @@ The module loader is in [`src/bun.js/module_loader.zig`](src/bun.js/module_loade
### JavaScript Builtins
TODO: update this with the new build process that uses TypeScript and `$` instead of `@`.
JavaScript builtins are located in [`src/js/builtins/*.ts`](src/js/builtins).
JavaScript builtins are located in [`src/bun.js/builtins/*.js`](src/bun.js/builtins).
These files support a JavaScriptCore-only syntax for internal slots. `@` is used to access an internal slot. For example: `new @Array(123)` will create a new `Array` similar to `new Array(123)`, except if a library modifies the `Array` global, it will not affect the internal slot (`@Array`). These names must be allow-listed in `BunBuiltinNames.h` (though JavaScriptCore allowlists some names by default).
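For illustration, a minimal, hypothetical builtin written in this dialect (the function name and body are invented; only the `@Array` internal-slot usage follows the description above):

```js
// Hypothetical example of the JSC-only builtin dialect (a sketch, not a file
// from this commit). The older .js builtins use the `@` sigil; the newer
// TypeScript-based codegen mentioned in the TODO uses `$` instead.
function repeat(value, count) {
  "use strict";
  // `@Array` resolves to the engine's internal Array constructor, so a script
  // that overwrites the global `Array` cannot affect this builtin.
  const out = new @Array(count);
  for (let i = 0; i < count; i++) out[i] = value;
  return out;
}
```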


@@ -10,7 +10,7 @@ ARG ARCH=x86_64
ARG BUILD_MACHINE_ARCH=x86_64
ARG TRIPLET=${ARCH}-linux-gnu
ARG BUILDARCH=amd64
ARG WEBKIT_TAG=may20
ARG WEBKIT_TAG=feb9
ARG ZIG_TAG=jul1
ARG ZIG_VERSION="0.11.0-dev.2571+31738de28"
ARG WEBKIT_BASENAME="bun-webkit-linux-$BUILDARCH"
@@ -503,33 +503,6 @@ ENV LIB_ICU_PATH=${WEBKIT_DIR}/lib
RUN --mount=type=cache,target=/ccache cd $BUN_DIR && make sqlite
FROM bun-base as zstd
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
ENV CCACHE_DIR=/ccache
COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/zstd ${BUN_DIR}/src/deps/zstd
COPY .prettierrc.cjs ${BUN_DIR}/.prettierrc.cjs
WORKDIR $BUN_DIR
ENV JSC_BASE_DIR=${WEBKIT_DIR}
ENV LIB_ICU_PATH=${WEBKIT_DIR}/lib
RUN --mount=type=cache,target=/ccache cd $BUN_DIR && make zstd
FROM scratch as build_release_cpp
COPY --from=compile_cpp /tmp/*.o /
@@ -562,7 +535,6 @@ COPY --from=lolhtml ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=mimalloc ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/
COPY --from=picohttp ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/
COPY --from=sqlite ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/
COPY --from=zstd ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=tinycc ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=uws ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=uws ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/

184
Makefile

@@ -20,7 +20,6 @@ CPU_TARGET ?= native
MARCH_NATIVE = -mtune=$(CPU_TARGET)
NATIVE_OR_OLD_MARCH =
MMD_IF_LOCAL =
DEFAULT_MIN_MACOS_VERSION=
ARCH_NAME :=
DOCKER_BUILDARCH =
@@ -42,14 +41,8 @@ endif
MIN_MACOS_VERSION ?= $(DEFAULT_MIN_MACOS_VERSION)
BUN_BASE_VERSION = 0.6
CI ?= false
AR=
ifeq ($(CI), false)
MMD_IF_LOCAL = -MMD
endif
BUN_OR_NODE = $(shell which bun 2>/dev/null || which node 2>/dev/null)
CXX_VERSION=c++2a
@@ -84,7 +77,6 @@ ZIG ?= $(shell which zig 2>/dev/null || echo -e "error: Missing zig. Please make
# so if that's resolved, it won't build for C++
REAL_CC = $(shell which clang-15 2>/dev/null || which clang 2>/dev/null)
REAL_CXX = $(shell which clang++-15 2>/dev/null || which clang++ 2>/dev/null)
CLANG_FORMAT = $(shell which clang-format-15 2>/dev/null || which clang-format 2>/dev/null)
CC = $(REAL_CC)
CXX = $(REAL_CXX)
@@ -181,7 +173,7 @@ AR = $(shell which llvm-ar-15 2>/dev/null || which llvm-ar 2>/dev/null || which
endif
OPTIMIZATION_LEVEL=-O3 $(MARCH_NATIVE)
DEBUG_OPTIMIZATION_LEVEL= -O1 $(MARCH_NATIVE) -gdwarf-4
DEBUG_OPTIMIZATION_LEVEL= -O1 $(MARCH_NATIVE)
CFLAGS_WITHOUT_MARCH = $(MACOS_MIN_FLAG) $(OPTIMIZATION_LEVEL) -fno-exceptions -fvisibility=hidden -fvisibility-inlines-hidden
BUN_CFLAGS = $(MACOS_MIN_FLAG) $(MARCH_NATIVE) $(OPTIMIZATION_LEVEL) -fno-exceptions -fvisibility=hidden -fvisibility-inlines-hidden
BUN_TMP_DIR := /tmp/make-bun
@@ -193,13 +185,14 @@ DEFAULT_USE_BMALLOC := 1
USE_BMALLOC ?= DEFAULT_USE_BMALLOC
# Set via postinstall
ifeq (,$(realpath $(JSC_BASE_DIR)))
JSC_BASE_DIR = $(realpath $(firstword $(wildcard bun-webkit)))
ifeq (,$(JSC_BASE_DIR))
JSC_BASE_DIR = $(HOME)/webkit-build
endif
AUTO_JSX_BASE_DIR ?= $(realpath $(firstword $(wildcard bun-webkit)))
ifeq (,$(AUTO_JSX_BASE_DIR))
AUTO_JSX_BASE_DIR ?= $(HOME)/webkit-build
endif
JSC_BASE_DIR ?= $(AUTO_JSX_BASE_DIR)
DEFAULT_JSC_LIB :=
DEFAULT_JSC_LIB_DEBUG :=
@@ -293,14 +286,14 @@ SRC_WEBCORE_FILES := $(wildcard $(SRC_DIR)/webcore/*.cpp)
SRC_SQLITE_FILES := $(wildcard $(SRC_DIR)/sqlite/*.cpp)
SRC_NODE_OS_FILES := $(wildcard $(SRC_DIR)/node_os/*.cpp)
SRC_IO_FILES := $(wildcard src/io/*.cpp)
SRC_BUILTINS_FILES := $(wildcard src/js/out/*.cpp)
SRC_BUILTINS_FILES := $(wildcard src/bun.js/builtins/*.cpp)
SRC_WEBCRYPTO_FILES := $(wildcard $(SRC_DIR)/webcrypto/*.cpp)
OBJ_FILES := $(patsubst $(SRC_DIR)/%.cpp,$(OBJ_DIR)/%.o,$(SRC_FILES))
WEBCORE_OBJ_FILES := $(patsubst $(SRC_DIR)/webcore/%.cpp,$(OBJ_DIR)/%.o,$(SRC_WEBCORE_FILES))
SQLITE_OBJ_FILES := $(patsubst $(SRC_DIR)/sqlite/%.cpp,$(OBJ_DIR)/%.o,$(SRC_SQLITE_FILES))
NODE_OS_OBJ_FILES := $(patsubst $(SRC_DIR)/node_os/%.cpp,$(OBJ_DIR)/%.o,$(SRC_NODE_OS_FILES))
BUILTINS_OBJ_FILES := $(patsubst src/js/out/%.cpp,$(OBJ_DIR)/%.o,$(SRC_BUILTINS_FILES))
BUILTINS_OBJ_FILES := $(patsubst src/bun.js/builtins/%.cpp,$(OBJ_DIR)/%.o,$(SRC_BUILTINS_FILES))
IO_FILES := $(patsubst src/io/%.cpp,$(OBJ_DIR)/%.o,$(SRC_IO_FILES))
MODULES_OBJ_FILES := $(patsubst $(MODULES_DIR)/%.cpp,$(OBJ_DIR)/%.o,$(MODULES_FILES))
WEBCRYPTO_OBJ_FILES := $(patsubst $(SRC_DIR)/webcrypto/%.cpp,$(OBJ_DIR)/%.o,$(SRC_WEBCRYPTO_FILES))
@@ -309,7 +302,7 @@ DEBUG_OBJ_FILES := $(patsubst $(SRC_DIR)/%.cpp,$(DEBUG_OBJ_DIR)/%.o,$(SRC_FILES)
DEBUG_WEBCORE_OBJ_FILES := $(patsubst $(SRC_DIR)/webcore/%.cpp,$(DEBUG_OBJ_DIR)/%.o,$(SRC_WEBCORE_FILES))
DEBUG_SQLITE_OBJ_FILES := $(patsubst $(SRC_DIR)/sqlite/%.cpp,$(DEBUG_OBJ_DIR)/%.o,$(SRC_SQLITE_FILES))
DEBUG_NODE_OS_OBJ_FILES := $(patsubst $(SRC_DIR)/node_os/%.cpp,$(DEBUG_OBJ_DIR)/%.o,$(SRC_NODE_OS_FILES))
DEBUG_BUILTINS_OBJ_FILES := $(patsubst src/js/out/%.cpp,$(DEBUG_OBJ_DIR)/%.o,$(SRC_BUILTINS_FILES))
DEBUG_BUILTINS_OBJ_FILES := $(patsubst src/bun.js/builtins/%.cpp,$(DEBUG_OBJ_DIR)/%.o,$(SRC_BUILTINS_FILES))
DEBUG_IO_FILES := $(patsubst src/io/%.cpp,$(DEBUG_OBJ_DIR)/%.o,$(SRC_IO_FILES))
DEBUG_MODULES_OBJ_FILES := $(patsubst $(MODULES_DIR)/%.cpp,$(DEBUG_OBJ_DIR)/%.o,$(MODULES_FILES))
DEBUG_WEBCRYPTO_OBJ_FILES := $(patsubst $(SRC_DIR)/webcrypto/%.cpp, $(DEBUG_OBJ_DIR)/%.o, $(SRC_WEBCRYPTO_FILES))
@@ -330,12 +323,12 @@ ALL_JSC_INCLUDE_DIRS := -I$(WEBKIT_RELEASE_DIR)/WTF/Headers \
-I$(WEBKIT_RELEASE_DIR)/WTF/PrivateHeaders
SHARED_INCLUDE_DIR = -I$(realpath src/bun.js/bindings)/ \
-I$(realpath src/js/builtins/) \
-I$(realpath src/js/out/) \
-I$(realpath src/bun.js/builtins/) \
-I$(realpath src/bun.js/bindings) \
-I$(realpath src/bun.js/bindings/webcore) \
-I$(realpath src/bun.js/bindings/webcrypto) \
-I$(realpath src/bun.js/bindings/sqlite) \
-I$(realpath src/bun.js/builtins/cpp) \
-I$(realpath src/bun.js/bindings/node_os) \
-I$(realpath src/bun.js/modules) \
-I$(JSC_INCLUDE_DIR)
@@ -352,7 +345,7 @@ LINUX_INCLUDE_DIRS := $(ALL_JSC_INCLUDE_DIRS) \
UWS_INCLUDE_DIR := -I$(BUN_DEPS_DIR)/uws/uSockets/src -I$(BUN_DEPS_DIR)/uws/src -I$(BUN_DEPS_DIR)
INCLUDE_DIRS := $(UWS_INCLUDE_DIR) -I$(BUN_DEPS_DIR)/mimalloc/include -I$(BUN_DEPS_DIR)/zstd/include -Isrc/napi -I$(BUN_DEPS_DIR)/boringssl/include -I$(BUN_DEPS_DIR)/c-ares/include
INCLUDE_DIRS := $(UWS_INCLUDE_DIR) -I$(BUN_DEPS_DIR)/mimalloc/include -Isrc/napi -I$(BUN_DEPS_DIR)/boringssl/include -I$(BUN_DEPS_DIR)/c-ares/include
ifeq ($(OS_NAME),linux)
@@ -460,7 +453,6 @@ ARCHIVE_FILES_WITHOUT_LIBCRYPTO = $(MINIMUM_ARCHIVE_FILES) \
-ltcc \
-lusockets \
-lcares \
-lzstd \
$(BUN_DEPS_OUT_DIR)/libuwsockets.o
ARCHIVE_FILES = $(ARCHIVE_FILES_WITHOUT_LIBCRYPTO)
@@ -527,16 +519,14 @@ bun:
npm-install:
$(NPM_CLIENT) install --ignore-scripts --production
npm-install-dev:
$(NPM_CLIENT) install
cd test && $(NPM_CLIENT) install
cd packages/bun-types && $(NPM_CLIENT) install --production
print-% : ; @echo $* = $($*)
get-% : ; @echo $($*)
print-version:
@echo $(PACKAGE_JSON_VERSION)
# Prevent dependency on libtcc1 so it doesn't do filesystem lookups
TINYCC_CFLAGS= -DTCC_LIBTCC1=\"\0\"
@@ -554,12 +544,24 @@ tinycc:
PYTHON=$(shell which python 2>/dev/null || which python3 2>/dev/null || which python2 2>/dev/null)
.PHONY: builtins
builtins:
NODE_ENV=production bun src/js/builtins/codegen/index.ts --minify
.PHONY: esm
esm:
NODE_ENV=production bun src/js/build-esm.ts
builtins: ## to generate builtins
@if [ -z "$(WEBKIT_DIR)" ] || [ ! -d "$(WEBKIT_DIR)/Source" ]; then echo "WebKit is not cloned. Please run make init-submodules"; exit 1; fi
rm -f src/bun.js/bindings/*Builtin*.cpp src/bun.js/bindings/*Builtin*.h src/bun.js/bindings/*Builtin*.cpp
rm -rf src/bun.js/builtins/cpp
mkdir -p src/bun.js/builtins/cpp
$(PYTHON) $(realpath $(WEBKIT_DIR)/Source/JavaScriptCore/Scripts/generate-js-builtins.py) -i $(realpath src)/bun.js/builtins/js -o $(realpath src)/bun.js/builtins/cpp --framework WebCore --force
$(PYTHON) $(realpath $(WEBKIT_DIR)/Source/JavaScriptCore/Scripts/generate-js-builtins.py) -i $(realpath src)/bun.js/builtins/js -o $(realpath src)/bun.js/builtins/cpp --framework WebCore --wrappers-only
rm -rf /tmp/1.h src/bun.js/builtins/cpp/WebCoreJSBuiltinInternals.h.1
echo -e '// clang-format off\nnamespace Zig { class GlobalObject; }\n#include "root.h"\n' >> /tmp/1.h
cat /tmp/1.h src/bun.js/builtins/cpp/WebCoreJSBuiltinInternals.h > src/bun.js/builtins/cpp/WebCoreJSBuiltinInternals.h.1
mv src/bun.js/builtins/cpp/WebCoreJSBuiltinInternals.h.1 src/bun.js/builtins/cpp/WebCoreJSBuiltinInternals.h
rm -rf /tmp/1.h src/bun.js/builtins/cpp/WebCoreJSBuiltinInternals.h.1
echo -e '// clang-format off\nnamespace Zig { class GlobalObject; }\n#include "root.h"\n' >> /tmp/1.h
cat /tmp/1.h src/bun.js/builtins/cpp/WebCoreJSBuiltinInternals.cpp > src/bun.js/builtins/cpp/WebCoreJSBuiltinInternals.cpp.1
mv src/bun.js/builtins/cpp/WebCoreJSBuiltinInternals.cpp.1 src/bun.js/builtins/cpp/WebCoreJSBuiltinInternals.cpp
$(SED) -i -e 's/class JSDOMGlobalObject/using JSDOMGlobalObject = Zig::GlobalObject/' src/bun.js/builtins/cpp/WebCoreJSBuiltinInternals.h
# this is the one we actually build
mv src/bun.js/builtins/cpp/*JSBuiltin*.cpp src/bun.js/builtins
.PHONY: generate-builtins
generate-builtins: builtins
@@ -630,9 +632,6 @@ compile-ffi-test:
sqlite:
.PHONY: zstd
zstd:
cd $(BUN_DEPS_DIR)/zstd && rm -rf build-cmake-debug && cmake $(CMAKE_FLAGS) -DZSTD_BUILD_STATIC=ON -B build-cmake-debug -S build/cmake -G Ninja && ninja -C build-cmake-debug && cp build-cmake-debug/lib/libzstd.a $(BUN_DEPS_OUT_DIR)/libzstd.a
.PHONY: libarchive
libarchive:
@@ -677,7 +676,6 @@ require:
@which aclocal > /dev/null || (echo -e "ERROR: automake is required. Install with:\n\n $(POSIX_PKG_MANAGER) install automake"; exit 1)
@which $(LIBTOOL) > /dev/null || (echo -e "ERROR: libtool is required. Install with:\n\n $(POSIX_PKG_MANAGER) install libtool"; exit 1)
@which ninja > /dev/null || (echo -e "ERROR: Ninja is required. Install with:\n\n $(POSIX_PKG_MANAGER) install $(PKGNAME_NINJA)"; exit 1)
@which pkg-config > /dev/null || (echo -e "ERROR: pkg-config is required. Install with:\n\n $(POSIX_PKG_MANAGER) install pkg-config"; exit 1)
@echo "You have the dependencies installed! Woo"
init-submodules:
@@ -687,10 +685,6 @@ init-submodules:
build-obj:
$(ZIG) build obj -Doptimize=ReleaseFast -Dcpu="$(CPU_TARGET)"
.PHONY: build-obj-small
build-obj-small:
$(ZIG) build obj -Doptimize=ReleaseSmall -Dcpu="$(CPU_TARGET)"
.PHONY: dev-build-obj-wasm
dev-build-obj-wasm:
$(ZIG) build bun-wasm -Dtarget=wasm32-freestanding
@@ -769,10 +763,10 @@ sign-macos-aarch64:
gon sign.macos-aarch64.json
cls:
@echo -e "\n\n---\n\n"
@echo "\n\n---\n\n"
jsc-check:
@ls $(JSC_BASE_DIR) >/dev/null 2>&1 || (echo -e "Failed to access WebKit build. Please compile the WebKit submodule using the Dockerfile at $(shell pwd)/src/javascript/WebKit/Dockerfile and then copy from /output in the Docker container to $(JSC_BASE_DIR). You can override the directory via JSC_BASE_DIR. \n\n DOCKER_BUILDKIT=1 docker build -t bun-webkit $(shell pwd)/src/bun.js/WebKit -f $(shell pwd)/src/bun.js/WebKit/Dockerfile --progress=plain\n\n docker container create bun-webkit\n\n # Get the container ID\n docker container ls\n\n docker cp DOCKER_CONTAINER_ID_YOU_JUST_FOUND:/output $(JSC_BASE_DIR)" && exit 1)
@ls $(JSC_BASE_DIR) >/dev/null 2>&1 || (echo "Failed to access WebKit build. Please compile the WebKit submodule using the Dockerfile at $(shell pwd)/src/javascript/WebKit/Dockerfile and then copy from /output in the Docker container to $(JSC_BASE_DIR). You can override the directory via JSC_BASE_DIR. \n\n DOCKER_BUILDKIT=1 docker build -t bun-webkit $(shell pwd)/src/bun.js/WebKit -f $(shell pwd)/src/bun.js/WebKit/Dockerfile --progress=plain\n\n docker container create bun-webkit\n\n # Get the container ID\n docker container ls\n\n docker cp DOCKER_CONTAINER_ID_YOU_JUST_FOUND:/output $(JSC_BASE_DIR)" && exit 1)
@ls $(JSC_INCLUDE_DIR) >/dev/null 2>&1 || (echo "Failed to access WebKit include directory at $(JSC_INCLUDE_DIR)." && exit 1)
@ls $(JSC_LIB) >/dev/null 2>&1 || (echo "Failed to access WebKit lib directory at $(JSC_LIB)." && exit 1)
@@ -795,7 +789,7 @@ release-safe: prerelease release-safe-only
.PHONY: fmt-cpp
fmt-cpp:
cd src/bun.js/bindings && $(CLANG_FORMAT) *.cpp *.h -i
cd src/bun.js/bindings && clang-format *.cpp *.h -i
.PHONY: fmt-zig
fmt-zig:
@@ -910,6 +904,7 @@ bun-codesign-release-local:
bun-codesign-release-local-debug:
.PHONY: jsc
jsc: jsc-build jsc-copy-headers jsc-bindings
.PHONY: jsc-build
@@ -921,6 +916,7 @@ jsc-bindings: headers bindings
clone-submodules:
git -c submodule."src/bun.js/WebKit".update=none submodule update --init --recursive --depth=1 --progress
CLANG_FORMAT := $(shell command -v clang-format 2> /dev/null)
.PHONY: headers
headers:
@@ -1085,7 +1081,7 @@ dev-obj-linux:
$(ZIG) build obj -Dtarget=x86_64-linux-gnu -Dcpu="$(CPU_TARGET)"
.PHONY: dev
dev: mkdir-dev esm dev-obj bun-link-lld-debug
dev: mkdir-dev dev-obj bun-link-lld-debug
mkdir-dev:
mkdir -p $(DEBUG_PACKAGE_DIR)
@@ -1115,6 +1111,7 @@ jsc-copy-headers:
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/ObjectPropertyConditionSet.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/ObjectPropertyConditionSet.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/PolyProtoAccessChain.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/PolyProtoAccessChain.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/InlineCacheCompiler.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/InlineCacheCompiler.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/PutKind.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/PutKind.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/StructureStubClearingWatchpoint.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/StructureStubClearingWatchpoint.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/AdaptiveInferredPropertyValueWatchpointBase.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/AdaptiveInferredPropertyValueWatchpointBase.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/StubInfoSummary.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/StubInfoSummary.h
@@ -1148,9 +1145,6 @@ jsc-copy-headers:
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/AsyncFunctionPrototype.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/AsyncFunctionPrototype.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/SymbolObject.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/SymbolObject.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/JSGenerator.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/JSGenerator.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/UnlinkedFunctionCodeBlock.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/UnlinkedFunctionCodeBlock.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/AggregateError.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/AggregateError.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/API/JSWeakValue.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/JSWeakValue.h
find $(WEBKIT_RELEASE_DIR)/JavaScriptCore/Headers/JavaScriptCore/ -name "*.h" -exec cp {} $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/ \;
# This is a workaround for a JSC bug that impacts aarch64
@@ -1300,7 +1294,7 @@ jsc-bindings-mac: bindings
MIMALLOC_VALGRIND_ENABLED_FLAG =
ifeq ($(OS_NAME),linux)
MIMALLOC_VALGRIND_ENABLED_FLAG = -DMI_TRACK_VALGRIND=ON
MIMALLOC_VALGRIND_ENABLED_FLAG = -DMI_VALGRIND=ON
endif
@@ -1322,9 +1316,8 @@ mimalloc-debug:
-DMI_OVERRIDE=OFF \
-DCMAKE_C_FLAGS="$(CFLAGS)" \
-DCMAKE_CXX_FLAGS="$(CFLAGS)" \
-GNinja \
. \
&& ninja
&& make -j $(CPUS);
cp $(BUN_DEPS_DIR)/mimalloc/$(_MIMALLOC_DEBUG_FILE) $(BUN_DEPS_OUT_DIR)/$(MIMALLOC_FILE)
@@ -1346,9 +1339,8 @@ mimalloc:
-DMI_OVERRIDE=OFF \
-DMI_OSX_ZONE=OFF \
-DCMAKE_C_FLAGS="$(CFLAGS)" \
-GNinja \
. \
&& ninja;
.\
&& make -j $(CPUS);
cp $(BUN_DEPS_DIR)/mimalloc/$(MIMALLOC_INPUT_PATH) $(BUN_DEPS_OUT_DIR)/$(MIMALLOC_FILE)
@@ -1383,19 +1375,6 @@ bun-relink-copy:
mkdir -p $(PACKAGE_DIR)
cp $(BUN_DEPLOY_DIR).o $(BUN_RELEASE_BIN).o
.PHONY: bun-link-lld-profile profile
bun-link-lld-profile:
$(CXX) $(BUN_LLD_FLAGS) $(SYMBOLS) -g -gdwarf-4 -fno-omit-frame-pointer \
$(BUN_RELEASE_BIN).o \
-o $(BUN_RELEASE_BIN) \
-W \
$(OPTIMIZATION_LEVEL) $(RELEASE_FLAGS)
rm -rf $(BUN_RELEASE_BIN).dSYM
cp $(BUN_RELEASE_BIN) $(BUN_RELEASE_BIN)-profile
@rm -f $(BUN_RELEASE_BIN).o.o # workaround for https://github.com/ziglang/zig/issues/14080
build-profile: build-obj bun-link-lld-profile bun-codesign-release-local
bun-link-lld-release:
$(CXX) $(BUN_LLD_FLAGS) $(SYMBOLS) \
$(BUN_RELEASE_BIN).o \
@@ -1446,11 +1425,11 @@ wasm-return1:
generate-classes:
bun src/bun.js/scripts/generate-classes.ts
$(ZIG) fmt src/bun.js/bindings/generated_classes.zig
$(CLANG_FORMAT) -i src/bun.js/bindings/ZigGeneratedClasses.h src/bun.js/bindings/ZigGeneratedClasses.cpp
clang-format -i src/bun.js/bindings/ZigGeneratedClasses.h src/bun.js/bindings/ZigGeneratedClasses.cpp
generate-sink:
bun src/bun.js/scripts/generate-jssink.js
$(CLANG_FORMAT) -i src/bun.js/bindings/JSSink.cpp src/bun.js/bindings/JSSink.h
clang-format -i src/bun.js/bindings/JSSink.cpp src/bun.js/bindings/JSSink.h
$(WEBKIT_DIR)/Source/JavaScriptCore/create_hash_table src/bun.js/bindings/JSSink.cpp > src/bun.js/bindings/JSSinkLookupTable.h
$(SED) -i -e 's/#include "Lookup.h"//' src/bun.js/bindings/JSSinkLookupTable.h
$(SED) -i -e 's/namespace JSC {//' src/bun.js/bindings/JSSinkLookupTable.h
@@ -1474,7 +1453,7 @@ $(OBJ_DIR)/%.o: $(SRC_DIR)/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) $(UWS_INCLUDE) \
$(MACOS_MIN_FLAG) \
$(OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-MMD \
-fno-exceptions \
-fno-rtti \
-ferror-limit=1000 \
@@ -1485,7 +1464,7 @@ $(OBJ_DIR)/%.o: src/bun.js/modules/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) $(UWS_INCLUDE) \
$(MACOS_MIN_FLAG) \
$(OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-MMD \
-fno-exceptions \
-fno-rtti \
-ferror-limit=1000 \
@@ -1496,7 +1475,7 @@ $(OBJ_DIR)/%.o: $(SRC_DIR)/webcore/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-MMD \
-fno-exceptions \
-fno-rtti \
-ferror-limit=1000 \
@@ -1507,7 +1486,7 @@ $(OBJ_DIR)/%.o: $(SRC_DIR)/sqlite/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-MMD \
-fno-exceptions \
-fno-rtti \
-ferror-limit=1000 \
@@ -1518,7 +1497,7 @@ $(OBJ_DIR)/%.o: src/io/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-MMD \
-fno-exceptions \
-fno-rtti \
-ferror-limit=1000 \
@@ -1529,18 +1508,18 @@ $(OBJ_DIR)/%.o: $(SRC_DIR)/node_os/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-MMD \
-fno-exceptions \
-fno-rtti \
-ferror-limit=1000 \
$(EMIT_LLVM) \
-c -o $@ $<
$(OBJ_DIR)/%.o: src/js/out/%.cpp
$(OBJ_DIR)/%.o: src/bun.js/builtins/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-MMD \
-fno-exceptions \
-fno-rtti \
-ferror-limit=1000 \
@@ -1552,7 +1531,7 @@ $(OBJ_DIR)/%.o: src/bun.js/bindings/webcrypto/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-MMD \
-fno-exceptions \
-fno-rtti \
-ferror-limit=1000 \
@@ -1566,7 +1545,7 @@ $(DEBUG_OBJ_DIR)/%.o: $(SRC_DIR)/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) $(UWS_INCLUDE) \
$(MACOS_MIN_FLAG) \
$(DEBUG_OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-MMD \
-fno-exceptions \
-fno-rtti \
-ferror-limit=1000 \
@@ -1581,7 +1560,7 @@ $(DEBUG_OBJ_DIR)/%.o: $(SRC_DIR)/webcore/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(DEBUG_OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-MMD \
-fno-exceptions \
-fno-rtti \
-ferror-limit=1000 \
@@ -1594,7 +1573,7 @@ $(DEBUG_OBJ_DIR)/%.o: src/io/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(DEBUG_OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-MMD \
-fno-exceptions \
-fno-rtti \
-ferror-limit=1000 \
@@ -1610,7 +1589,7 @@ $(DEBUG_OBJ_DIR)/%.o: $(SRC_DIR)/sqlite/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(DEBUG_OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-MMD \
-fno-exceptions \
-fno-rtti \
-ferror-limit=1000 \
@@ -1625,7 +1604,7 @@ $(DEBUG_OBJ_DIR)/%.o: $(SRC_DIR)/node_os/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(DEBUG_OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-MMD \
-fno-exceptions \
-fno-rtti \
-ferror-limit=1000 \
@@ -1635,12 +1614,12 @@ $(DEBUG_OBJ_DIR)/%.o: $(SRC_DIR)/node_os/%.cpp
# $(DEBUG_OBJ_DIR) is not included here because it breaks
# detecting if a file needs to be rebuilt
.PHONY: src/js/out/builtins/%.cpp
$(DEBUG_OBJ_DIR)/%.o: src/js/out/%.cpp
.PHONY: src/bun.js/builtins/%.cpp
$(DEBUG_OBJ_DIR)/%.o: src/bun.js/builtins/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(DEBUG_OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-MMD \
-fno-exceptions \
-fno-rtti \
-ferror-limit=1000 \
@@ -1653,7 +1632,7 @@ $(DEBUG_OBJ_DIR)/%.o: src/bun.js/modules/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(DEBUG_OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-MMD \
-fno-exceptions \
-fno-rtti \
-ferror-limit=1000 \
@@ -1667,7 +1646,7 @@ $(DEBUG_OBJ_DIR)/%.o: src/bun.js/bindings/webcrypto/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(DEBUG_OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-MMD \
-fno-exceptions \
-I$(SRC_DIR) \
-fno-rtti \
@@ -1842,34 +1821,12 @@ copy-to-bun-release-dir-bin:
PACKAGE_MAP = --pkg-begin async_io $(BUN_DIR)/src/io/io_darwin.zig --pkg-begin bun $(BUN_DIR)/src/bun_redirect.zig --pkg-end --pkg-end --pkg-begin javascript_core $(BUN_DIR)/src/jsc.zig --pkg-begin bun $(BUN_DIR)/src/bun_redirect.zig --pkg-end --pkg-end --pkg-begin bun $(BUN_DIR)/src/bun_redirect.zig --pkg-end
.PHONY: cold-jsc-start
cold-jsc-start:
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-fno-exceptions \
-fno-rtti \
-ferror-limit=1000 \
$(LIBICONV_PATH) \
$(DEFAULT_LINKER_FLAGS) \
$(PLATFORM_LINKER_FLAGS) \
$(ICU_FLAGS) \
$(JSC_FILES) \
misctools/cold-jsc-start.cpp -o cold-jsc-start
.PHONY: vendor-without-npm
vendor-without-npm: node-fallbacks runtime_js fallback_decoder bun_error mimalloc picohttp zlib boringssl libarchive lolhtml sqlite usockets uws tinycc c-ares zstd
.PHONY: vendor-without-check
vendor-without-check: npm-install vendor-without-npm
vendor-without-check: npm-install node-fallbacks runtime_js fallback_decoder bun_error mimalloc picohttp zlib boringssl libarchive lolhtml sqlite usockets uws tinycc c-ares
.PHONY: vendor
vendor: require init-submodules vendor-without-check
.PHONY: vendor-dev
vendor-dev: require init-submodules npm-install-dev vendor-without-npm
.PHONY: bun
bun: vendor identifier-cache build-obj bun-link-lld-release bun-codesign-release-local
@@ -1879,8 +1836,11 @@ regenerate-bindings:
@make bindings -j$(CPU_COUNT)
.PHONY: setup
setup: vendor-dev identifier-cache clean-bindings
make jsc-check
setup: require
make init-submodules
cd test && $(NPM_CLIENT) install --production
cd packages/bun-types && $(NPM_CLIENT) install --production
make vendor-without-check builtins identifier-cache clean-bindings
make bindings -j$(CPU_COUNT)
@echo ""
@echo "Development environment setup complete"


@@ -48,7 +48,7 @@ bunx cowsay 'Hello, world!' # execute a package
Bun supports Linux (x64 & arm64) and macOS (x64 & Apple Silicon).
> **Linux users** — Kernel version 5.6 or higher is strongly recommended, but the minimum is 5.1.
>
>
> **Windows users** — Bun does not currently provide a native Windows build. We're working on this; progress can be tracked at [this issue](https://github.com/oven-sh/bun/issues/43). In the meantime, use one of the installation methods below for Windows Subsystem for Linux.
```sh
@@ -67,6 +67,7 @@ docker pull oven/bun
docker run --rm --init --ulimit memlock=-1:-1 oven/bun
```
### Upgrade
To upgrade to the latest version of Bun, run:
@@ -99,7 +100,7 @@ bun upgrade --canary
- [Runtime](https://bun.sh/docs/runtime/index)
- [Module resolution](https://bun.sh/docs/runtime/modules)
- [Hot &amp; live reloading](https://bun.sh/docs/runtime/hot)
- [Plugins](https://bun.sh/docs/bundler/plugins)
- [Plugins](https://bun.sh/docs/runtime/plugins)
- Ecosystem
- [Node.js](https://bun.sh/docs/ecosystem/nodejs)
- [TypeScript](https://bun.sh/docs/ecosystem/typescript)
@@ -126,6 +127,7 @@ bun upgrade --canary
- [DNS](https://bun.sh/docs/api/dns)
- [Node-API](https://bun.sh/docs/api/node-api)
## Contributing
Refer to the [Project > Development](https://bun.sh/docs/project/development) guide to start contributing to Bun.

Binary file not shown.

View File

@@ -1,171 +0,0 @@
# Based on https://raw.githubusercontent.com/github/gitignore/main/Node.gitignore
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*
# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
*.lcov
# nyc test coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
jspm_packages/
# Snowpack dependency directory (https://snowpack.dev/)
web_modules/
# TypeScript cache
*.tsbuildinfo
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional stylelint cache
.stylelintcache
# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variable files
.env
.env.development.local
.env.test.local
.env.production.local
.env.local
# parcel-bundler cache (https://parceljs.org/)
.cache
.parcel-cache
# Next.js build output
.next
out
# Nuxt.js build / generate output
.nuxt
dist
# Gatsby files
.cache/
# Comment the public line back in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public
# vuepress build output
.vuepress/dist
# vuepress v2.x temp and cache directory
.temp
.cache
# Docusaurus cache and generated files
.docusaurus
# Serverless directories
.serverless/
# FuseBox cache
.fusebox/
# DynamoDB Local files
.dynamodb/
# TernJS port file
.tern-port
# Stores VSCode versions used for testing VSCode extensions
.vscode-test
# yarn v2
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*
esbuild

View File

@@ -1,40 +0,0 @@
# Bundler benchmark
This is a performance benchmark of the following bundlers:
- Bun
- esbuild
- Parcel 2
- Rollup + Terser
- Webpack
It is an exact copy of [`esbuild`'s benchmark](https://github.com/evanw/esbuild/blob/main/Makefile), except that Bun [has been added](https://github.com/colinhacks/esbuild/commit/1b928b7981aa7edfadf77fcf8931bb8d6f38cd96). The benchmark bundles 10 copies of the large [three.js](https://threejs.org/) library, with minification and source maps enabled.
To run the benchmark:
```sh
$ chmod +x run-bench.sh
$ ./run-bench.sh
```
Each bundler writes various output to the console. Scan the results for lines like the following beneath each bundler's output:
```sh
real <number>
user <number>
sys <number>
```
These lines are generated by the `time` command, which is used to benchmark each build.
## Results
The `real` results, as run on a 16-inch M1 MacBook Pro:
| Bundler | Time |
| ------- | ------ |
| Bun | 0.17s |
| esbuild | 0.33s |
| Rollup | 18.82s |
| Webpack | 26.21s |
| Parcel | 17.95s |

Binary file not shown.

View File

@@ -1 +0,0 @@
console.log("Hello via Bun!");

View File

@@ -1,8 +0,0 @@
{
"name": "bundle",
"module": "index.ts",
"type": "module",
"devDependencies": {
"bun-types": "^0.5.0"
}
}

View File

@@ -1,3 +0,0 @@
git clone git@github.com:colinhacks/esbuild.git
cd esbuild
make bench-three

View File

@@ -1,20 +0,0 @@
{
"compilerOptions": {
"lib": [
"ESNext"
],
"module": "esnext",
"target": "esnext",
"moduleResolution": "bundler",
"strict": true,
"downlevelIteration": true,
"skipLibCheck": true,
"jsx": "react-jsx",
"allowSyntheticDefaultImports": true,
"forceConsistentCasingInFileNames": true,
"allowJs": true,
"types": [
"bun-types" // add Bun global
]
}
}

View File

@@ -1,31 +0,0 @@
import EventEmitter3 from "eventemitter3";
import { group } from "mitata";
import EventEmitterNative from "node:events";
export const implementations = [
{
EventEmitter: EventEmitterNative,
name: process.isBun ? (EventEmitterNative.init ? "bun" : "C++") : "node:events",
monkey: true,
},
// { EventEmitter: EventEmitter3, name: "EventEmitter3" },
].filter(Boolean);
for (const impl of implementations) {
impl.EventEmitter?.setMaxListeners?.(Infinity);
}
export function groupForEmitter(name, cb) {
if (implementations.length === 1) {
return cb({
...implementations[0],
name: `${name}: ${implementations[0].name}`,
});
} else {
return group(name, () => {
for (let impl of implementations) {
cb(impl);
}
});
}
}

View File

@@ -1,96 +0,0 @@
import { bench, run } from "mitata";
import { groupForEmitter } from "./implementations.mjs";
var id = 0;
groupForEmitter("single emit", ({ EventEmitter, name }) => {
const emitter = new EventEmitter();
emitter.on("hello", event => {
event.preventDefault();
});
bench(name, () => {
emitter.emit("hello", {
preventDefault() {
id++;
},
});
});
});
groupForEmitter("on x 10_000 (handler)", ({ EventEmitter, name }) => {
const emitter = new EventEmitter();
bench(name, () => {
var cb = event => {
event.preventDefault();
};
emitter.on("hey", cb);
var called = false;
for (let i = 0; i < 10_000; i++)
emitter.emit("hey", {
preventDefault() {
id++;
called = true;
},
});
if (!called) throw new Error("not called");
});
});
// for (let { impl: EventEmitter, name, monkey } of []) {
// if (monkey) {
// var monkeyEmitter = Object.assign({}, EventEmitter.prototype);
// monkeyEmitter.on("hello", event => {
// event.preventDefault();
// });
// bench(`[monkey] ${className}.emit`, () => {
// var called = false;
// monkeyEmitter.emit("hello", {
// preventDefault() {
// id++;
// called = true;
// },
// });
// if (!called) {
// throw new Error("monkey failed");
// }
// });
// bench(`[monkey] ${className}.on x 10_000 (handler)`, () => {
// var cb = () => {
// event.preventDefault();
// };
// monkeyEmitter.on("hey", cb);
// for (let i = 0; i < 10_000; i++)
// monkey.emit("hey", {
// preventDefault() {
// id++;
// },
// });
// monkeyEmitter.off("hey", cb);
// });
// }
// }
// var target = new EventTarget();
// target.addEventListener("hello", event => {});
// bench("EventTarget.dispatch", () => {
// target.dispatchEvent(event);
// });
// var hey = new Event("hey");
// bench("EventTarget.on x 10_000 (handler)", () => {
// var handler = event => {};
// target.addEventListener("hey", handler);
// for (let i = 0; i < 10_000; i++) target.dispatchEvent(hey);
// target.removeEventListener("hey", handler);
// });
await run();

View File

@@ -1,40 +0,0 @@
import { bench, run } from "mitata";
import { groupForEmitter } from "./implementations.mjs";
var id = 0;
groupForEmitter("test 1", ({ EventEmitter, name }) => {
const emitter = new EventEmitter();
emitter.on("hello", event => {
event.preventDefault();
});
bench(name, () => {
emitter.once("hello", event => {
event.preventDefault();
});
emitter.emit("hello", {
preventDefault() {
id++;
},
});
});
});
groupForEmitter("test 2", ({ EventEmitter, name }) => {
const emitter = new EventEmitter();
bench(name, () => {
emitter.once("hello", event => {
event.preventDefault();
});
emitter.emit("hello", {
preventDefault() {
id++;
},
});
});
});
await run();

View File

@@ -1,63 +0,0 @@
import { bench, run } from "mitata";
import { groupForEmitter } from "./implementations.mjs";
// Pseudo RNG is derived from https://stackoverflow.com/a/424445
let rngState = 123456789;
function nextInt() {
const m = 0x80000000; // 2**31;
const a = 1103515245;
const c = 12345;
rngState = (a * rngState + c) % m;
return rngState;
}
function nextRange(start, end) {
// returns in range [start, end): including start, excluding end
// can't just take nextInt() modulo the range because of weak randomness in the lower bits
const rangeSize = end - start;
const randomUnder1 = nextInt() / 0x7fffffff; // 2**31 - 1
return start + Math.floor(randomUnder1 * rangeSize);
}
const chunks = new Array(1024).fill(null).map((_, j) => {
const arr = new Uint8Array(1024);
for (let i = 0; i < arr.length; i++) {
arr[i] = nextRange(0, 256);
}
return arr;
});
groupForEmitter("stream simulation", ({ EventEmitter, name }) => {
bench(name, () => {
let id = 0;
const stream = new EventEmitter();
stream.on("start", res => {
if (res.status !== 200) throw new Error("not 200");
});
const received = [];
stream.on("data", req => {
received.push(req);
});
stream.on("end", ev => {
ev.preventDefault();
});
// simulate a stream
stream.emit("start", { status: 200 });
for (let chunk of chunks) {
stream.emit("data", chunk);
}
stream.emit("end", {
preventDefault() {
id++;
},
});
if (id !== 1) throw new Error("not implemented right");
if (received.length !== 1024) throw new Error("not implemented right");
});
});
await run();

View File

@@ -1,13 +1,11 @@
{
"name": "bench",
"dependencies": {
"@babel/core": "^7.16.10",
"@babel/preset-react": "^7.16.7",
"@swc/core": "^1.2.133",
"benchmark": "^2.1.4",
"mitata": "^0.1.6",
"esbuild": "^0.14.12",
"eventemitter3": "^5.0.0",
"mitata": "^0.1.6"
"@swc/core": "^1.2.133",
"@babel/core": "^7.16.10",
"@babel/preset-react": "^7.16.7"
},
"scripts": {
"ffi": "cd ffi && bun run deps && bun run build && bun run bench",

View File

@@ -1,5 +1,5 @@
// to run this:
// NODE_ENV=production bun react-hello-world.jsx
// NODE_ENV=production bun --jsx-production react-hello-world.jsx
// Make sure you're using react-dom@18.3.0 or later.
// Currently that is available at react-dom@next (which is installed in this repository)

File diff suppressed because one or more lines are too long

View File

@@ -1,4 +1,4 @@
import { bench, run } from "./runner.mjs";
import { bench, run } from "mitata";
function doIt(...args) {
// we use .at() to prevent constant folding optimizations

View File

@@ -1,5 +1,5 @@
// https://github.com/oven-sh/bun/issues/1096
import { bench, run } from "./runner.mjs";
import { bench, run } from "mitata";
const identity = x => x;

View File

@@ -1,4 +1,4 @@
import { bench, run } from "./runner.mjs";
import { bench, run } from "mitata";
var myArray = new Array(5);
bench("[1, 2, 3, 4, 5].shift()", () => {

View File

@@ -1,6 +1,5 @@
// @runtime bun
import { ArrayBufferSink } from "bun";
import { bench, run } from "./runner.mjs";
import { bench, run } from "mitata";
var short = "Hello World!";
var shortUTF16 = "Hello World 💕💕💕";

View File

@@ -1,4 +1,4 @@
import { bench, group, run } from "./runner.mjs";
import { bench, group, run } from "mitata";
import * as assert from "assert";
bench("deepEqual", () => {

View File

@@ -1,9 +1,6 @@
// @runtime bun,node,deno
import { bench, run } from "./runner.mjs";
import process from "node:process";
import { Buffer } from "node:buffer";
import { bench, run } from "mitata";
const N = parseInt(process.env.RUN_COUNTER ?? "10000", 10);
const N = parseInt(process.argv.slice(2).at(0) || "10", 10);
var isBuffer = new Buffer(0);
var isNOtBuffer = "not a buffer";

BIN
bench/snippets/bun.lockb Executable file

Binary file not shown.

View File

@@ -1,4 +1,4 @@
import { bench, group, run } from "./runner.mjs";
import { bench, group, run } from "mitata";
import { readFileSync } from "fs";
import { allocUnsafe } from "bun";

View File

@@ -1,15 +1,15 @@
// so it can run in environments without node module resolution
import { bench, run } from "./runner.mjs";
import { bench, run } from "mitata";
import crypto from "node:crypto";
var foo = Buffer.allocUnsafe(512);
var foo = Buffer.allocUnsafe(16384);
foo.fill(123);
// if ("Bun" in globalThis) {
// const { CryptoHasher } = Bun;
// bench("Bun.CryptoHasher(sha512)", () => {
// var hasher = new CryptoHasher("sha512");
// bench("CryptoHasher Blake2b256", () => {
// var hasher = new CryptoHasher("blake2b256");
// hasher.update(foo);
// hasher.digest();
// });

View File

@@ -1,4 +1,4 @@
import { bench, group, run } from "./runner.mjs";
import { bench, group, run } from "mitata";
import fastDeepEquals from "fast-deep-equal/es6/index";
// const Date = globalThis.Date;

View File

@@ -1,5 +1,5 @@
import { lookup, resolve } from "node:dns/promises";
import { bench, run } from "./runner.mjs";
import { bench, run } from "mitata";
bench("(cached) dns.lookup remote x 50", async () => {
var tld = "example.com";

View File

@@ -1,5 +1,5 @@
import { dns } from "bun";
import { bench, run, group } from "./runner.mjs";
import { bench, run, group } from "mitata";
async function forEachBackend(name, fn) {
group(name, () => {

101
bench/snippets/emitter.mjs Normal file
View File

@@ -0,0 +1,101 @@
// **so this file can run in node**
import { createRequire } from "node:module";
const require = createRequire(import.meta.url);
// --
const EventEmitterNative = require("node:events").EventEmitter;
const TypedEmitter = require("tiny-typed-emitter").TypedEmitter;
const EventEmitter3 = require("eventemitter3").EventEmitter;
import { bench, run } from "../../node_modules/mitata/src/cli.mjs";
const event = new Event("hello");
var id = 0;
for (let [EventEmitter, className] of [
[EventEmitterNative, "EventEmitter"],
[TypedEmitter, "TypedEmitter"],
[EventEmitter3, "EventEmitter3"],
]) {
const emitter = new EventEmitter();
emitter.on("hello", event => {
event.preventDefault();
});
bench(`${className}.emit`, () => {
emitter.emit("hello", {
preventDefault() {
id++;
},
});
});
bench(`${className}.on x 10_000 (handler)`, () => {
var cb = event => {
event.preventDefault();
};
emitter.on("hey", cb);
var called = false;
for (let i = 0; i < 10_000; i++)
emitter.emit("hey", {
preventDefault() {
id++;
called = true;
},
});
emitter.off("hey", cb);
if (!called) throw new Error("not called");
});
if (EventEmitter !== EventEmitter3) {
var monkey = Object.assign({}, EventEmitter.prototype);
monkey.on("hello", event => {
event.preventDefault();
});
bench(`[monkey] ${className}.emit`, () => {
var called = false;
monkey.emit("hello", {
preventDefault() {
id++;
called = true;
},
});
if (!called) {
throw new Error("monkey failed");
}
});
bench(`[monkey] ${className}.on x 10_000 (handler)`, () => {
var cb = () => {
event.preventDefault();
};
monkey.on("hey", cb);
for (let i = 0; i < 10_000; i++)
monkey.emit("hey", {
preventDefault() {
id++;
},
});
monkey.off("hey", cb);
});
}
}
var target = new EventTarget();
target.addEventListener("hello", event => {});
bench("EventTarget.dispatch", () => {
target.dispatchEvent(event);
});
var hey = new Event("hey");
bench("EventTarget.on x 10_000 (handler)", () => {
var handler = event => {};
target.addEventListener("hey", handler);
for (let i = 0; i < 10_000; i++) target.dispatchEvent(hey);
target.removeEventListener("hey", handler);
});
await run();

View File

@@ -1,5 +1,5 @@
import { group } from "./runner.mjs";
import { bench, run } from "./runner.mjs";
import { group } from "mitata";
import { bench, run } from "mitata";
import { encode as htmlEntityEncode } from "html-entities";
import { escape as heEscape } from "he";

View File

@@ -1,5 +1,5 @@
import { viewSource, dlopen, CString, ptr, toBuffer, toArrayBuffer, FFIType, callback } from "bun:ffi";
import { bench, group, run } from "./runner.mjs";
import { bench, group, run } from "mitata";
const types = {
returns_true: {

View File

@@ -1,4 +1,4 @@
import { bench, run } from "./runner.mjs";
import { bench, run } from "mitata";
const input =
"Hello, World! foo bar baz qux quux corge grault garply waldo fred plugh xyzzy thud z a b c d e f g h i j k l m n o p q r s t u v w x y z".split(

View File

@@ -1,57 +0,0 @@
import { bench, run } from "./runner.mjs";
var obj = {
"restApiRoot": "/api",
"host": "0.0.0.0",
"port": 3000,
"remoting": {
"context": false,
"rest": {
"handleErrors": false,
"normalizeHttpPath": false,
"xml": false,
},
"json": {
"strict": false,
"limit": "100kb",
},
"urlencoded": {
"extended": true,
"limit": "100kb",
boop: {
"restApiRoot": "/api",
"host": "0.0.0.0",
"port": 3000,
"remoting": {
"context": false,
"rest": {
"handleErrors": false,
"normalizeHttpPath": false,
"xml": false,
},
"json": {
"strict": false,
"limit": "100kb",
},
"urlencoded": {
"extended": true,
"limit": "100kb",
},
"cors": false,
},
},
},
"cors": false,
},
};
var big = JSON.stringify(obj);
bench("JSON.parse(obj)", () => {
globalThis.foo = JSON.parse(big);
});
bench("JSON.stringify(obj)", () => {
globalThis.bar = JSON.stringify(obj);
});
await run();

View File

@@ -1,4 +1,4 @@
import { bench, run } from "./runner.mjs";
import { bench, run } from "mitata";
// These are no-op C++ functions that are exported to JS.
const lazy = globalThis[Symbol.for("Bun.lazy")];

View File

@@ -1,42 +0,0 @@
// @runtime node, bun
import { bench, run } from "./runner.mjs";
import * as vm from "node:vm";
const context = {
animal: "cat",
count: 2,
};
const script = new vm.Script("animal = 'hey'");
vm.createContext(context);
bench("vm.Script.runInContext", () => {
script.runInContext(context);
});
bench("vm.Script.runInThisContext", () => {
script.runInThisContext(context);
});
bench("vm.Script.runInNewContext", () => {
script.runInNewContext(context);
});
bench("vm.runInContext", () => {
vm.runInContext("animal = 'hey'", context);
});
bench("vm.runInNewContext", () => {
vm.runInNewContext("animal = 'hey'", context);
});
bench("vm.runInThisContext", () => {
vm.runInThisContext("animal = 'hey'", context);
});
bench("vm.createContext", () => {
vm.createContext({ yo: 1 });
});
await run();

View File

@@ -1,4 +1,4 @@
import { bench, run } from "./runner.mjs";
import { bench, run } from "mitata";
var noop = globalThis[Symbol.for("Bun.lazy")]("noop");
var { function: noopFn, callback } = noop;

View File

@@ -24,7 +24,7 @@ const obj = {
w: 23,
};
import { bench, group, run } from "./runner.mjs";
import { bench, group, run } from "mitata";
var val = 0;
bench("Object.values(literal)", () => {

View File

@@ -0,0 +1,7 @@
{
"dependencies": {
"eventemitter3": "^5.0.0",
"tiny-typed-emitter": "latest"
},
"prettier": "../../.prettierrc.cjs"
}

View File

@@ -1,5 +1,5 @@
import { group } from "./runner.mjs";
import { bench, run } from "./runner.mjs";
import { group } from "mitata";
import { bench, run } from "mitata";
bench("performance.now x 1000", () => {
for (let i = 0; i < 1000; i++) {
performance.now();

View File

@@ -1,4 +1,4 @@
import { bench, run } from "./runner.mjs";
import { bench, run } from "mitata";
bench("process.stderr.write('hey')", () => {
process.stderr.write("hey");

View File

@@ -1,4 +1,4 @@
import { bench, group, run } from "./runner.mjs";
import { bench, group, run } from "mitata";
import { renderToReadableStream } from "react-dom/server.browser";
import { renderToReadableStream as renderToReadableStreamBun } from "react-dom/server";

View File

@@ -1,5 +1,5 @@
import { readFileSync, writeFileSync } from "node:fs";
import { bench, run } from "./runner.mjs";
import { bench, run } from "mitata";
var short = (function () {
const text = "Hello World!";

View File

@@ -1,5 +1,5 @@
import { readdirSync } from "fs";
import { bench, run } from "./runner.mjs";
import { bench, run } from "mitata";
import { argv } from "process";
const dir = argv.length > 2 ? argv[2] : "/tmp";

View File

@@ -1,244 +0,0 @@
// note: this isn't done yet
// we look for `// @runtime` in the file to determine which runtimes to run the benchmark in
import { spawnSync } from "bun";
import { readdirSync, readFileSync } from "node:fs";
import { Database } from "bun:sqlite";
import { extname, basename } from "path";
const exts = [".js", ".ts", ".mjs", ".tsx"];
const runtimes = {
bun: process.execPath,
node: process.env.NODE ?? Bun.which("node"),
deno: process.env.DENO ?? Bun.which("deno"),
};
if (process.env.BUN_ONLY) {
delete runtimes.node;
delete runtimes.deno;
}
function getEntry(sourceContents, file) {
const targetLineStart = sourceContents.indexOf("// @runtime ");
if (targetLineStart === -1) {
return;
}
const targetLineEnd = sourceContents.indexOf("\n", targetLineStart);
if (targetLineEnd === -1) {
return;
}
const targetLine = sourceContents.slice(targetLineStart, targetLineEnd);
const targets = targetLine
.slice("// @runtime ".length)
.split(/[,\s]+/gm)
.map(a => a.trim().toLowerCase())
.filter(Boolean)
.sort();
if (targets.length === 0) {
throw new TypeError("No targets specified in " + JSON.stringify(file) + "\n> " + JSON.stringify(targetLine) + "\n");
}
var cmds = {};
for (let target of targets) {
if (!(target in runtimes)) {
throw new TypeError(
"Unknown target " + JSON.stringify(target) + "\n> " + targetLine + "\n file:" + JSON.stringify(file),
);
}
switch (target) {
case "bun": {
if (!runtimes.bun) {
continue;
}
cmds.bun = [runtimes.bun, "run", file];
break;
}
case "node": {
if (!runtimes.node) {
continue;
}
cmds.node = [runtimes.node, file];
break;
}
case "deno": {
if (!runtimes.deno) {
continue;
}
cmds.deno = [runtimes.deno, "run", "-A", "--unstable", file];
break;
}
default: {
throw new Error("This should not be reached.");
break;
}
}
}
if (Object.keys(cmds).length === 0) {
return;
}
return cmds;
}
function scan() {
const queue = [];
for (let file of readdirSync(import.meta.dir)) {
if (!exts.includes(extname(file))) continue;
if (file.includes("runner")) continue;
const cmds = getEntry(readFileSync(file, "utf8"), file);
if (!cmds) continue;
queue.push({ file, cmds });
}
return queue;
}
const env = {
...process.env,
BENCHMARK_RUNNER: "1",
NODE_NO_WARNINGS: "1",
NODE_OPTIONS: "--no-warnings",
BUN_DEBUG_QUIET_LOGS: "1",
NO_COLOR: "1",
DISABLE_COLORS: "1",
};
function* run({ cmds, file }) {
const benchmarkID = basename(file)
.toLowerCase()
.replace(/\.m?js$/, "")
.replace(/\.tsx?$/, "")
.replace(".node", "")
.replace(".deno", "")
.replace(".bun", "");
// if benchmarkID doesn't contain only letters, numbers, dashes, or underscores, throw
if (!/^[a-z0-9_-]+$/i.test(benchmarkID)) {
throw new Error(
"Benchmark files must only contain /a-zA-Z0-9-_/ " +
JSON.stringify(benchmarkID) +
" in file " +
JSON.stringify(file),
);
}
for (let runtime in cmds) {
const timestamp = Date.now();
const spawnStart = performance.now();
var { stdout, exitCode } = spawnSync({
cmd: cmds[runtime],
env,
stderr: "inherit",
stdout: "pipe",
});
const spawnElapsed = performance.now() - spawnStart;
stdout = stdout.toString();
try {
const json = JSON.parse(stdout.trim());
yield {
file: file,
benchmarkID,
result: json,
runtime: runtime,
timestamp,
elapsed: spawnElapsed,
runtimeVersion: (runtime === "bun"
? `${Bun.version}-${Bun.revision}`
: String(json?.runtime).split(" ").at(1)
).replace(/^v/, ""),
};
} catch (e) {
console.error("Failing file", file);
console.error(JSON.stringify(cmds[runtime]));
console.error(stdout.toString());
throw e;
}
if (exitCode !== 0) {
throw new Error("Non-zero exit code in file " + JSON.stringify(file) + ", runtime: " + JSON.stringify(runtime));
}
}
}
const db = Database.open(process.env.RUNNER_DB_PATH ?? `runs-${process.platform}-${process.arch}.db`);
db.run(`CREATE TABLE IF NOT EXISTS benchmarkResults (
id INTEGER PRIMARY KEY AUTOINCREMENT,
results TEXT NOT NULL
);`);
db.run(`
CREATE TABLE IF NOT EXISTS runs (
id INTEGER PRIMARY KEY AUTOINCREMENT,
runtime TEXT NOT NULL,
runtimeVersion TEXT NOT NULL,
benchmarkID TEXT NOT NULL,
timestamp INTEGER NOT NULL,
elapsed REAL NOT NULL,
benchmarkResultID INTEGER NOT NULL
);
`);
db.run(`CREATE INDEX IF NOT EXISTS runs_benchmarkID ON runs (benchmarkID);`);
db.run(`CREATE INDEX IF NOT EXISTS runs_timestamp ON runs (timestamp);`);
db.run(`CREATE INDEX IF NOT EXISTS runs_runtime ON runs (runtime);`);
db.run(`CREATE INDEX IF NOT EXISTS runs_runtimeVersion ON runs (runtimeVersion);`);
db.run(`CREATE INDEX IF NOT EXISTS runs_benchmarkResultID ON runs (benchmarkResultID);`);
// TODO: finish this
for (let result of scan()) {
var prevBenchmarkID = null;
for (let {
runtime,
benchmarkID,
runtimeVersion,
timestamp,
elapsed,
file,
result: { benchmarks },
} of run(result)) {
if (prevBenchmarkID !== benchmarkID) {
console.log("\n" + `${benchmarkID}:`);
prevBenchmarkID = benchmarkID;
}
console.log(
" ",
`[${Math.round(elapsed)}ms]`,
"Executed",
JSON.stringify(benchmarkID),
"in",
runtime,
runtimeVersion,
"(" + file + ")",
);
const { id: benchmarkResultID } = db
.query(
`
INSERT INTO benchmarkResults (results) VALUES (?) RETURNING id
`,
)
.get(JSON.stringify(benchmarks));
db.run(
`
INSERT INTO runs (runtime, runtimeVersion, benchmarkID, timestamp, elapsed, benchmarkResultID) VALUES (
?, ?, ?, ?, ?, ?)`,
runtime,
runtimeVersion,
benchmarkID,
timestamp,
elapsed,
benchmarkResultID,
);
}
}

View File

@@ -1,22 +0,0 @@
import * as Mitata from "../node_modules/mitata/src/cli.mjs";
import process from "node:process";
const asJSON = !!process?.env?.BENCHMARK_RUNNER;
export function run(opts = {}) {
opts ??= {};
if (asJSON) {
opts.json = true;
}
return Mitata.run(opts);
}
export function bench(name, fn) {
return Mitata.bench(name, fn);
}
export function group(name, fn) {
return Mitata.group(name, fn);
}

View File

@@ -1,4 +1,4 @@
import { bench, run } from "./runner.mjs";
import { bench, run } from "mitata";
import { SHA512 } from "bun";
bench('SHA512.hash("hello world")', () => {

View File

@@ -1,4 +1,4 @@
import { bench, run } from "./runner.mjs";
import { bench, run } from "mitata";
import { createHash } from "crypto";
bench('createHash("sha256").update("hello world").digest()', () => {

View File

@@ -1,5 +1,5 @@
import { spawnSync } from "bun";
import { bench, run } from "./runner.mjs";
import { bench, run } from "mitata";
var memory = new Uint8Array(128 * 1024 * 1024);
memory.fill(10);

View File

@@ -1,5 +1,5 @@
import { spawnSync } from "child_process";
import { bench, run } from "./runner.mjs";
import { bench, run } from "mitata";
var memory = new Uint8Array(128 * 1024 * 1024);
memory.fill(10);

View File

@@ -1,5 +1,5 @@
import { spawnSync } from "bun";
import { bench, run } from "./runner.mjs";
import { bench, run } from "mitata";
bench("spawnSync echo hi", () => {
spawnSync({ cmd: ["echo", "hi"] });

View File

@@ -1,6 +1,5 @@
// @runtime bun,node,deno
import { spawnSync } from "node:child_process";
import { bench, run } from "./runner.mjs";
import { spawnSync } from "child_process";
import { bench, run } from "mitata";
bench("spawnSync echo hi", () => {
spawnSync("echo", ["hi"], { encoding: "buffer", shell: false });

View File

@@ -1,5 +1,5 @@
import { readdirSync, statSync } from "fs";
import { bench, run } from "./runner.mjs";
import { bench, run } from "mitata";
import { argv } from "process";
const dir = argv.length > 2 ? argv[2] : "/tmp";

View File

@@ -1,4 +1,4 @@
import { run, bench } from "./runner.mjs";
import { run, bench } from "mitata";
var writer = globalThis.Bun ? Bun.stderr.writer() : undefined;
if (writer)

View File

@@ -1,4 +1,4 @@
import { bench, run } from "./runner.mjs";
import { bench, run } from "mitata";
import { StringDecoder } from "string_decoder";
var short = Buffer.from("Hello World!");

View File

@@ -1,4 +1,4 @@
import { bench, run } from "./runner.mjs";
import { bench, run } from "mitata";
var short = "Hello World!";
var shortUTF16 = "Hello World 💕💕💕";

View File

@@ -1,7 +1,7 @@
import { readFileSync } from "fs";
import { dirname } from "path";
import { fileURLToPath } from "url";
import { bench, run, group } from "./runner.mjs";
import { bench, run, group } from "mitata";
import { createRequire } from "module";
const require = createRequire(import.meta.url);
const esbuild_ = require("esbuild/lib/main");

View File

@@ -1,42 +0,0 @@
import { Buffer } from "node:buffer";
import { writeFile } from "node:fs/promises";
import { bench, run } from "./runner.mjs";
var hugeFile = Buffer.alloc(1024 * 1024 * 64);
var medFile = Buffer.alloc(1024 * 1024 * 16);
var humongousFile = Buffer.alloc(1024 * 1024 * 256);
bench(
`fs.writeFile ${new Intl.NumberFormat("en-US", {
style: "unit",
unit: "megabyte",
unitDisplay: "narrow",
}).format(humongousFile.byteLength / 1024 / 1024)}`,
async () => {
await writeFile("/tmp/bun.bench-out.humongousFile.txt" + ((Math.random() * 65432) | 0).toString(16), humongousFile);
},
);
bench(
`fs.writeFile ${new Intl.NumberFormat("en-US", {
style: "unit",
unit: "megabyte",
unitDisplay: "narrow",
}).format(hugeFile.byteLength / 1024 / 1024)}`,
async () => {
await writeFile("/tmp/bun.bench-out.huge.txt" + ((Math.random() * 65432) | 0).toString(16), hugeFile);
},
);
bench(
`fs.writeFile ${new Intl.NumberFormat("en-US", {
style: "unit",
unit: "megabyte",
unitDisplay: "narrow",
}).format(medFile.byteLength / 1024 / 1024)}`,
async () => {
await writeFile("/tmp/bun.bench-out.medium.txt" + ((Math.random() * 65432) | 0).toString(16), medFile);
},
);
await run();

View File

@@ -1,5 +1,5 @@
import { readFileSync, writeFileSync } from "node:fs";
import { bench, run } from "./runner.mjs";
import { bench, run } from "mitata";
var short = "Hello World!";
var shortUTF16 = "Hello World 💕💕💕";

View File

@@ -1,4 +1,4 @@
import { bench, run } from "./runner.mjs";
import { bench, run } from "mitata";
import { write } from "bun";
import { openSync } from "fs";

View File

@@ -1,9 +1,7 @@
// @runtime node, bun, deno
import { bench, run } from "./runner.mjs";
import { Buffer } from "node:buffer";
import { openSync } from "node:fs";
import { writeFile } from "node:fs/promises";
import { writeSync as write } from "node:fs";
import { bench, run } from "mitata";
import { openSync } from "fs";
import { writeFile } from "fs/promises";
import { writeSync as write } from "fs";
bench("writeFile(/tmp/foo.txt, short string)", async () => {
await writeFile("/tmp/foo.txt", "short string", "utf8");

View File

@@ -1,61 +0,0 @@
const { io } = require("socket.io-client");
const port = process.env.PORT || 3000;
const URL = `ws://localhost:${port}`;
const MAX_CLIENTS = 250;
const BATCHSIZE = MAX_CLIENTS / 10;
const BATCH_INTERVAL_IN_MS = 1000;
const EMIT_INTERVAL_IN_MS = 50;
let clientCount = 0;
let lastReport = new Date().getTime();
let packetsSinceLastReport = 0;
const clients = [];
const createClient = () => {
const socket = io(URL);
clients.push(socket);
socket.on("server to client event", () => {
packetsSinceLastReport++;
});
socket.on("disconnect", reason => {
console.log(`disconnect due to ${reason}`);
});
};
let emitInterval = null;
const createClients = () => {
for (let i = 0; i < BATCHSIZE; i++) {
createClient();
clientCount++;
}
if (clientCount < MAX_CLIENTS) {
setTimeout(createClients, BATCH_INTERVAL_IN_MS);
}
if (!emitInterval) {
emitInterval = setInterval(() => {
clients.forEach(socket => {
socket.emit("client to server event", "hello world");
});
}, EMIT_INTERVAL_IN_MS);
}
};
createClients();
const printReport = () => {
const now = new Date().getTime();
const durationSinceLastReport = (now - lastReport) / 1000;
const packetsPerSeconds = (packetsSinceLastReport / durationSinceLastReport).toFixed(2);
console.log(`client count: ${clientCount} ; average packets received per second: ${packetsPerSeconds}`);
packetsSinceLastReport = 0;
lastReport = now;
};
setInterval(printReport, 1000);

View File

@@ -1,13 +0,0 @@
const http = require("http").createServer();
const io = require("socket.io")(http);
const port = process.env.PORT || 3000;
io.on("connection", socket => {
socket.on("client to server event", msg => {
io.emit("server to client event", msg);
});
});
http.listen(port, () => {
console.log(`Socket.IO server running at http://localhost:${port}/`);
});

View File

@@ -1,4 +1,4 @@
import { Database } from "https://deno.land/x/sqlite3@0.9.1/mod.ts";
import { Database } from "https://deno.land/x/sqlite3@0.8.0/mod.ts";
import { run, bench } from "../node_modules/mitata/src/cli.mjs";
const db = new Database("./src/northwind.sqlite");

View File

@@ -1 +0,0 @@
hello.txt

View File

@@ -1,35 +0,0 @@
# HTTP request file upload benchmark
This is a simple benchmark of uploading a file to a web server in different runtimes.
## Usage
Generate a file to upload (default is `hello.txt`):
```bash
bun generate-file.js
```
Run the server:
```bash
node server-node.mjs
```
Run the benchmark in bun:
```bash
bun stream-file-bun.js
```
Run the benchmark in node:
```bash
node stream-file-node.mjs
```
Run the benchmark in deno:
```bash
deno run -A stream-file-deno.js
```

File diff suppressed because one or more lines are too long

View File

@@ -1,15 +0,0 @@
import { createServer } from "node:http";
const server = createServer((req, res) => {
var chunkSize = 0;
req.on("data", chunk => {
chunkSize += chunk.byteLength;
});
req.on("end", () => {
console.log("Received", chunkSize, "bytes");
res.end(`${chunkSize}`);
});
});
server.listen(parseInt(process.env.PORT ?? "3000"), () => {
console.log(`http://localhost:${server.address().port}`);
});

View File

@@ -1,9 +0,0 @@
import { file } from "bun";
console.time("stream-file-bun");
const response = await fetch(process.env.URL ?? "http://localhost:3000", {
method: "POST",
body: file(process.env.FILE ?? "hello.txt"),
});
console.timeEnd("stream-file-bun");
console.log("Sent", await response.text(), "bytes");

View File

@@ -1,12 +0,0 @@
const file = await Deno.open(Deno.env.get("FILE") ?? "hello.txt", {
read: true,
});
console.time("stream-file-deno");
const response = await fetch(Deno.env.get("URL") ?? "http://localhost:3000", {
method: "POST",
body: file.readable,
});
console.timeEnd("stream-file-deno");
console.log("Sent", await response.text(), "bytes");

View File

@@ -1,19 +0,0 @@
import { createReadStream } from "node:fs";
import http from "node:http";
console.time("stream-file-node");
createReadStream(process.env.FILE ?? "hello.txt")
.pipe(
http
.request(process.env.URL ?? "http://localhost:3000", {
method: "POST",
})
.on("response", response => {
response.on("data", data => {
console.log("Sent", parseInt(data.toString(), 10), "bytes");
});
}),
)
.on("close", () => {
console.timeEnd("stream-file-node");
});

141
build.zig
View File

@@ -44,6 +44,10 @@ const color_map = std.ComptimeStringMap([]const u8, .{
});
fn addInternalPackages(b: *Build, step: *CompileStep, _: std.mem.Allocator, _: []const u8, target: anytype) !void {
var bun = b.createModule(.{
.source_file = FileSource.relative("src/bun_redirect.zig"),
});
var io: *Module = brk: {
if (target.isDarwin()) {
break :brk b.createModule(.{
@@ -61,6 +65,7 @@ fn addInternalPackages(b: *Build, step: *CompileStep, _: std.mem.Allocator, _: [
};
step.addModule("async_io", io);
step.addModule("bun", bun);
}
const BunBuildOptions = struct {
@@ -71,24 +76,6 @@ const BunBuildOptions = struct {
sizegen: bool = false,
base_path: [:0]const u8 = "",
runtime_js_version: u64 = 0,
fallback_html_version: u64 = 0,
pub fn updateRuntime(this: *BunBuildOptions) anyerror!void {
var runtime_out_file = try std.fs.cwd().openFile("src/runtime.out.js", .{ .mode = .read_only });
const runtime_hash = std.hash.Wyhash.hash(
0,
try runtime_out_file.readToEndAlloc(std.heap.page_allocator, try runtime_out_file.getEndPos()),
);
this.runtime_js_version = runtime_hash;
var fallback_out_file = try std.fs.cwd().openFile("src/fallback.out.js", .{ .mode = .read_only });
const fallback_hash = std.hash.Wyhash.hash(
0,
try fallback_out_file.readToEndAlloc(std.heap.page_allocator, try fallback_out_file.getEndPos()),
);
this.fallback_html_version = fallback_hash;
}
pub fn step(this: BunBuildOptions, b: anytype) *std.build.OptionsStep {
var opts = b.addOptions();
opts.addOption(@TypeOf(this.canary), "is_canary", this.canary);
@@ -97,8 +84,6 @@ const BunBuildOptions = struct {
opts.addOption(@TypeOf(this.bindgen), "bindgen", this.bindgen);
opts.addOption(@TypeOf(this.sizegen), "sizegen", this.sizegen);
opts.addOption(@TypeOf(this.base_path), "base_path", this.base_path);
opts.addOption(@TypeOf(this.runtime_js_version), "runtime_js_version", this.runtime_js_version);
opts.addOption(@TypeOf(this.fallback_html_version), "fallback_html_version", this.fallback_html_version);
return opts;
}
};
@@ -125,6 +110,28 @@ const fmt = struct {
}
};
fn updateRuntime() anyerror!void {
var runtime_out_file = try std.fs.cwd().openFile("src/runtime.out.js", .{ .mode = .read_only });
const runtime_hash = std.hash.Wyhash.hash(
0,
try runtime_out_file.readToEndAlloc(std.heap.page_allocator, try runtime_out_file.getEndPos()),
);
const runtime_version_file = std.fs.cwd().createFile("src/runtime.version", .{ .truncate = true }) catch std.debug.panic("Failed to create src/runtime.version", .{});
defer runtime_version_file.close();
runtime_version_file.writer().print("{any}", .{fmt.hexInt(runtime_hash)}) catch unreachable;
var fallback_out_file = try std.fs.cwd().openFile("src/fallback.out.js", .{ .mode = .read_only });
const fallback_hash = std.hash.Wyhash.hash(
0,
try fallback_out_file.readToEndAlloc(std.heap.page_allocator, try fallback_out_file.getEndPos()),
);
const fallback_version_file = std.fs.cwd().createFile("src/fallback.version", .{ .truncate = true }) catch std.debug.panic("Failed to create src/fallback.version", .{});
fallback_version_file.writer().print("{any}", .{fmt.hexInt(fallback_hash)}) catch unreachable;
fallback_version_file.close();
}
var x64 = "x64";
var optimize: std.builtin.OptimizeMode = undefined;
@@ -192,6 +199,8 @@ pub fn build(b: *Build) !void {
else
"root.zig";
updateRuntime() catch {};
const min_version: std.builtin.Version = if (target.getOsTag() != .freestanding)
target.getOsVersionMin().semver
else
@@ -267,8 +276,6 @@ pub fn build(b: *Build) !void {
obj.target.cpu_model = .{ .explicit = &std.Target.aarch64.cpu.generic };
}
try default_build_options.updateRuntime();
// we have to dump to stderr because stdout is read by zls
std.io.getStdErr().writer().print("Build {s} v{} - v{} ({s})\n", .{
triplet,
@@ -452,9 +459,38 @@ pub fn build(b: *Build) !void {
}
try configureObjectStep(b, headers_obj, @TypeOf(target), target, obj.main_pkg_path.?);
try linkObjectFiles(b, headers_obj, target);
headers_step.dependOn(&headers_obj.step);
headers_obj.addOptions("build_options", default_build_options.step(b));
// var iter = headers_obj.modules.iterator();
// while (iter.next()) |item| {
// const module = @ptrCast(*Module, item.value_ptr);
// }
// // while (headers_obj.modules.)
// for (headers_obj.packages.items) |pkg_| {
// const pkg: std.build.Pkg = pkg_;
// if (std.mem.eql(u8, pkg.name, "clap")) continue;
// var test_ = b.addTestSource(pkg.source);
// b
// .test_.setMainPkgPath(obj.main_pkg_path.?);
// try configureObjectStep(b, test_, @TypeOf(target), target, obj.main_pkg_path.?);
// try linkObjectFiles(b, test_, target);
// test_.addOptions("build_options", default_build_options.step(b));
// if (pkg.dependencies) |children| {
// test_.packages = std.ArrayList(std.build.Pkg).init(b.allocator);
// try test_.packages.appendSlice(children);
// }
// var before = b.addLog("\x1b[" ++ color_map.get("magenta").? ++ "\x1b[" ++ color_map.get("b").? ++ "[{s} tests]" ++ "\x1b[" ++ color_map.get("d").? ++ " ----\n\n" ++ "\x1b[0m", .{pkg.name});
// var after = b.addLog("\x1b[" ++ color_map.get("d").? ++ "---\n\n" ++ "\x1b[0m", .{});
// headers_step.dependOn(&before.step);
// headers_step.dependOn(&test_.step);
// headers_step.dependOn(&after.step);
// }
}
b.default_step.dependOn(obj_step);
@@ -462,6 +498,67 @@ pub fn build(b: *Build) !void {
pub var original_make_fn: ?*const fn (step: *std.build.Step) anyerror!void = null;
// Due to limitations in std.build.Builder
// we cannot use this with debugging
// so I am leaving this here for now, with the eventual intent to switch to std.build.Builder
// but it is dead code
pub fn linkObjectFiles(b: *Build, obj: *CompileStep, target: anytype) !void {
if (target.getOsTag() == .freestanding)
return;
var dirs_to_search = std.BoundedArray([]const u8, 32).init(0) catch unreachable;
const arm_brew_prefix: []const u8 = "/opt/homebrew";
const x86_brew_prefix: []const u8 = "/usr/local";
try dirs_to_search.append(b.env_map.get("BUN_DEPS_OUT_DIR") orelse b.env_map.get("BUN_DEPS_DIR") orelse @as([]const u8, b.pathFromRoot("src/deps")));
if (target.getOsTag() == .macos) {
if (target.getCpuArch().isAARCH64()) {
try dirs_to_search.append(comptime arm_brew_prefix ++ "/opt/icu4c/lib/");
} else {
try dirs_to_search.append(comptime x86_brew_prefix ++ "/opt/icu4c/lib/");
}
}
if (b.env_map.get("JSC_LIB")) |jsc| {
try dirs_to_search.append(jsc);
}
var added = std.AutoHashMap(u64, void).init(b.allocator);
const files_we_care_about = std.ComptimeStringMap([]const u8, .{
.{ "libmimalloc.o", "libmimalloc.o" },
.{ "libz.a", "libz.a" },
.{ "libarchive.a", "libarchive.a" },
.{ "libssl.a", "libssl.a" },
.{ "picohttpparser.o", "picohttpparser.o" },
.{ "libcrypto.boring.a", "libcrypto.boring.a" },
.{ "libicuuc.a", "libicuuc.a" },
.{ "libicudata.a", "libicudata.a" },
.{ "libicui18n.a", "libicui18n.a" },
.{ "libJavaScriptCore.a", "libJavaScriptCore.a" },
.{ "libWTF.a", "libWTF.a" },
.{ "libbmalloc.a", "libbmalloc.a" },
.{ "liblolhtml.a", "liblolhtml.a" },
.{ "uSockets.a", "uSockets.a" },
});
for (dirs_to_search.slice()) |deps_path| {
var deps_dir = std.fs.cwd().openIterableDir(deps_path, .{}) catch continue;
var iterator = deps_dir.iterate();
obj.addIncludePath(deps_path);
obj.addLibraryPath(deps_path);
while (iterator.next() catch null) |entr| {
const entry: std.fs.IterableDir.Entry = entr;
if (files_we_care_about.get(entry.name)) |obj_name| {
var has_added = try added.getOrPut(std.hash.Wyhash.hash(0, obj_name));
if (!has_added.found_existing) {
var paths = [_][]const u8{ deps_path, obj_name };
obj.addObjectFile(try std.fs.path.join(b.allocator, &paths));
}
}
}
}
}
pub fn configureObjectStep(b: *std.build.Builder, obj: *CompileStep, comptime Target: type, target: Target, main_pkg_path: []const u8) !void {
obj.setMainPkgPath(main_pkg_path);

BIN
bun.lockb

Binary file not shown.

View File

@@ -119,7 +119,7 @@ _bun_completions() {
--jsx-runtime)
COMPREPLY=( $(compgen -W "automatic classic" -- "${cur_word}") );
return;;
--target)
--platform)
COMPREPLY=( $(compgen -W "browser node bun" -- "${cur_word}") );
return;;
-l|--loader)

View File

@@ -1,988 +0,0 @@
This page is intended as an introduction to working with binary data in JavaScript. Bun implements a number of data types and utilities for working with binary data, most of which are Web-standard. Any Bun-specific APIs will be noted as such.
Below is a quick "cheat sheet" that doubles as a table of contents. Click an item in the left column to jump to that section.
{% table %}
---
- [`TypedArray`](#typedarray)
- A family of classes that provide an `Array`-like interface for interacting with binary data. Includes `Uint8Array`, `Uint16Array`, `Int8Array`, and more.
---
- [`Buffer`](#buffer)
- A subclass of `Uint8Array` that implements a wide range of convenience methods. Unlike the other elements in this table, this is a Node.js API (which Bun implements). It can't be used in the browser.
---
- [`DataView`](#dataview)
- A class that provides a `get`/`set` API for reading and writing numbers of various sizes at a particular byte offset in an `ArrayBuffer`. Often used for reading or writing binary protocols.
---
- [`Blob`](#blob)
- A readonly blob of binary data usually representing a file. Has a MIME `type`, a `size`, and methods for converting to `ArrayBuffer`, `ReadableStream`, and string.
---
<!-- - [`File`](#file)
- _Browser only_. A subclass of `Blob` that represents a file. Has a `name` and `lastModified` timestamp. There is experimental support in Node.js v20; Bun does not support `File` yet; most of its functionality is provided by `BunFile`.
--- -->
- [`BunFile`](#bunfile)
- _Bun only_. A subclass of `Blob` that represents a lazily-loaded file on disk. Created with `Bun.file(path)`.
{% /table %}
## `ArrayBuffer` and views
Until 2009, there was no language-native way to store and manipulate binary data in JavaScript. ECMAScript v5 introduced a range of new mechanisms for this. The most fundamental building block is `ArrayBuffer`, a simple data structure that represents a sequence of bytes in memory.
```ts
// this buffer can store 8 bytes
const buf = new ArrayBuffer(8);
```
Despite the name, it isn't an array and supports none of the array methods and operators one might expect. In fact, there is no way to directly read or write values from an `ArrayBuffer`. There's very little you can do with one except check its size and create "slices" from it.
```ts
const buf = new ArrayBuffer(8);
buf.byteLength; // => 8
const slice = buf.slice(0, 4); // returns new ArrayBuffer
slice.byteLength; // => 4
```
To do anything interesting we need a construct known as a "view". A view is a class that _wraps_ an `ArrayBuffer` instance and lets you read and manipulate the underlying data. There are two types of views: _typed arrays_ and `DataView`.
### `DataView`
The `DataView` class is a lower-level interface for reading and manipulating the data in an `ArrayBuffer`.
Below we create a new `DataView` and set the first byte to 3.
```ts
const buf = new ArrayBuffer(4);
// [0x0, 0x0, 0x0, 0x0]
const dv = new DataView(buf);
dv.setUint8(0, 3); // write value 3 at byte offset 0
dv.getUint8(0); // => 3
// [0x3, 0x0, 0x0, 0x0]
```
Now let's write a `Uint16` at byte offset `1`. This requires two bytes. We're using the value `513`, which is `2 * 256 + 1`; in bytes, that's `00000010 00000001`.
```ts
dv.setUint16(1, 513);
// [0x3, 0x2, 0x1, 0x0]
console.log(dv.getUint16(1)); // => 513
```
We've now assigned a value to the first three bytes in our underlying `ArrayBuffer`. Even though the second and third bytes were written using `setUint16()`, we can still read each component byte individually using `getUint8()`.
```ts
console.log(dv.getUint8(1)); // => 2
console.log(dv.getUint8(2)); // => 1
```
Attempting to write a value that requires more space than is available in the underlying `ArrayBuffer` will cause an error. Below we attempt to write a `Float64` (which requires 8 bytes) at byte offset `0`, but there are only four total bytes in the buffer.
```ts
dv.setFloat64(0, 3.1415);
// ^ RangeError: Out of bounds access
```
The following methods are available on `DataView`:
{% table %}
- Getters
- Setters
---
- [`getBigInt64()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/DataView/getBigInt64)
- [`setBigInt64()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/DataView/setBigInt64)
---
- [`getBigUint64()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/DataView/getBigUint64)
- [`setBigUint64()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/DataView/setBigUint64)
---
- [`getFloat32()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/DataView/getFloat32)
- [`setFloat32()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/DataView/setFloat32)
---
- [`getFloat64()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/DataView/getFloat64)
- [`setFloat64()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/DataView/setFloat64)
---
- [`getInt16()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/DataView/getInt16)
- [`setInt16()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/DataView/setInt16)
---
- [`getInt32()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/DataView/getInt32)
- [`setInt32()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/DataView/setInt32)
---
- [`getInt8()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/DataView/getInt8)
- [`setInt8()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/DataView/setInt8)
---
- [`getUint16()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/DataView/getUint16)
- [`setUint16()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/DataView/setUint16)
---
- [`getUint32()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/DataView/getUint32)
- [`setUint32()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/DataView/setUint32)
---
- [`getUint8()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/DataView/getUint8)
- [`setUint8()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/DataView/setUint8)
{% /table %}
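One detail the table doesn't capture: each multi-byte getter and setter also accepts an optional trailing `littleEndian` boolean (big-endian by default) that controls the byte order. A minimal sketch of the difference:
```ts
const dv = new DataView(new ArrayBuffer(2));

dv.setUint16(0, 513); // big-endian by default; bytes are [0x2, 0x1]
dv.getUint16(0); // => 513
dv.getUint16(0, true); // => 258, the same bytes read as little-endian
```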
### `TypedArray`
Typed arrays are a family of classes that provide an `Array`-like interface for interacting with data in an `ArrayBuffer`. Whereas a `DataView` lets you write numbers of varying size at a particular offset, a `TypedArray` interprets the underlying bytes as an array of numbers, each of a fixed size.
{% callout %}
**Note** — It's common to refer to this family of classes collectively by their shared superclass `TypedArray`. This class is _internal_ to JavaScript; you can't directly create instances of it, and `TypedArray` is not defined in the global scope. Think of it as an `interface` or an abstract class.
{% /callout %}
```ts
const buffer = new ArrayBuffer(3);
const arr = new Uint8Array(buffer);
// contents are initialized to zero
console.log(arr); // Uint8Array(3) [0, 0, 0]
// assign values like an array
arr[0] = 0;
arr[1] = 10;
arr[2] = 255;
arr[3] = 255; // no-op, out of bounds
```
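As the callout above notes, `TypedArray` itself isn't exposed as a global. A quick sketch that demonstrates this:
```ts
"TypedArray" in globalThis; // => false, there is no global with this name

// the shared superclass is still reachable via the prototype chain
const TypedArrayCtor = Object.getPrototypeOf(Uint8Array);
Object.getPrototypeOf(Int8Array) === TypedArrayCtor; // => true
new Uint8Array(1) instanceof TypedArrayCtor; // => true
```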
While an `ArrayBuffer` is a generic sequence of bytes, these typed array classes interpret the bytes as an array of numbers of a given byte size.
In the byte-interpretation table later in this section, the top row contains the raw bytes, and the later rows show how those bytes are interpreted when _viewed_ using different typed array classes.
The following classes are typed arrays, along with a description of how they interpret the bytes in an `ArrayBuffer`:
{% table %}
- Class
- Description
---
- [`Uint8Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array)
- Every one (1) byte is interpreted as an unsigned 8-bit integer. Range 0 to 255.
---
- [`Uint16Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint16Array)
- Every two (2) bytes are interpreted as an unsigned 16-bit integer. Range 0 to 65535.
---
- [`Uint32Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint32Array)
- Every four (4) bytes are interpreted as an unsigned 32-bit integer. Range 0 to 4294967295.
---
- [`Int8Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Int8Array)
- Every one (1) byte is interpreted as a signed 8-bit integer. Range -128 to 127.
---
- [`Int16Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Int16Array)
- Every two (2) bytes are interpreted as a signed 16-bit integer. Range -32768 to 32767.
---
- [`Int32Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Int32Array)
- Every four (4) bytes are interpreted as a signed 32-bit integer. Range -2147483648 to 2147483647.
---
- [`Float32Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Float32Array)
- Every four (4) bytes are interpreted as a 32-bit floating point number. Range -3.4e38 to 3.4e38.
---
- [`Float64Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Float64Array)
- Every eight (8) bytes are interpreted as a 64-bit floating point number. Range -1.7e308 to 1.7e308.
---
- [`BigInt64Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/BigInt64Array)
- Every eight (8) bytes are interpreted as a signed `BigInt`. Range -9223372036854775808 to 9223372036854775807 (though `BigInt` is capable of representing larger numbers).
---
- [`BigUint64Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/BigUint64Array)
- Every eight (8) bytes are interpreted as an unsigned `BigInt`. Range 0 to 18446744073709551615 (though `BigInt` is capable of representing larger numbers).
---
- [`Uint8ClampedArray`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8ClampedArray)
- Same as `Uint8Array`, but automatically "clamps" to the range 0-255 when assigning a value to an element.
{% /table %}
The table below demonstrates how the bytes in an `ArrayBuffer` are interpreted when viewed using different typed array classes.
{% table %}
---
- `ArrayBuffer`
- `00000000`
- `00000001`
- `00000010`
- `00000011`
- `00000100`
- `00000101`
- `00000110`
- `00000111`
---
- `Uint8Array`
- 0
- 1
- 2
- 3
- 4
- 5
- 6
- 7
---
- `Uint16Array`
- 256 (`1 * 256 + 0`) {% colspan=2 %}
- 770 (`3 * 256 + 2`) {% colspan=2 %}
- 1284 (`5 * 256 + 4`) {% colspan=2 %}
- 1798 (`7 * 256 + 6`) {% colspan=2 %}
---
- `Uint32Array`
- 50462976 {% colspan=4 %}
- 117835012 {% colspan=4 %}
---
- `BigUint64Array`
- 506097522914230528n {% colspan=8 %}
{% /table %}
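To make the table concrete, here's a small sketch that views the same eight bytes through several of these classes (the values shown assume a little-endian platform, which is what the table depicts):
```ts
const bytes = new Uint8Array([0, 1, 2, 3, 4, 5, 6, 7]);

new Uint16Array(bytes.buffer); // Uint16Array(4) [ 256, 770, 1284, 1798 ]
new Uint32Array(bytes.buffer); // Uint32Array(2) [ 50462976, 117835012 ]
new BigUint64Array(bytes.buffer); // BigUint64Array(1) [ 506097522914230528n ]
```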
To create a typed array from a pre-defined `ArrayBuffer`:
```ts
// create typed array from ArrayBuffer
const buf = new ArrayBuffer(10);
const arr = new Uint8Array(buf);
arr[0] = 30;
arr[1] = 60;
// the remaining elements are initialized to zero
console.log(arr); // => Uint8Array(10) [ 30, 60, 0, 0, 0, 0, 0, 0, 0, 0 ];
```
If we tried to instantiate a `Uint32Array` from this same `ArrayBuffer`, we'd get an error.
```ts
const buf = new ArrayBuffer(10);
const arr = new Uint32Array(buf);
// ^ RangeError: ArrayBuffer length minus the byteOffset
// is not a multiple of the element size
```
A `Uint32` value requires four bytes (32 bits). Because the `ArrayBuffer` is 10 bytes long, there's no way to cleanly divide its contents into 4-byte chunks.
To fix this, we can create a typed array over a particular "slice" of an `ArrayBuffer`. The `Uint32Array` below only "views" the _first_ 8 bytes of the underlying `ArrayBuffer`. To achieve this, we specify a `byteOffset` of `0` and a `length` of `2`, which indicates the number of `Uint32` numbers we want our array to hold.
```ts
// create typed array from ArrayBuffer slice
const buf = new ArrayBuffer(10);
const arr = new Uint32Array(buf, 0, 2);
/*
buf _ _ _ _ _ _ _ _ _ _ 10 bytes
arr [_______,_______] 2 4-byte elements
*/
arr.byteOffset; // 0
arr.length; // 2
```
You don't need to explicitly create an `ArrayBuffer` instance; you can instead directly specify a length in the typed array constructor:
```ts
const arr2 = new Uint8Array(5);
// all elements are initialized to zero
// => Uint8Array(5) [0, 0, 0, 0, 0]
```
Typed arrays can also be instantiated directly from an array of numbers, or another typed array:
```ts
// from an array of numbers
const arr1 = new Uint8Array([0, 1, 2, 3, 4, 5, 6, 7]);
arr1[0]; // => 0;
arr1[7]; // => 7;
// from another typed array
const arr2 = new Uint8Array(arr1);
```
Broadly speaking, typed arrays provide the same methods as regular arrays, with a few exceptions. For example, `push` and `pop` are not available on typed arrays, because they would require resizing the underlying `ArrayBuffer`.
```ts
const arr = new Uint8Array([0, 1, 2, 3, 4, 5, 6, 7]);
// supports common array methods
arr.filter(n => n > 4); // Uint8Array(3) [5, 6, 7]
arr.map(n => n * 2); // Uint8Array(8) [0, 2, 4, 6, 8, 10, 12, 14]
arr.reduce((acc, n) => acc + n, 0); // 28
arr.forEach(n => console.log(n)); // 0 1 2 3 4 5 6 7
arr.every(n => n < 10); // true
arr.find(n => n > 5); // 6
arr.includes(5); // true
arr.indexOf(5); // 5
```
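Typed arrays also add a few methods that plain arrays lack. Two worth knowing are `subarray()`, which returns a new typed array over the _same_ memory, and `slice()`, which copies. A minimal sketch:
```ts
const arr = new Uint8Array([0, 1, 2, 3, 4, 5, 6, 7]);

const view = arr.subarray(0, 4); // shares the underlying ArrayBuffer
const copy = arr.slice(0, 4); // copies into a new ArrayBuffer

view[0] = 100;
arr[0]; // => 100, writes through the view are visible
copy[0]; // => 0, the copy is unaffected
```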
Refer to the [MDN documentation](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/TypedArray) for more information on the properties and methods of typed arrays.
### `Uint8Array`
It's worth specifically highlighting `Uint8Array`, as it represents a classic "byte array"—a sequence of 8-bit unsigned integers between 0 and 255. This is the most common typed array you'll encounter in JavaScript.
It is the return value of [`TextEncoder#encode`](https://developer.mozilla.org/en-US/docs/Web/API/TextEncoder), and the input type of [`TextDecoder#decode`](https://developer.mozilla.org/en-US/docs/Web/API/TextDecoder), two utility classes designed to translate between strings and various binary encodings, most notably `"utf-8"`.
```ts
const encoder = new TextEncoder();
const bytes = encoder.encode("hello world");
// => Uint8Array(11) [ 104, 101, 108, 108, 111, 32, 119, 111, 114, 108, 100 ]
const decoder = new TextDecoder();
const text = decoder.decode(bytes);
// => hello world
```
### `Buffer`
Bun implements `Buffer`, a Node.js API for working with binary data that pre-dates the introduction of typed arrays in the JavaScript spec. It has since been re-implemented as a subclass of `Uint8Array`. It provides a wide range of methods, including several Array-like and `DataView`-like methods.
```ts
const buf = Buffer.from("hello world");
// => Buffer(11) [ 104, 101, 108, 108, 111, 32, 119, 111, 114, 108, 100 ]
buf.length; // => 11
buf[0]; // => 104, ascii for 'h'
buf.writeUInt8(72, 0); // => ascii for 'H'
console.log(buf.toString());
// => Hello world
```
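For instance, the `DataView`-like methods read and write multi-byte integers at a given offset. A short sketch (not exhaustive):
```ts
const buf = Buffer.alloc(8);
buf.writeUInt32BE(0xdeadbeef, 0); // big-endian 32-bit integer at offset 0
buf.readUInt32BE(0); // => 3735928559 (0xdeadbeef)
buf.writeUInt16LE(1234, 4); // little-endian 16-bit integer at offset 4
buf.readUInt16LE(4); // => 1234
```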
For complete documentation, refer to the [Node.js documentation](https://nodejs.org/api/buffer.html).
## `Blob`
`Blob` is a Web API commonly used for representing files. `Blob` was initially implemented in browsers (unlike `ArrayBuffer` which is part of JavaScript itself), but it is now supported in Node and Bun.
It isn't common to directly create `Blob` instances. More often, you'll receive instances of `Blob` from an external source (like an `<input type="file">` element in the browser) or library. That said, it is possible to create a `Blob` from one or more string or binary "blob parts".
```ts
const blob = new Blob(["<html>Hello</html>"], {
type: "text/html",
});
blob.type; // => text/html
blob.size; // => 18
```
These parts can be `string`, `ArrayBuffer`, `TypedArray`, `DataView`, or other `Blob` instances. The blob parts are concatenated together in the order they are provided.
```ts
const blob = new Blob([
"<html>",
new Blob(["<body>"]),
new Uint8Array([104, 101, 108, 108, 111]), // "hello" in binary
"</body></html>",
]);
```
The contents of a `Blob` can be asynchronously read in various formats.
```ts
await blob.text(); // => <html><body>hello</body></html>
await blob.arrayBuffer(); // => ArrayBuffer (copies contents)
blob.stream(); // => ReadableStream
```
### `BunFile`
`BunFile` is a subclass of `Blob` used to represent a lazily-loaded file on disk. Like `File`, it adds a `name` and `lastModified` property. Unlike `File`, it does not require the file to be loaded into memory.
```ts
const file = Bun.file("index.txt");
// => BunFile
```
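No bytes are read from disk until the contents are requested. The sketch below assumes an `index.txt` file exists in the working directory.
```ts
const file = Bun.file("index.txt");
file.size; // size in bytes
file.type; // MIME type, inferred from the extension
await file.text(); // read the contents as a string
await file.arrayBuffer(); // ...or as an ArrayBuffer
```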
### `File`
{% callout %}
Browser only. Experimental support in Node.js 20.
{% /callout %}
[`File`](https://developer.mozilla.org/en-US/docs/Web/API/File) is a subclass of `Blob` that adds a `name` and `lastModified` property. It's commonly used in the browser to represent files uploaded via a `<input type="file">` element. Node.js and Bun implement `File`.
```ts
// on browser!
// <input type="file" id="file" />
const files = document.getElementById("file").files;
// => File[]
```
```ts
const file = new File(["<html>Hello</html>"], "index.html", {
type: "text/html",
});
```
Refer to the [MDN documentation](https://developer.mozilla.org/en-US/docs/Web/API/Blob) for complete information.
## Streams
Streams are an important abstraction for working with binary data without loading it all into memory at once. They are commonly used for reading and writing files, sending and receiving network requests, and processing large amounts of data.
Bun implements the Web APIs [`ReadableStream`](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream) and [`WritableStream`](https://developer.mozilla.org/en-US/docs/Web/API/WritableStream).
{% callout %}
Bun also implements the `node:stream` module, including [`Readable`](https://nodejs.org/api/stream.html#stream_readable_streams), [`Writable`](https://nodejs.org/api/stream.html#stream_writable_streams), and [`Duplex`](https://nodejs.org/api/stream.html#stream_duplex_and_transform_streams). For complete documentation, refer to the Node.js docs.
{% /callout %}
To create a simple readable stream:
```ts
const stream = new ReadableStream({
start(controller) {
controller.enqueue("hello");
controller.enqueue("world");
controller.close();
},
});
```
The contents of this stream can be read chunk-by-chunk with `for await` syntax.
```ts
for await (const chunk of stream) {
console.log(chunk);
// => "hello"
// => "world"
}
```
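The same stream can also be consumed with a reader, which gives finer-grained control over when each chunk is pulled. A roughly equivalent sketch:
```ts
const reader = stream.getReader();
while (true) {
  const { done, value } = await reader.read();
  if (done) break;
  console.log(value); // "hello", then "world"
}
```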
For a more complete discussion of streams in Bun, see [API > Streams](/docs/api/streams).
## Conversion
Converting from one binary format to another is a common task. This section is intended as a reference.
### From `ArrayBuffer`
Since an `ArrayBuffer` stores the data that underlies other binary structures like `TypedArray`, the snippets below are not _converting_ from `ArrayBuffer` to another format. Instead, they are _creating_ a new instance using the underlying data.
#### To `TypedArray`
```ts
new Uint8Array(buf);
```
#### To `DataView`
```ts
new DataView(buf);
```
#### To `Buffer`
```ts
// create Buffer over entire ArrayBuffer
Buffer.from(buf);
// create Buffer over a slice of the ArrayBuffer
Buffer.from(buf, 0, 10);
```
#### To `string`
```ts
new TextDecoder().decode(buf);
```
#### To `number[]`
```ts
Array.from(new Uint8Array(buf));
```
#### To `Blob`
```ts
new Blob([buf], { type: "text/plain" });
```
<!-- #### To `File`
```ts
new File([buf], "filename.txt", { type: "text/plain", lastModified: Date.now() });
``` -->
#### To `ReadableStream`
The following snippet creates a `ReadableStream` and enqueues the entire `ArrayBuffer` as a single chunk.
```ts
new ReadableStream({
start(controller) {
controller.enqueue(buf);
controller.close();
},
});
```
{% details summary="With chunking" %}
To stream the `ArrayBuffer` in chunks, use a `Uint8Array` view and enqueue each chunk.
```ts
const view = new Uint8Array(buf);
const chunkSize = 1024;
new ReadableStream({
start(controller) {
for (let i = 0; i < view.length; i += chunkSize) {
controller.enqueue(view.slice(i, i + chunkSize));
}
controller.close();
},
});
```
{% /details %}
### From `TypedArray`
#### To `ArrayBuffer`
This retrieves the underlying `ArrayBuffer`. Note that a `TypedArray` can be a view of a _slice_ of the underlying buffer, so the sizes may differ.
```ts
arr.buffer;
```
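If you need an `ArrayBuffer` containing _only_ the bytes the view covers, copy that range explicitly:
```ts
arr.buffer.slice(arr.byteOffset, arr.byteOffset + arr.byteLength);
```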
#### To `DataView`
This creates a `DataView` over the same byte range as the `TypedArray`.
```ts
new DataView(arr.buffer, arr.byteOffset, arr.byteLength);
```
#### To `Buffer`
```ts
Buffer.from(arr);
```
#### To `string`
```ts
new TextDecoder().decode(arr);
```
#### To `number[]`
```ts
Array.from(arr);
```
#### To `Blob`
```ts
new Blob([arr.buffer], { type: "text/plain" });
```
<!-- #### To `File`
```ts
new File([arr.buffer], "filename.txt", { type: "text/plain", lastModified: Date.now() });
``` -->
#### To `ReadableStream`
```ts
new ReadableStream({
start(controller) {
controller.enqueue(arr);
controller.close();
},
});
```
{% details summary="With chunking" %}
To stream the data in chunks, split the `TypedArray` into chunks and enqueue each one individually.
```ts
const chunkSize = 1024;
new ReadableStream({
  start(controller) {
    for (let i = 0; i < arr.length; i += chunkSize) {
      controller.enqueue(arr.slice(i, i + chunkSize));
    }
    controller.close();
  },
});
```
{% /details %}
### From `DataView`
#### To `ArrayBuffer`
```ts
view.buffer;
```
#### To `TypedArray`
This only works if the `byteLength` of the `DataView` is a multiple of the `BYTES_PER_ELEMENT` of the target `TypedArray` subclass.
```ts
new Uint8Array(view.buffer, view.byteOffset, view.byteLength);
new Uint16Array(view.buffer, view.byteOffset, view.byteLength / 2);
new Uint32Array(view.buffer, view.byteOffset, view.byteLength / 4);
// etc...
```
#### To `Buffer`
```ts
Buffer.from(view.buffer, view.byteOffset, view.byteLength);
```
#### To `string`
```ts
new TextDecoder().decode(view);
```
#### To `number[]`
```ts
Array.from(view);
```
#### To `Blob`
```ts
new Blob([view.buffer], { type: "text/plain" });
```
<!-- #### To `File`
```ts
new File([view.buffer], "filename.txt", { type: "text/plain", lastModified: Date.now() });
``` -->
#### To `ReadableStream`
```ts
new ReadableStream({
start(controller) {
controller.enqueue(view.buffer);
controller.close();
},
});
```
{% details summary="With chunking" %}
To stream the data in chunks, split the byte range covered by the `DataView` into chunks and enqueue each one individually.
```ts
const bytes = new Uint8Array(view.buffer, view.byteOffset, view.byteLength);
const chunkSize = 1024;
new ReadableStream({
  start(controller) {
    for (let i = 0; i < bytes.length; i += chunkSize) {
      controller.enqueue(bytes.slice(i, i + chunkSize));
    }
    controller.close();
  },
});
```
{% /details %}
### From `Buffer`
#### To `ArrayBuffer`
```ts
buf.buffer;
```
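A `Buffer` is often a view over a larger, pooled `ArrayBuffer`, so `buf.buffer` may contain unrelated bytes. To copy only the bytes the `Buffer` covers:
```ts
buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
```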
#### To `TypedArray`
```ts
new Uint8Array(buf);
```
#### To `DataView`
```ts
new DataView(buf.buffer, buf.byteOffset, buf.byteLength);
```
#### To `string`
```ts
buf.toString();
```
#### To `number[]`
```ts
Array.from(buf);
```
#### To `Blob`
```ts
new Blob([buf], { type: "text/plain" });
```
<!-- #### To `File`
```ts
new File([buf], "filename.txt", { type: "text/plain", lastModified: Date.now() });
``` -->
#### To `ReadableStream`
```ts
new ReadableStream({
start(controller) {
controller.enqueue(buf);
controller.close();
},
});
```
{% details summary="With chunking" %}
To stream the data in chunks, split the `Buffer` into chunks and enqueue each one individually.
```ts
const chunkSize = 1024;
new ReadableStream({
  start(controller) {
    for (let i = 0; i < buf.length; i += chunkSize) {
      controller.enqueue(buf.subarray(i, i + chunkSize));
    }
    controller.close();
  },
});
```
{% /details %}
### From `Blob`
#### To `ArrayBuffer`
The `Blob` class provides a convenience method for this purpose.
```ts
await blob.arrayBuffer();
```
#### To `TypedArray`
```ts
new Uint8Array(await blob.arrayBuffer());
```
#### To `DataView`
```ts
new DataView(await blob.arrayBuffer());
```
#### To `Buffer`
```ts
Buffer.from(await blob.arrayBuffer());
```
#### To `string`
```ts
await blob.text();
```
#### To `number[]`
```ts
Array.from(new Uint8Array(await blob.arrayBuffer()));
```
#### To `ReadableStream`
```ts
blob.stream();
```
<!-- ### From `File` -->
### From `ReadableStream`
It's common to use [`Response`](https://developer.mozilla.org/en-US/docs/Web/API/Response) as a convenient intermediate representation to make it easier to convert `ReadableStream` to other formats.
```ts
stream; // ReadableStream
const buffer = await new Response(stream).arrayBuffer();
```
However, this approach is verbose and adds unnecessary overhead. Bun implements a set of optimized convenience functions for converting a `ReadableStream` to various binary formats.
#### To `ArrayBuffer`
```ts
// with Response
await new Response(stream).arrayBuffer();
// with Bun function
await Bun.readableStreamToArrayBuffer(stream);
```
#### To `TypedArray`
```ts
// with Response
const buf = await new Response(stream).arrayBuffer();
new Uint8Array(buf);
// with Bun function
new Uint8Array(await Bun.readableStreamToArrayBuffer(stream));
```
#### To `DataView`
```ts
// with Response
const buf = await new Response(stream).arrayBuffer();
new DataView(buf);
// with Bun function
new DataView(await Bun.readableStreamToArrayBuffer(stream));
```
#### To `Buffer`
```ts
// with Response
const buf = await new Response(stream).arrayBuffer();
Buffer.from(buf);
// with Bun function
Buffer.from(await Bun.readableStreamToArrayBuffer(stream));
```
#### To `string`
```ts
// with Response
await new Response(stream).text();
// with Bun function
await Bun.readableStreamToString(stream);
```
#### To `number[]`
```ts
// with Response
const buf = await new Response(stream).arrayBuffer();
Array.from(new Uint8Array(buf));
// with Bun function
Array.from(new Uint8Array(await Bun.readableStreamToArrayBuffer(stream)));
```
Bun provides a utility for resolving a `ReadableStream` to an array of its chunks. Each chunk may be a string, typed array, or `ArrayBuffer`.
```ts
// with Bun function
await Bun.readableStreamToArray(stream);
```
#### To `Blob`
```ts
await new Response(stream).blob();
```
<!-- #### To `File`
```ts
new Response(stream)
.blob()
.then(blob => new File([blob], "filename.txt", { type: "text/plain", lastModified: Date.now() }));
``` -->
#### To `ReadableStream`
To split a `ReadableStream` into two streams that can be consumed independently:
```ts
const [a, b] = stream.tee();
```
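Each branch is an independent `ReadableStream`. For example, both can be read to completion in parallel, here using `Response` as above:
```ts
const [a, b] = stream.tee();
const [textA, textB] = await Promise.all([
  new Response(a).text(),
  new Response(b).text(),
]);
```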
<!-- - Use Buffer
- TextEncoder
- `Bun.ArrayBufferSink`
- ReadableStream
- AsyncIterator
- TypedArray vs ArrayBuffer vs DataView
- Bun.indexOfLine
- “direct” readablestream
- readable stream has assumptions about
- its very generic
- all data is copies and queued
- direct : no queueing
- just a write function
- you can write strings
- more synchronous
- corking works better -->

@@ -1,9 +1,5 @@
{% callout %}
<!-- **Note** — The `Bun.file` and `Bun.write` APIs documented on this page are heavily optimized and represent the recommended way to perform file-system tasks using Bun. Existing Node.js projects may use Bun's [nearly complete](/docs/runtime/nodejs-apis#node_fs) implementation of the [`node:fs`](https://nodejs.org/api/fs.html) module. -->
**Note** — The `Bun.file` and `Bun.write` APIs documented on this page are heavily optimized and represent the recommended way to perform file-system tasks using Bun. For operations that are not yet available with `Bun.file`, such as `mkdir`, you can use Bun's [nearly complete](/docs/runtime/nodejs-apis#node_fs) implementation of the [`node:fs`](https://nodejs.org/api/fs.html) module.
**Note** — The `Bun.file` and `Bun.write` APIs documented on this page are heavily optimized and represent the recommended way to perform file-system tasks using Bun. Existing Node.js projects may use Bun's [nearly complete](/docs/runtime/nodejs-apis#node_fs) implementation of the [`node:fs`](https://nodejs.org/api/fs.html) module.
{% /callout %}
Bun provides a set of optimized APIs for reading and writing files.
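As a quick orientation, reading and writing look like this (a minimal sketch; the file paths are placeholders):
```ts
// lazily reference a file on disk, then load its contents
const input = Bun.file("input.txt");
const text = await input.text();

// write a string (or Blob, BunFile, TypedArray, etc.) to disk
await Bun.write("output.txt", text.toUpperCase());
```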

@@ -80,7 +80,7 @@ Bun implements the following globals.
---
- `BuildMessage`
- `BuildError`
- Bun
- &nbsp;
@@ -194,12 +194,6 @@ Bun implements the following globals.
---
- [`JSON`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON)
- Web
- &nbsp;
---
- [`MessageEvent`](https://developer.mozilla.org/en-US/docs/Web/API/MessageEvent)
- Web
- &nbsp;
@@ -272,7 +266,7 @@ Bun implements the following globals.
---
- `ResolveMessage`
- `ResolveError`
- Bun
- &nbsp;

@@ -1,139 +0,0 @@
{% callout %}
Bun implements the `createHash` and `createHmac` functions from [`node:crypto`](https://nodejs.org/api/crypto.html) in addition to the Bun-native APIs documented below.
{% /callout %}
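For example, the Node.js-style API works as expected (a brief sketch; the HMAC key is a placeholder):
```ts
import { createHash, createHmac } from "node:crypto";

createHash("sha256").update("hello world").digest("hex");
// => "b94d27b9..."

createHmac("sha256", "a-secret-key").update("hello world").digest("hex");
```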
## `Bun.hash`
`Bun.hash` is a collection of utilities for _non-cryptographic_ hashing. Non-cryptographic hashing algorithms are optimized for speed of computation over collision-resistance or security.
The standard `Bun.hash` function uses [Wyhash](https://github.com/wangyi-fudan/wyhash) to generate a 64-bit hash from an input of arbitrary size.
```ts
Bun.hash("some data here");
// 976213160445840
```
The input can be a string, `TypedArray`, `DataView`, `ArrayBuffer`, or `SharedArrayBuffer`.
```ts
const arr = new Uint8Array([1, 2, 3, 4]);
Bun.hash("some data here");
Bun.hash(arr);
Bun.hash(arr.buffer);
Bun.hash(new DataView(arr.buffer));
```
Optionally, an integer seed can be specified as the second parameter.
```ts
Bun.hash("some data here", 1234);
// 1173484059023252
```
Additional hashing algorithms are available as properties on `Bun.hash`. The API is the same for each.
```ts
Bun.hash.wyhash("data", 1234); // equivalent to Bun.hash()
Bun.hash.crc32("data", 1234);
Bun.hash.adler32("data", 1234);
Bun.hash.cityHash32("data", 1234);
Bun.hash.cityHash64("data", 1234);
Bun.hash.murmur32v3("data", 1234);
Bun.hash.murmur64v2("data", 1234);
```
## `Bun.CryptoHasher`
`Bun.CryptoHasher` is a general-purpose utility class that lets you incrementally compute a hash of string or binary data using a range of cryptographic hash algorithms. The following algorithms are supported:
- `"blake2b256"`
- `"md4"`
- `"md5"`
- `"ripemd160"`
- `"sha1"`
- `"sha224"`
- `"sha256"`
- `"sha384"`
- `"sha512"`
- `"sha512-256"`
```ts
const hasher = new Bun.CryptoHasher("sha256");
hasher.update("hello world");
hasher.digest();
// Uint8Array(32) [ <byte>, <byte>, ... ]
```
Once initialized, data can be incrementally fed to the hasher using `.update()`. This method accepts `string`, `TypedArray`, and `ArrayBuffer`.
```ts
const hasher = new Bun.CryptoHasher("sha256");
hasher.update("hello world");
hasher.update(new Uint8Array([1, 2, 3]));
hasher.update(new ArrayBuffer(10));
```
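This incremental API is useful when the data arrives in pieces, such as a large file read as a stream. A sketch (assuming a `./data.bin` file exists):
```ts
const hasher = new Bun.CryptoHasher("sha256");
for await (const chunk of Bun.file("./data.bin").stream()) {
  hasher.update(chunk); // each chunk is a Uint8Array
}
hasher.digest("hex");
```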
If a `string` is passed, an optional second parameter can be used to specify the encoding (default `'utf-8'`). The following encodings are supported:
{% table %}
---
- Binary encodings
- `"base64"` `"base64url"` `"hex"` `"binary"`
---
- Character encodings
- `"utf8"` `"utf-8"` `"utf16le"` `"latin1"`
---
- Legacy character encodings
- `"ascii"` `"binary"` `"ucs2"` `"ucs-2"`
{% /table %}
```ts
hasher.update("hello world"); // defaults to utf8
hasher.update("hello world", "hex");
hasher.update("hello world", "base64");
hasher.update("hello world", "latin1");
```
After the data has been fed into the hasher, a final hash can be computed using `.digest()`. By default, this method returns a `Uint8Array` containing the hash.
```ts
const hasher = new Bun.CryptoHasher("sha256");
hasher.update("hello world");
hasher.digest();
// => Uint8Array(32) [ 185, 77, 39, 185, 147, ... ]
```
The `.digest()` method can optionally return the hash as a string. To do so, specify an encoding:
```ts
hasher.digest("base64");
// => "uU0nuZNNPgilLlLX2n2r+sSE7+N6U4DukIj3rOLvzek="
hasher.digest("hex");
// => "b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9"
```
Alternatively, the method can write the hash into a pre-existing `TypedArray` instance. This may be desirable in some performance-sensitive applications.
```ts
const arr = new Uint8Array(32);
hasher.digest(arr);
console.log(arr);
// => Uint8Array(32) [ 185, 77, 39, 185, 147, ... ]
```
<!-- Bun.sha; -->

@@ -1,12 +1,19 @@
This page primarily documents the Bun-native `Bun.serve` API. Bun also implements [`fetch`](https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API) and the Node.js [`http`](https://nodejs.org/api/http.html) and [`https`](https://nodejs.org/api/https.html) modules.
{% callout %}
These modules have been re-implemented to use Bun's fast internal HTTP infrastructure. Feel free to use these modules directly; frameworks like [Express](https://expressjs.com/) that depend on these modules should work out of the box. For granular compatibility information, see [Runtime > Node.js APIs](/docs/runtime/nodejs-apis).
**Note** — This page documents the `Bun.serve` API. This API is heavily optimized and represents the recommended way to build HTTP servers in Bun. Existing Node.js projects may use Bun's [nearly complete](/docs/runtime/nodejs-apis#node_http) implementation of the Node.js [`http`](https://nodejs.org/api/http.html) and [`https`](https://nodejs.org/api/https.html) modules.
{% /callout %}
To start a high-performance HTTP server with a clean API, the recommended approach is [`Bun.serve`](#start-a-server-bun-serve).
## Send a request (`fetch()`)
## `Bun.serve()`
Bun implements the Web `fetch` API for making HTTP requests. The `fetch` function is available in the global scope.
```ts
const response = await fetch("https://bun.sh/manifest.json");
const result = (await response.json()) as any;
console.log(result.icons[0].src);
// => "/logo-square.jpg"
```
## Start a server (`Bun.serve()`)
Start an HTTP server in Bun with `Bun.serve`.
@@ -43,7 +50,7 @@ Bun.serve({
});
```
## Error handling
### Error handling
To activate development mode, set `development: true`. By default, development mode is _enabled_ unless `NODE_ENV` is `production`.
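For example, a server can combine development mode with a custom `error` handler (a minimal sketch; the messages are placeholders):
```ts
Bun.serve({
  development: true, // show detailed error pages while developing
  fetch(req) {
    throw new Error("Something went wrong");
  },
  // optionally return a custom response when a handler throws
  error(err) {
    return new Response(`Oops: ${err.message}`, { status: 500 });
  },
});
```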
@@ -93,72 +100,36 @@ const server = Bun.serve({
server.stop();
```
## TLS
### TLS
Bun supports TLS out of the box, powered by [OpenSSL](https://www.openssl.org/). Enable TLS by passing in a value for `key` and `cert`; both are required to enable TLS. If needed, supply a `passphrase` to decrypt the `key`.
Bun supports TLS out of the box, powered by [OpenSSL](https://www.openssl.org/). Enable TLS by passing in a value for `keyFile` and `certFile`; both are required to enable TLS. If needed, supply a `passphrase` to decrypt the `keyFile`.
```ts
Bun.serve({
fetch(req) {
return new Response("Hello!!!");
},
// can be string, BunFile, TypedArray, Buffer, or array thereof
key: Bun.file("./key.pem"),
cert: Bun.file("./cert.pem"),
// passphrase, only required if key is encrypted
passphrase: "super-secret",
keyFile: "./key.pem", // path to TLS key
certFile: "./cert.pem", // path to TLS cert
passphrase: "super-secret", // optional passphrase
});
```
The `key` and `cert` fields expect the _contents_ of your TLS key and certificate. This can be a string, `BunFile`, `TypedArray`, or `Buffer`.
```ts
Bun.serve({
fetch() {},
// BunFile
key: Bun.file("./key.pem"),
// Buffer
key: fs.readFileSync("./key.pem"),
// string
key: fs.readFileSync("./key.pem", "utf8"),
// array of above
key: [Bun.file("./key1.pem"), Bun.file("./key2.pem")],
});
```
{% callout %}
**Note** Earlier versions of Bun supported passing a file path as `keyFile` and `certFile`; this has been deprecated as of `v0.6.3`.
{% /callout %}
Optionally, you can override the trusted CA certificates by passing a value for `ca`. By default, the server will trust the list of well-known CAs curated by Mozilla. When `ca` is specified, the Mozilla list is overwritten.
The root CA and Diffie-Hellman parameters can be configured as well.
```ts
Bun.serve({
fetch(req) {
return new Response("Hello!!!");
},
key: Bun.file("./key.pem"), // path to TLS key
cert: Bun.file("./cert.pem"), // path to TLS cert
ca: Bun.file("./ca.pem"), // path to root CA certificate
keyFile: "./key.pem", // path to TLS key
certFile: "./cert.pem", // path to TLS cert
caFile: "./ca.pem", // path to root CA certificate
dhParamsFile: "./dhparams.pem", // Diffie-Hellman parameters
});
```
To override Diffie-Hellman parameters:
```ts
Bun.serve({
// ...
dhParamsFile: "./dhparams.pem", // path to Diffie-Hellman parameters
});
```
## Hot reloading
### Hot reloading
Thus far, the examples on this page have used the explicit `Bun.serve` API. Bun also supports an alternate syntax.
@@ -182,7 +153,7 @@ $ bun --hot server.ts
It's possible to configure hot reloading while using the explicit `Bun.serve` API; for details refer to [Runtime > Hot reloading](/docs/runtime/hot).
## Streaming files
### Streaming files
To stream a file, return a `Response` object with a `BunFile` object as the body.
@@ -220,7 +191,7 @@ Bun.serve({
});
```
## Benchmarks
### Benchmarks
Below are Bun and Node.js implementations of a simple HTTP server that responds `Bun!` to each incoming `Request`.
@@ -263,7 +234,7 @@ The `Bun.serve` server can handle roughly 2.5x more requests per second than Nod
{% image width="499" alt="image" src="https://user-images.githubusercontent.com/709451/162389032-fc302444-9d03-46be-ba87-c12bd8ce89a0.png" /%}
## Reference
### Reference
{% details summary="See TypeScript definitions" %}
