Compare commits

..

14 Commits

Author SHA1 Message Date
Colin McDonnell  7c9352392d  Update readme and regen  2023-06-09 19:21:49 -07:00
Jarred Sumner    fef9853d5a  wip  2023-06-05 02:02:13 -07:00
Jarred Sumner    7dae496847  wip  2023-06-04 19:59:22 -07:00
Jarred Sumner    92b060c6e2  fix some CLI things  2023-06-04 19:06:02 -07:00
Jarred Sumner    2a64e8b3bb  fixup  2023-06-04 19:05:42 -07:00
Jarred Sumner    fde3b7fbb6  Fix build  2023-06-04 18:54:26 -07:00
Ashcon Partovi   c16e769383  Add types and sample heapsnapshot for JSC and V8  2023-06-04 18:54:26 -07:00
Ashcon Partovi   5badc728d0  Add a DevTools client  2023-06-04 18:54:26 -07:00
Jarred Sumner    c4b3b321c2  slightly more progress  2023-06-04 18:54:26 -07:00
Jarred Sumner    cf599e77d9  Fix C++ compile errors  2023-06-04 18:54:26 -07:00
Jarred Sumner    b727689a9b  pushing this but it doesn't fix anything  2023-06-04 18:54:26 -07:00
Ashcon Partovi   4a36470588  Add script to generate DevTools protocol types for JSC and V8  2023-06-04 18:54:26 -07:00
Jarred Sumner    ca08cf6b0a  Update BunInspector.cpp  2023-06-04 18:54:26 -07:00
Jarred Sumner    10bd0fac3a  WIP support inspector  2023-06-04 18:54:26 -07:00
967 changed files with 115314 additions and 135544 deletions

View File

@@ -1,62 +0,0 @@
### What does this PR do?
<!-- **Please explain what your changes do**, example: -->
<!--
This adds a new flag --bail to bun test. When set, it will stop running tests after the first failure. This is useful for CI environments where you want to fail fast.
-->
- [ ] Documentation or TypeScript types (it's okay to leave the rest blank in this case)
- [ ] Code changes
### How did you verify your code works?
<!-- **For code changes, please include automated tests**. Feel free to uncomment the line below -->
<!-- I wrote automated tests -->
<!-- If JavaScript/TypeScript modules or builtins changed:
- [ ] I ran `make js` and committed the transpiled changes
- [ ] I or my editor ran Prettier on the changed files (or I ran `bun fmt`)
- [ ] I included a test for the new code, or an existing test covers it
-->
<!-- If Zig files changed:
- [ ] I checked the lifetime of memory allocated to verify it's (1) freed and (2) only freed when it should be
- [ ] I or my editor ran `zig fmt` on the changed files
- [ ] I included a test for the new code, or an existing test covers it
- [ ] JSValue used outside of the stack is either wrapped in a JSC.Strong or is JSValueProtect'ed
-->
<!-- If new methods, getters, or setters were added to a publicly exposed class:
- [ ] I added TypeScript types for the new methods, getters, or setters
-->
<!-- If dependencies in tests changed:
- [ ] I made sure that specific versions of dependencies are used instead of ranged or tagged versions
-->
<!-- If functions were added to exports.zig or bindings.zig
- [ ] I ran `make headers` to regenerate the C header file
-->
<!-- If \*.classes.ts files were added or changed:
- [ ] I ran `make codegen` to regenerate the C++ and Zig code
-->
<!-- If a new builtin ESM/CJS module was added:
- [ ] I updated Aliases in `module_loader.zig` to include the new module
- [ ] I added a test that imports the module
- [ ] I added a test that require() the module
-->
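For reference (this note is not part of the diff), the "I included a test for the new code" items above refer to Bun's built-in test runner; a minimal bun:test file looks roughly like the sketch below, where the file name and the assertion are placeholders.

```ts
// example.test.ts -- illustrative placeholder; run with `bun test`
// (the template's own example flag, --bail, stops the run after the first failure).
import { describe, expect, test } from "bun:test";

describe("new feature", () => {
  test("covers the new code path", () => {
    expect(1 + 1).toBe(2);
  });
});
```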

View File

@@ -36,7 +36,7 @@ jobs:
arch: aarch64
build_arch: arm64
runner: linux-arm64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-july23/bun-webkit-linux-arm64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-linux-arm64-lto.tar.gz"
webkit_basename: "bun-webkit-linux-arm64-lto"
build_machine_arch: aarch64

View File

@@ -46,7 +46,7 @@ jobs:
arch: x86_64
build_arch: amd64
runner: big-ubuntu
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-july23/bun-webkit-linux-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-linux-amd64-lto.tar.gz"
webkit_basename: "bun-webkit-linux-amd64-lto"
build_machine_arch: x86_64
- cpu: nehalem
@@ -54,7 +54,7 @@ jobs:
arch: x86_64
build_arch: amd64
runner: big-ubuntu
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-july23/bun-webkit-linux-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-linux-amd64-lto.tar.gz"
webkit_basename: "bun-webkit-linux-amd64-lto"
build_machine_arch: x86_64
@@ -187,14 +187,10 @@ jobs:
unzip bun-${{matrix.tag}}.zip
cd bun-${{matrix.tag}}
chmod +x bun
pwd >> $GITHUB_PATH
./bun --version
sudo mv bun /usr/local/bin/bun
bun --version
- id: test
name: Test (node runner)
env:
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
# if: ${{github.event.inputs.use_bun == 'false'}}
run: |
bun install

View File

@@ -117,7 +117,7 @@ jobs:
# obj: bun-obj-darwin-x64-baseline
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-july23/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
# - cpu: haswell
@@ -126,7 +126,7 @@ jobs:
# obj: bun-obj-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-july23/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
# - cpu: nehalem
@@ -135,7 +135,7 @@ jobs:
# obj: bun-obj-darwin-x64-baseline
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-july23/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
# - cpu: haswell
@@ -144,7 +144,7 @@ jobs:
# obj: bun-obj-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-july23/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
- cpu: native
@@ -152,7 +152,7 @@ jobs:
tag: bun-darwin-aarch64
obj: bun-obj-darwin-aarch64
artifact: bun-obj-darwin-aarch64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-july23/bun-webkit-macos-arm64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-arm64-lto.tar.gz"
runner: macos-arm64
dependencies: true
compile_obj: true
@@ -173,9 +173,9 @@ jobs:
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
brew install ccache rust llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@15)/bin" >> $GITHUB_PATH
echo "export PATH=$(brew --prefix ccache)/bin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
brew link --overwrite llvm@15
- name: ccache
uses: hendrikmuhs/ccache-action@v1.2
@@ -257,7 +257,7 @@ jobs:
# package: bun-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-july23/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: haswell
# arch: x86_64
# tag: bun-darwin-x64
@@ -265,14 +265,14 @@ jobs:
# package: bun-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-july23/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
- cpu: native
arch: aarch64
tag: bun-darwin-aarch64
obj: bun-obj-darwin-aarch64
package: bun-darwin-aarch64
artifact: bun-obj-darwin-aarch64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-july23/bun-webkit-macos-arm64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-arm64-lto.tar.gz"
runner: macos-arm64
steps:
- uses: actions/checkout@v3
@@ -291,8 +291,8 @@ jobs:
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
brew install rust ccache llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@15)/bin" >> $GITHUB_PATH
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
brew link --overwrite llvm@15
- name: ccache
uses: hendrikmuhs/ccache-action@v1.2
@@ -397,7 +397,7 @@ jobs:
runs-on: ${{ matrix.runner }}
needs: [macOS]
if: github.event_name == 'pull_request' && github.repository_owner == 'oven-sh'
timeout-minutes: 30
timeout-minutes: 10
outputs:
failing_tests: ${{ steps.test.outputs.failing_tests }}
failing_tests_count: ${{ steps.test.outputs.failing_tests_count }}
@@ -426,14 +426,10 @@ jobs:
unzip ${{matrix.tag}}.zip
cd ${{matrix.tag}}
chmod +x bun
pwd >> $GITHUB_PATH
./bun --version
sudo mv bun /usr/local/bin/bun
bun --version
- id: test
name: Test (node runner)
env:
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
# if: ${{github.event.inputs.use_bun == 'false'}}
run: |
bun install

View File

@@ -117,7 +117,7 @@ jobs:
obj: bun-obj-darwin-x64-baseline
runner: macos-11
artifact: bun-obj-darwin-x64-baseline
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-july23/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: true
compile_obj: false
# - cpu: haswell
@@ -126,7 +126,7 @@ jobs:
# obj: bun-obj-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-july23/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
- cpu: nehalem
@@ -135,7 +135,7 @@ jobs:
obj: bun-obj-darwin-x64-baseline
runner: macos-11
artifact: bun-obj-darwin-x64-baseline
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-july23/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: false
compile_obj: true
# - cpu: haswell
@@ -144,7 +144,7 @@ jobs:
# obj: bun-obj-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-july23/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
# - cpu: native
@@ -152,7 +152,7 @@ jobs:
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-july23/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# runner: macos-arm64
# dependencies: true
# compile_obj: true
@@ -173,9 +173,9 @@ jobs:
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
brew install ccache rust llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@15)/bin" >> $GITHUB_PATH
echo "export PATH=$(brew --prefix ccache)/bin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
brew link --overwrite llvm@15
- name: ccache (dependencies)
uses: hendrikmuhs/ccache-action@v1.2
@@ -258,7 +258,7 @@ jobs:
package: bun-darwin-x64
runner: macos-11
artifact: bun-obj-darwin-x64-baseline
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-july23/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: haswell
# arch: x86_64
# tag: bun-darwin-x64
@@ -266,14 +266,14 @@ jobs:
# package: bun-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-july23/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: native
# arch: aarch64
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# package: bun-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-july23/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# runner: macos-arm64
steps:
- uses: actions/checkout@v3
@@ -292,8 +292,8 @@ jobs:
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
brew install ccache rust llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@15)/bin" >> $GITHUB_PATH
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
brew link --overwrite llvm@15
- name: ccache (link)
uses: hendrikmuhs/ccache-action@v1.2
@@ -430,14 +430,10 @@ jobs:
unzip ${{matrix.tag}}.zip
cd ${{matrix.tag}}
chmod +x bun
pwd >> $GITHUB_PATH
./bun --version
sudo mv bun /usr/local/bin/bun
bun --version
- id: test
name: Test (node runner)
env:
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
# if: ${{github.event.inputs.use_bun == 'false'}}
run: |
bun install

View File

@@ -117,7 +117,7 @@ jobs:
# obj: bun-obj-darwin-x64-baseline
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-july23/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
- cpu: haswell
@@ -126,7 +126,7 @@ jobs:
obj: bun-obj-darwin-x64
runner: macos-11
artifact: bun-obj-darwin-x64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-july23/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: true
compile_obj: false
# - cpu: nehalem
@@ -135,7 +135,7 @@ jobs:
# obj: bun-obj-darwin-x64-baseline
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-july23/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
- cpu: haswell
@@ -144,7 +144,7 @@ jobs:
obj: bun-obj-darwin-x64
runner: macos-11
artifact: bun-obj-darwin-x64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-july23/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: false
compile_obj: true
# - cpu: native
@@ -152,7 +152,7 @@ jobs:
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-july23/bun-webkit-macos-arm64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-arm64-lto.tar.gz"
# runner: macos-arm64
# dependencies: true
# compile_obj: true
@@ -173,8 +173,8 @@ jobs:
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
brew install rust ccache llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@15)/bin" >> $GITHUB_PATH
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
brew link --overwrite llvm@15
- name: Download WebKit
if: matrix.compile_obj
@@ -260,7 +260,7 @@ jobs:
# package: bun-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-july23/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
- cpu: haswell
arch: x86_64
tag: bun-darwin-x64
@@ -268,14 +268,14 @@ jobs:
package: bun-darwin-x64
runner: macos-11
artifact: bun-obj-darwin-x64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-july23/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: native
# arch: aarch64
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# package: bun-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-july23/bun-webkit-macos-arm64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-arm64-lto.tar.gz"
# runner: macos-arm64
steps:
- uses: actions/checkout@v3
@@ -294,8 +294,8 @@ jobs:
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
brew install rust ccache llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@15)/bin" >> $GITHUB_PATH
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
brew link --overwrite llvm@15
- name: Download WebKit
env:
@@ -432,14 +432,10 @@ jobs:
unzip ${{matrix.tag}}.zip
cd ${{matrix.tag}}
chmod +x bun
pwd >> $GITHUB_PATH
./bun --version
sudo mv bun /usr/local/bin/bun
bun --version
- id: test
name: Test (node runner)
env:
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
# if: ${{github.event.inputs.use_bun == 'false'}}
run: |
bun install

View File

@@ -1,4 +1,4 @@
name: bun-release-types-canary
name: bun-release-canary
concurrency: release-canary
on:
push:

View File

@@ -156,8 +156,8 @@ jobs:
with:
images: oven/bun
tags: |
type=match,pattern=(bun-v)?(\d+.\d+.\d+),group=2,value=${{ env.TAG }}
type=match,pattern=(bun-v)?(\d+.\d+),group=2,value=${{ env.TAG }}
type=match,pattern=(bun-v)?(\d.\d.\d),group=2,value=${{ env.TAG }}
type=match,pattern=(bun-v)?(\d.\d),group=2,value=${{ env.TAG }}
- id: login
name: Login to Docker
uses: docker/login-action@v2
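As an aside (not part of the workflow), the difference between the two tag patterns above is that `\d` matches exactly one digit while `\d+` matches one or more, which only starts to matter once a version component reaches two digits. A quick sketch of the effect, using a plain JavaScript RegExp for illustration; the tag value is hypothetical:

```ts
// Illustrative only; docker/metadata-action applies the pattern itself, this just
// shows the single-digit vs. multi-digit capture difference for group 2.
const tag = "bun-v0.6.12"; // hypothetical tag with a two-digit patch number
const narrow = /(bun-v)?(\d.\d.\d)/;    // one digit per component
const wide   = /(bun-v)?(\d+.\d+.\d+)/; // one or more digits per component
console.log(tag.match(narrow)[2]); // "0.6.1"  -- the trailing "2" is cut off
console.log(tag.match(wide)[2]);   // "0.6.12"
```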

View File

@@ -1,7 +1,7 @@
name: zig-fmt
env:
ZIG_VERSION: 0.11.0-dev.4006+bf827d0b5
ZIG_VERSION: 0.11.0-dev.2571+31738de28
on:
pull_request:
@@ -28,7 +28,7 @@ jobs:
run: |
curl https://ziglang.org/builds/zig-linux-x86_64-${{env.ZIG_VERSION}}.tar.xz -L -o zig.tar.xz
tar -xf zig.tar.xz
echo "$(pwd)/zig-linux-x86_64-${{env.ZIG_VERSION}}" >> $GITHUB_PATH
sudo mv zig-linux-x86_64-${{env.ZIG_VERSION}}/zig /usr/local/bin
- name: Run zig fmt
id: fmt
run: |

.gitignore vendored (4 changed lines)
View File

@@ -121,7 +121,3 @@ cold-jsc-start
cold-jsc-start.d
/test.ts
src/js/out/modules_dev
make-dev-stats.csv

.gitmodules vendored (8 changed lines)
View File

@@ -68,10 +68,4 @@ fetchRecurseSubmodules = false
[submodule "src/deps/zstd"]
path = src/deps/zstd
url = https://github.com/facebook/zstd.git
ignore = dirty
[submodule "src/deps/base64"]
path = src/deps/base64
url = https://github.com/aklomp/base64.git
ignore = dirty
depth = 1
shallow = true
ignore = dirty

View File

@@ -6,7 +6,7 @@ module.exports = {
quoteProps: "preserve",
overrides: [
{
files: ["*.md"],
files: "README.md",
options: {
printWidth: 80,
},

View File

@@ -1,21 +0,0 @@
// I would have made this a bash script, but there isn't an easy way to track
// sub-second time cross-platform in bash.
import fs from "fs";
const start = Date.now() + 5;
const result = Bun.spawnSync(process.argv.slice(2), {
stdio: ["inherit", "inherit", "inherit"],
});
const end = Date.now();
const diff = (Math.max(Math.round(end - start), 0) / 1000).toFixed(3);
const success = result.exitCode === 0;
try {
const line = `${new Date().toISOString()}, ${success ? "success" : "fail"}, ${diff}\n`;
if (fs.existsSync(".scripts/make-dev-stats.csv")) {
fs.appendFileSync(".scripts/make-dev-stats.csv", line);
} else {
fs.writeFileSync(".scripts/make-dev-stats.csv", line);
}
} catch {
// Ignore
}
process.exit(result.exitCode);

View File

@@ -19,11 +19,11 @@
"${workspaceFolder}/src/js/out",
"${workspaceFolder}/src/deps/boringssl/include/",
"${workspaceFolder}/src/deps",
"${workspaceFolder}/src/deps/uws/uSockets/src"
"${workspaceFolder}/src/deps/uws/uSockets/src",
"${workspaceFolder}/src/deps/uws/src"
],
"browse": {
"path": [
"${workspaceFolder}/../webkit-build/include/",
"${workspaceFolder}/bun-webkit/include/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/ICU/Headers/",
@@ -39,7 +39,8 @@
"${workspaceFolder}/src/bun.js/modules/*",
"${workspaceFolder}/src/deps",
"${workspaceFolder}/src/deps/boringssl/include/",
"${workspaceFolder}/src/deps/uws/uSockets/src"
"${workspaceFolder}/src/deps/uws/uSockets/src",
"${workspaceFolder}/src/deps/uws/src"
],
"limitSymbolsToIncludedHeaders": true,
"databaseFilename": ".vscode/cppdb"

.vscode/launch.json generated vendored (34 changed lines)
View File

@@ -14,8 +14,7 @@
"name": "bun test [file]",
"program": "bun-debug",
"args": ["test", "${file}"],
// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
"cwd": "${workspaceFolder}/test",
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -30,8 +29,7 @@
"name": "bun test [file] (fast)",
"program": "bun-debug",
"args": ["test", "${file}"],
// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
"cwd": "${workspaceFolder}/test",
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1"
@@ -46,8 +44,7 @@
"name": "bun test [file] (verbose)",
"program": "bun-debug",
"args": ["test", "${file}"],
// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
"cwd": "${workspaceFolder}/test",
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1"
},
@@ -60,8 +57,7 @@
"name": "bun test [file] --watch",
"program": "bun-debug",
"args": ["test", "--watch", "${file}"],
// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
"cwd": "${workspaceFolder}/test",
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1"
@@ -75,8 +71,7 @@
"name": "bun test [file] --only",
"program": "bun-debug",
"args": ["test", "--only", "${file}"],
// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
"cwd": "${workspaceFolder}/test",
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1"
@@ -105,7 +100,6 @@
"name": "bun test [*] (fast)",
"program": "bun-debug",
"args": ["test"],
// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
@@ -120,7 +114,6 @@
"name": "bun test [*] --only",
"program": "bun-debug",
"args": ["test", "--only"],
// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
@@ -143,6 +136,20 @@
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun run [Inspect]",
"program": "bun-debug",
"args": ["--inspect-brk", "${file}"],
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1"
},
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
@@ -242,6 +249,7 @@
"console": "internalConsole",
"env": {}
},
{
"type": "lldb",
"request": "launch",
@@ -324,7 +332,7 @@
"name": "bun install",
"program": "bun-debug",
"args": ["install"],
"cwd": "${fileDirname}",
"cwd": "${workspaceFolder}",
"console": "internalConsole",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1"

View File

@@ -7,8 +7,6 @@
"search.followSymlinks": false,
"search.useIgnoreFiles": true,
"zig.buildOnSave": false,
// We do this until we upgrade to latest Zig so that zls doesn't break our code.
"zig.formattingProvider": "extension",
"zig.buildArgs": ["obj", "-Dfor-editor"],
"zig.buildOption": "build",
"zig.buildFilePath": "${workspaceFolder}/build.zig",

View File

@@ -10,9 +10,9 @@ ARG ARCH=x86_64
ARG BUILD_MACHINE_ARCH=x86_64
ARG TRIPLET=${ARCH}-linux-gnu
ARG BUILDARCH=amd64
ARG WEBKIT_TAG=2023-july23
ARG WEBKIT_TAG=may20
ARG ZIG_TAG=jul1
ARG ZIG_VERSION="0.11.0-dev.4006+bf827d0b5"
ARG ZIG_VERSION="0.11.0-dev.2571+31738de28"
ARG WEBKIT_BASENAME="bun-webkit-linux-$BUILDARCH"
ARG ZIG_FOLDERNAME=zig-linux-${BUILD_MACHINE_ARCH}-${ZIG_VERSION}
@@ -20,7 +20,7 @@ ARG ZIG_FILENAME=${ZIG_FOLDERNAME}.tar.xz
ARG WEBKIT_URL="https://github.com/oven-sh/WebKit/releases/download/$WEBKIT_TAG/${WEBKIT_BASENAME}.tar.gz"
ARG ZIG_URL="https://ziglang.org/builds/${ZIG_FILENAME}"
ARG GIT_SHA=""
ARG BUN_BASE_VERSION=0.7
ARG BUN_BASE_VERSION=0.6
FROM bitnami/minideb:bullseye as bun-base
@@ -295,27 +295,6 @@ WORKDIR $BUN_DIR
RUN cd $BUN_DIR && \
make uws && rm -rf src/deps/uws Makefile
FROM bun-base as base64
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/base64 ${BUN_DIR}/src/deps/base64
WORKDIR $BUN_DIR
RUN cd $BUN_DIR && \
make base64 && rm -rf src/deps/base64 Makefile
FROM bun-base as picohttp
ARG DEBIAN_FRONTEND
@@ -577,7 +556,6 @@ ENV JSC_BASE_DIR=${WEBKIT_DIR}
ENV LIB_ICU_PATH=${WEBKIT_DIR}/lib
COPY --from=zlib ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=base64 ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=libarchive ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=boringssl ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=lolhtml ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/

View File

@@ -40,7 +40,7 @@ NATIVE_OR_OLD_MARCH = -march=nehalem
endif
MIN_MACOS_VERSION ?= $(DEFAULT_MIN_MACOS_VERSION)
BUN_BASE_VERSION = 0.7
BUN_BASE_VERSION = 0.6
CI ?= false
@@ -52,8 +52,6 @@ endif
BUN_OR_NODE = $(shell which bun 2>/dev/null || which node 2>/dev/null)
CXX_VERSION=c++2a
TRIPLET = $(OS_NAME)-$(ARCH_NAME)
PACKAGE_NAME = bun-$(TRIPLET)
@@ -455,8 +453,7 @@ MINIMUM_ARCHIVE_FILES = -L$(BUN_DEPS_OUT_DIR) \
-ldecrepit \
-lssl \
-lcrypto \
-llolhtml \
-lbase64
-llolhtml
ARCHIVE_FILES_WITHOUT_LIBCRYPTO = $(MINIMUM_ARCHIVE_FILES) \
-larchive \
@@ -564,9 +561,6 @@ builtins:
esm:
NODE_ENV=production bun src/js/build-esm.ts
esm-debug:
BUN_DEBUG_QUIET_LOGS=1 NODE_ENV=production bun-debug src/js/build-esm.ts
.PHONY: generate-builtins
generate-builtins: builtins
@@ -686,19 +680,8 @@ require:
@which pkg-config > /dev/null || (echo -e "ERROR: pkg-config is required. Install with:\n\n $(POSIX_PKG_MANAGER) install pkg-config"; exit 1)
@echo "You have the dependencies installed! Woo"
# the following allows you to run `make submodule` to update or init submodules, but we will exclude webkit
# unless you explicitly clone it yourself (a huge download)
SUBMODULE_NAMES=$(shell cat .gitmodules | grep 'path = ' | awk '{print $$3}')
ifeq ("$(wildcard src/bun.js/WebKit/.git)", "")
SUBMODULE_NAMES := $(filter-out src/bun.js/WebKit, $(SUBMODULE_NAMES))
endif
.PHONY: init-submodules
init-submodules: submodule # (backwards-compatibility alias)
.PHONY: submodule
submodule: ## to init or update all submodules
git submodule update --init --recursive --progress --depth=1 --checkout $(SUBMODULE_NAMES)
init-submodules:
git submodule update --init --recursive --progress --depth=1 --checkout
.PHONY: build-obj
build-obj:
@@ -1093,32 +1076,16 @@ test/wiptest/run: test/wiptest/run.o
release-bin-dir:
echo $(PACKAGE_DIR)
.PHONY: dev-obj-track
dev-obj-track:
bun .scripts/make-dev-timer.ts $(ZIG) build obj -freference-trace -Dcpu="$(CPU_TARGET)"
.PHONY: dev-obj-notrack
dev-obj-notrack:
$(ZIG) build obj -freference-trace -Dcpu="$(CPU_TARGET)"
.PHONY: dev-obj
dev-obj:
ifeq ($(shell which bun),)
dev-obj : dev-obj-notrack
else
dev-obj : dev-obj-track
endif
$(ZIG) build obj -freference-trace -Dcpu="$(CPU_TARGET)"
.PHONY: dev-obj-linux
dev-obj-linux:
$(ZIG) build obj -Dtarget=x86_64-linux-gnu -Dcpu="$(CPU_TARGET)"
.PHONY: dev
dev: mkdir-dev esm dev-obj link ## compile zig changes + link bun
dev: mkdir-dev esm dev-obj bun-link-lld-debug
mkdir-dev:
mkdir -p $(DEBUG_PACKAGE_DIR)
@@ -1203,7 +1170,7 @@ jsc-build-mac-compile:
-DPORT="JSCOnly" \
-DENABLE_STATIC_JSC=ON \
-DENABLE_SINGLE_THREADED_VM_ENTRY_SCOPE=ON \
-DCMAKE_BUILD_TYPE=Release \
-DCMAKE_BUILD_TYPE=relwithdebuginfo \
-DUSE_THIN_ARCHIVES=OFF \
-DBUN_FAST_TLS=ON \
-DENABLE_FTL_JIT=ON \
@@ -1270,7 +1237,7 @@ jsc-build-linux-compile-config:
cmake \
-DPORT="JSCOnly" \
-DENABLE_STATIC_JSC=ON \
-DCMAKE_BUILD_TYPE=Release \
-DCMAKE_BUILD_TYPE=relwithdebuginfo \
-DUSE_THIN_ARCHIVES=OFF \
-DENABLE_FTL_JIT=ON \
-DENABLE_REMOTE_INSPECTOR=ON \
@@ -1389,20 +1356,15 @@ mimalloc-wasm:
cd $(BUN_DEPS_DIR)/mimalloc; emcmake cmake -DMI_BUILD_SHARED=OFF -DMI_BUILD_STATIC=ON -DMI_BUILD_TESTS=OFF -DMI_BUILD_OBJECT=ON ${MIMALLOC_OVERRIDE_FLAG} -DMI_USE_CXX=ON .; emmake make;
cp $(BUN_DEPS_DIR)/mimalloc/$(MIMALLOC_INPUT_PATH) $(BUN_DEPS_OUT_DIR)/$(MIMALLOC_FILE).wasm
# alias for link, in case anyone still types that
.PHONY: bun-link-lld-debug
bun-link-lld-debug: link
.PHONY: link
link: ## link a debug build of bun
bun-link-lld-debug:
$(CXX) $(BUN_LLD_FLAGS_DEBUG) $(DEBUG_FLAGS) $(SYMBOLS) \
-g \
$(DEBUG_BIN)/bun-debug.o \
-W \
-o $(DEBUG_BIN)/bun-debug
@rm -f $(DEBUG_BIN)/bun-debug.o.o 2> /dev/null # workaround for https://github.com/ziglang/zig/issues/14080
@rm -f $(DEBUG_BIN)/bun-debug.o.o 2> /dev/null # workaround for https://github.com/ziglang/zig/issues/14080
link-no-jsc:
bun-link-lld-debug-no-jsc:
$(CXX) $(BUN_LLD_FLAGS_WITHOUT_JSC) $(SYMBOLS) \
-g \
$(DEBUG_BIN)/bun-debug.o \
@@ -1489,7 +1451,7 @@ generate-classes:
generate-sink:
bun src/bun.js/scripts/generate-jssink.js
$(CLANG_FORMAT) -i src/bun.js/bindings/JSSink.cpp src/bun.js/bindings/JSSink.h
./src/bun.js/scripts/create_hash_table src/bun.js/bindings/JSSink.cpp > src/bun.js/bindings/JSSinkLookupTable.h
$(WEBKIT_DIR)/Source/JavaScriptCore/create_hash_table src/bun.js/bindings/JSSink.cpp > src/bun.js/bindings/JSSinkLookupTable.h
$(SED) -i -e 's/#include "Lookup.h"//' src/bun.js/bindings/JSSinkLookupTable.h
$(SED) -i -e 's/namespace JSC {//' src/bun.js/bindings/JSSinkLookupTable.h
$(SED) -i -e 's/} \/\/ namespace JSC//' src/bun.js/bindings/JSSinkLookupTable.h
@@ -1723,7 +1685,7 @@ sizegen:
# Linux uses bundled SQLite3
ifeq ($(OS_NAME),linux)
sqlite:
$(CC) $(EMIT_LLVM_FOR_RELEASE) $(CFLAGS) $(INCLUDE_DIRS) -DSQLITE_ENABLE_COLUMN_METADATA= -DSQLITE_MAX_VARIABLE_NUMBER=250000 -DSQLITE_ENABLE_RTREE=1 -DSQLITE_ENABLE_FTS3=1 -DSQLITE_ENABLE_FTS3_PARENTHESIS=1 -DSQLITE_ENABLE_FTS5=1 -DSQLITE_ENABLE_JSON1=1 $(SRC_DIR)/sqlite/sqlite3.c -c -o $(SQLITE_OBJECT)
$(CC) $(EMIT_LLVM_FOR_RELEASE) $(CFLAGS) $(INCLUDE_DIRS) -DSQLITE_ENABLE_COLUMN_METADATA= -DSQLITE_MAX_VARIABLE_NUMBER=250000 -DSQLITE_ENABLE_RTREE=1 -DSQLITE_ENABLE_FTS3=1 -DSQLITE_ENABLE_FTS3_PARENTHESIS=1 -DSQLITE_ENABLE_JSON1=1 $(SRC_DIR)/sqlite/sqlite3.c -c -o $(SQLITE_OBJECT)
endif
picohttp:
@@ -1819,7 +1781,7 @@ endif
endif
.PHONY: build-unit
build-unit: # to build your unit tests
build-unit: ## to build your unit tests
@rm -rf zig-out/bin/$(testname)
@mkdir -p zig-out/bin
zig test $(realpath $(testpath)) \
@@ -1837,7 +1799,7 @@ build-unit: # to build your unit tests
cp zig-out/bin/$(testname) $(testbinpath)
.PHONY: run-all-unit-tests
run-all-unit-tests: # to run your unit tests
run-all-unit-tests: ## to run your unit tests
@rm -rf zig-out/bin/__main_test
@mkdir -p zig-out/bin
zig test src/main.zig \
@@ -1857,11 +1819,15 @@ run-all-unit-tests: # to run your unit tests
run-unit:
@zig-out/bin/$(testname) $(ZIG)
.PHONY: help
help: ## to print this help
@awk 'BEGIN {FS = ":.*?## "} /^[a-zA-Z0-9_-]+:.*?## / {gsub("\\\\n",sprintf("\n%22c",""), $$2);printf "\033[36m%-20s\033[0m \t\t%s\n", $$1, $$2}' $(MAKEFILE_LIST)
.PHONY: test
test: build-unit run-unit
.PHONY: integration-test-dev
integration-test-dev: # to run integration tests
integration-test-dev: ## to run integration tests
USE_EXISTING_PROCESS=true TEST_SERVER_URL=http://localhost:3000 node test/scripts/browser.js
copy-install:
@@ -1875,10 +1841,6 @@ copy-to-bun-release-dir-bin:
PACKAGE_MAP = --pkg-begin async_io $(BUN_DIR)/src/io/io_darwin.zig --pkg-begin bun $(BUN_DIR)/src/bun_redirect.zig --pkg-end --pkg-end --pkg-begin javascript_core $(BUN_DIR)/src/jsc.zig --pkg-begin bun $(BUN_DIR)/src/bun_redirect.zig --pkg-end --pkg-end --pkg-begin bun $(BUN_DIR)/src/bun_redirect.zig --pkg-end
.PHONY: base64
base64:
cd $(BUN_DEPS_DIR)/base64 && make clean && cmake $(CMAKE_FLAGS) . && make
cp $(BUN_DEPS_DIR)/base64/libbase64.a $(BUN_DEPS_OUT_DIR)/libbase64.a
.PHONY: cold-jsc-start
cold-jsc-start:
@@ -1897,23 +1859,22 @@ cold-jsc-start:
misctools/cold-jsc-start.cpp -o cold-jsc-start
.PHONY: vendor-without-npm
vendor-without-npm: node-fallbacks runtime_js fallback_decoder bun_error mimalloc picohttp zlib boringssl libarchive lolhtml sqlite usockets uws tinycc c-ares zstd base64
vendor-without-npm: node-fallbacks runtime_js fallback_decoder bun_error mimalloc picohttp zlib boringssl libarchive lolhtml sqlite usockets uws tinycc c-ares zstd
.PHONY: vendor-without-check
vendor-without-check: npm-install vendor-without-npm
.PHONY: vendor
vendor: require submodule vendor-without-check
vendor: require init-submodules vendor-without-check
.PHONY: vendor-dev
vendor-dev: require submodule npm-install-dev vendor-without-npm
vendor-dev: require init-submodules npm-install-dev vendor-without-npm
.PHONY: bun
bun: vendor identifier-cache build-obj bun-link-lld-release bun-codesign-release-local
.PHONY: regenerate-bindings
regenerate-bindings: ## compile src/js/builtins + all c++ code, does not link
regenerate-bindings:
@make clean-bindings builtins
@make bindings -j$(CPU_COUNT)
@@ -1925,8 +1886,3 @@ setup: vendor-dev identifier-cache clean-bindings
@echo "Development environment setup complete"
@echo "Run \`make dev\` to build \`bun-debug\`"
@echo ""
.PHONY: help
help: ## to print this help
@echo "For detailed build instructions, see https://bun.sh/docs/project/development"
@awk 'BEGIN {FS = ":.*?## "} /^[a-zA-Z0-9_-]+:.*?## / {gsub("\\\\n",sprintf("\n%22c",""), $$2);printf "\033[36m%-20s\033[0m \t\t%s\n", $$1, $$2}' $(MAKEFILE_LIST)

View File

@@ -1,30 +0,0 @@
// https://github.com/nodejs/node/issues/34493
import { AsyncLocalStorage } from "async_hooks";
const asyncLocalStorage = new AsyncLocalStorage();
// let fn = () => Promise.resolve(2).then(() => new Promise(resolve => queueMicrotask(resolve)));
let fn = () => /test/.test("test");
let runWithExpiry = async (expiry, fn) => {
let iterations = 0;
while (Date.now() < expiry) {
await fn();
iterations++;
}
return iterations;
};
console.log(`Performed ${await runWithExpiry(Date.now() + 1000, fn)} iterations to warmup`);
let withAls;
await asyncLocalStorage.run(123, async () => {
withAls = await runWithExpiry(Date.now() + 45000, fn);
console.log(`Performed ${withAls} iterations (with ALS enabled)`);
});
asyncLocalStorage.disable();
let withoutAls = await runWithExpiry(Date.now() + 45000, fn);
console.log(`Performed ${withoutAls} iterations (with ALS disabled)`);
console.log("ALS penalty: " + Math.round((1 - withAls / withoutAls) * 10000) / 100 + "%");

View File

@@ -3,6 +3,6 @@
"module": "index.ts",
"type": "module",
"devDependencies": {
"bun-types": "^0.7.0"
"bun-types": "^0.5.0"
}
}
}

View File

@@ -43,7 +43,7 @@ pub fn main() anyerror!void {
var position = try std.fmt.parseInt(u32, position_str, 10);
const filepath = try std.fs.path.resolve(allocator, &.{basepath});
var file = try std.fs.openFileAbsolute(filepath, .{ .write = true });
var ms = @as(u64, @truncate((try std.fmt.parseInt(u128, args[args.len - 1], 10)) * std.time.ns_per_ms));
var ms = @truncate(u64, (try std.fmt.parseInt(u128, args[args.len - 1], 10)) * std.time.ns_per_ms);
std.debug.assert(ms > 0);
// std.debug.assert(std.math.isFinite(position));
var prng = std.rand.DefaultPrng.init(0);
@@ -125,30 +125,30 @@ pub fn main() anyerror!void {
);
};
counters[counter].timestamp = @as(u64, @truncate(@as(u128, @intCast(std.time.nanoTimestamp())) / (std.time.ns_per_ms / 10)));
counters[counter].timestamp = @truncate(u64, @intCast(u128, std.time.nanoTimestamp()) / (std.time.ns_per_ms / 10));
counters[counter].rotate = rotate % 360;
counters[counter].percent = std.math.mod(f64, std.math.round(((progress_bar + 1.0) / destination_count) * 1000) / 1000, 100) catch 0;
counters[counter].color_values[0] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[0][0] + 1) % 256))) * 0.8)));
counters[counter].color_values[1] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[0][1] + 1) % 256))) * 0.8)));
counters[counter].color_values[2] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[0][2] + 1) % 256))) * 0.8)));
counters[counter].color_values[0] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[0][0] + 1) % 256)) * 0.8));
counters[counter].color_values[1] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[0][1] + 1) % 256)) * 0.8));
counters[counter].color_values[2] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[0][2] + 1) % 256)) * 0.8));
counters[counter].color_values[3] = (colors[0][0] + 1) % 256;
counters[counter].color_values[4] = (colors[0][1] + 1) % 256;
counters[counter].color_values[5] = (colors[0][2] + 1) % 256;
counters[counter].color_values[6] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[1][0] + 1) % 256))) * 0.8)));
counters[counter].color_values[7] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[1][1] + 1) % 256))) * 0.8)));
counters[counter].color_values[8] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[1][2] + 1) % 256))) * 0.8)));
counters[counter].color_values[6] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[1][0] + 1) % 256)) * 0.8));
counters[counter].color_values[7] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[1][1] + 1) % 256)) * 0.8));
counters[counter].color_values[8] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[1][2] + 1) % 256)) * 0.8));
counters[counter].color_values[9] = (colors[1][0] + 1) % 256;
counters[counter].color_values[10] = (colors[1][1] + 1) % 256;
counters[counter].color_values[11] = (colors[1][2] + 1) % 256;
counters[counter].color_values[12] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[2][0] + 1) % 256))) * 0.8)));
counters[counter].color_values[13] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[2][1] + 1) % 256))) * 0.8)));
counters[counter].color_values[14] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[2][2] + 1) % 256))) * 0.8)));
counters[counter].color_values[12] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[2][0] + 1) % 256)) * 0.8));
counters[counter].color_values[13] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[2][1] + 1) % 256)) * 0.8));
counters[counter].color_values[14] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[2][2] + 1) % 256)) * 0.8));
counters[counter].color_values[15] = (colors[2][0] + 1) % 256;
counters[counter].color_values[16] = (colors[2][1] + 1) % 256;
counters[counter].color_values[17] = (colors[2][2] + 1) % 256;
counters[counter].color_values[18] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[3][0] + 1) % 256))) * 0.8)));
counters[counter].color_values[19] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[3][1] + 1) % 256))) * 0.8)));
counters[counter].color_values[20] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[3][2] + 1) % 256))) * 0.8)));
counters[counter].color_values[18] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[3][0] + 1) % 256)) * 0.8));
counters[counter].color_values[19] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[3][1] + 1) % 256)) * 0.8));
counters[counter].color_values[20] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[3][2] + 1) % 256)) * 0.8));
counters[counter].color_values[21] = (colors[3][0] + 1) % 256;
counters[counter].color_values[22] = (colors[3][1] + 1) % 256;
counters[counter].color_values[23] = (colors[3][2] + 1) % 256;
@@ -203,7 +203,7 @@ pub fn main() anyerror!void {
_ = try recorder.wait();
all_timestamps[0] = wrote.len;
for (counters, 0..) |count, i| {
for (counters) |count, i| {
all_timestamps[i + 1] = count.timestamp;
}

View File

@@ -43,7 +43,7 @@ pub fn main() anyerror!void {
var position = try std.fmt.parseInt(u32, position_str, 10);
const filepath = try std.fs.path.resolve(allocator, &.{basepath});
var file = try std.fs.openFileAbsolute(filepath, .{ .write = true });
var ms = @as(u64, @truncate((try std.fmt.parseInt(u128, args[args.len - 1], 10)) * std.time.ns_per_ms));
var ms = @truncate(u64, (try std.fmt.parseInt(u128, args[args.len - 1], 10)) * std.time.ns_per_ms);
std.debug.assert(ms > 0);
// std.debug.assert(std.math.isFinite(position));
var prng = std.rand.DefaultPrng.init(0);
@@ -112,30 +112,30 @@ pub fn main() anyerror!void {
\\
++ SIMULATE_LONG_FILE;
counters[counter].timestamp = @as(u64, @truncate(@as(u128, @intCast(std.time.nanoTimestamp())) / (std.time.ns_per_ms / 10)));
counters[counter].timestamp = @truncate(u64, @intCast(u128, std.time.nanoTimestamp()) / (std.time.ns_per_ms / 10));
counters[counter].rotate = rotate % 360;
counters[counter].percent = std.math.mod(f64, std.math.round(((progress_bar + 1.0) / destination_count) * 1000) / 1000, 100) catch 0;
counters[counter].color_values[0] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[0][0] + 1) % 256))) * 0.8)));
counters[counter].color_values[1] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[0][1] + 1) % 256))) * 0.8)));
counters[counter].color_values[2] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[0][2] + 1) % 256))) * 0.8)));
counters[counter].color_values[0] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[0][0] + 1) % 256)) * 0.8));
counters[counter].color_values[1] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[0][1] + 1) % 256)) * 0.8));
counters[counter].color_values[2] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[0][2] + 1) % 256)) * 0.8));
counters[counter].color_values[3] = (colors[0][0] + 1) % 256;
counters[counter].color_values[4] = (colors[0][1] + 1) % 256;
counters[counter].color_values[5] = (colors[0][2] + 1) % 256;
counters[counter].color_values[6] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[1][0] + 1) % 256))) * 0.8)));
counters[counter].color_values[7] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[1][1] + 1) % 256))) * 0.8)));
counters[counter].color_values[8] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[1][2] + 1) % 256))) * 0.8)));
counters[counter].color_values[6] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[1][0] + 1) % 256)) * 0.8));
counters[counter].color_values[7] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[1][1] + 1) % 256)) * 0.8));
counters[counter].color_values[8] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[1][2] + 1) % 256)) * 0.8));
counters[counter].color_values[9] = (colors[1][0] + 1) % 256;
counters[counter].color_values[10] = (colors[1][1] + 1) % 256;
counters[counter].color_values[11] = (colors[1][2] + 1) % 256;
counters[counter].color_values[12] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[2][0] + 1) % 256))) * 0.8)));
counters[counter].color_values[13] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[2][1] + 1) % 256))) * 0.8)));
counters[counter].color_values[14] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[2][2] + 1) % 256))) * 0.8)));
counters[counter].color_values[12] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[2][0] + 1) % 256)) * 0.8));
counters[counter].color_values[13] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[2][1] + 1) % 256)) * 0.8));
counters[counter].color_values[14] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[2][2] + 1) % 256)) * 0.8));
counters[counter].color_values[15] = (colors[2][0] + 1) % 256;
counters[counter].color_values[16] = (colors[2][1] + 1) % 256;
counters[counter].color_values[17] = (colors[2][2] + 1) % 256;
counters[counter].color_values[18] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[3][0] + 1) % 256))) * 0.8)));
counters[counter].color_values[19] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[3][1] + 1) % 256))) * 0.8)));
counters[counter].color_values[20] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[3][2] + 1) % 256))) * 0.8)));
counters[counter].color_values[18] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[3][0] + 1) % 256)) * 0.8));
counters[counter].color_values[19] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[3][1] + 1) % 256)) * 0.8));
counters[counter].color_values[20] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[3][2] + 1) % 256)) * 0.8));
counters[counter].color_values[21] = (colors[3][0] + 1) % 256;
counters[counter].color_values[22] = (colors[3][1] + 1) % 256;
counters[counter].color_values[23] = (colors[3][2] + 1) % 256;
@@ -190,7 +190,7 @@ pub fn main() anyerror!void {
_ = try recorder.wait();
all_timestamps[0] = wrote.len;
for (counters, 0..) |count, i| {
for (counters) |count, i| {
all_timestamps[i + 1] = count.timestamp;
}

View File

@@ -5,4 +5,4 @@
"jsx": "react-jsx",
"paths": {}
}
}
}

View File

@@ -1,4 +1,4 @@
import { bench, run } from "mitata";
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
bench("noop", function () {});
bench("async function(){}", async function () {});

View File

@@ -1,14 +0,0 @@
import { bench, run } from "./runner.mjs";
import { Buffer } from "node:buffer";
const bigBuffer = Buffer.from("hello world".repeat(10000));
const converted = bigBuffer.toString("base64");
bench("Buffer.toString('base64')", () => {
return bigBuffer.toString("base64");
});
// bench("Buffer.from(str, 'base64')", () => {
// return Buffer.from(converted, "base64");
// });
await run();

View File

@@ -1,6 +1,12 @@
// so it can run in environments without node module resolution
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
import crypto from "node:crypto";
var crypto = globalThis.crypto;
if (!crypto) {
crypto = await import("node:crypto");
}
var foo = new Uint8Array(65536);
bench("crypto.getRandomValues(65536)", () => {
crypto.getRandomValues(foo);
@@ -16,8 +22,4 @@ bench("crypto.randomUUID()", () => {
return crypto.randomUUID()[2];
});
bench("crypto.randomInt()", () => {
return crypto.randomInt(0, 100);
});
await run();
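For context (not part of the diff), the `globalThis.crypto` fallback shown in this hunk keeps the benchmark runnable on runtimes that do not expose WebCrypto as a global; a self-contained sketch of the same pattern, assuming a Node.js version that re-exports getRandomValues() and randomUUID() from node:crypto:

```ts
// Sketch of the fallback pattern from the hunk above (ESM, so top-level await works).
let crypto = globalThis.crypto;
if (!crypto) {
  // Fall back to the node:crypto module namespace on runtimes without a global.
  crypto = await import("node:crypto");
}

const buf = new Uint8Array(16);
crypto.getRandomValues(buf);
console.log(crypto.randomUUID(), buf);
```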

View File

@@ -1,12 +0,0 @@
import { bench, run } from "./runner.mjs";
var err = new Error();
bench("Error.captureStackTrace(err)", () => {
Error.captureStackTrace(err);
});
bench("Error.prototype.stack", () => {
new Error().stack;
});
await run();

View File

@@ -1,65 +0,0 @@
// This is a stress test of some internals of how Bun does the module.exports assignment.
// If it crashes or throws then this fails
import("./runner.mjs").then(({ bench, run }) => {
bench("Object.defineProperty(module, 'exports', { get() { return 42; } })", () => {
Object.defineProperty(module, "exports", {
get() {
return 42;
},
set() {
throw new Error("bad");
},
configurable: true,
});
if (module.exports !== 42) throw new Error("bad");
if (!Object.getOwnPropertyDescriptor(module, "exports").get) throw new Error("bad");
});
bench("Object.defineProperty(module.exports = {})", () => {
Object.defineProperty(module, "exports", {
value: { abc: 123 },
});
if (!module.exports.abc) throw new Error("bad");
if (Object.getOwnPropertyDescriptor(module, "exports").value !== module.exports) throw new Error("bad");
});
bench("module.exports = {}", () => {
module.exports = { abc: 123 };
if (!module.exports.abc) throw new Error("bad");
if (Object.getOwnPropertyDescriptor(module, "exports").value !== module.exports) throw new Error("bad");
});
run().then(() => {
module.exports = {
a: 1,
};
console.log(
module?.exports,
require.cache[module.id].exports,
module?.exports === require.cache[module.id],
__dirname,
Object.keys(require(module.id)),
require(module.id),
);
module.exports = function lol() {
return 42;
};
console.log(module.exports, module.exports());
queueMicrotask(() => {
console.log(
module?.exports,
require.cache[module.id].exports,
module?.exports === require.cache[module.id]?.exports,
__dirname,
Object.keys(require(module.id)),
require(module.id),
);
});
});
});

View File

@@ -1,33 +0,0 @@
import { bench, run } from "./runner.mjs";
import { performance } from "perf_hooks";
bench("process.memoryUsage()", () => {
process.memoryUsage();
});
bench("process.memoryUsage.rss()", () => {
process.memoryUsage.rss();
});
bench("process.cpuUsage()", () => {
process.cpuUsage();
});
const init = process.cpuUsage();
bench("process.cpuUsage(delta)", () => {
process.cpuUsage(init);
});
bench("performance.now()", () => {
performance.now();
});
bench("process.hrtime()", () => {
process.hrtime();
});
bench("process.hrtime.bigint()", () => {
process.hrtime.bigint();
});
await run();

View File

@@ -1,17 +0,0 @@
import { bench, run } from "./runner.mjs";
import { readFileSync, existsSync } from "node:fs";
import { readFile } from "node:fs/promises";
bench(`readFileSync(/tmp/404-not-found)`, () => {
try {
readFileSync("/tmp/404-not-found");
} catch (e) {}
});
bench(`readFile(/tmp/404-not-found)`, async () => {
try {
await readFile("/tmp/404-not-found");
} catch (e) {}
});
await run();

View File

@@ -1,10 +1,4 @@
import { realpathSync } from "node:fs";
const count = parseInt(process.env.ITERATIONS || "1", 10) || 1;
const arg = process.argv[process.argv.length - 1];
import { bench, run } from "./runner.mjs";
bench("realpathSync x " + count, () => {
for (let i = 0; i < count; i++) realpathSync(arg, "utf-8");
});
await run();
for (let i = 0; i < count; i++) realpathSync(arg);

View File

@@ -1,136 +0,0 @@
// This snippet mostly exists to reproduce a memory leak
//
import { bench, run } from "mitata";
const obj = {
"id": 1296269,
"node_id": "MDEwOlJlcG9zaXRvcnkxMjk2MjY5",
"name": "Hello-World",
"full_name": "octocat/Hello-World",
"owner": {
"login": "octocat",
"id": 1,
"node_id": "MDQ6VXNlcjE=",
"avatar_url": "https://github.com/images/error/octocat_happy.gif",
"gravatar_id": "",
"url": "https://api.github.com/users/octocat",
"html_url": "https://github.com/octocat",
"followers_url": "https://api.github.com/users/octocat/followers",
"following_url": "https://api.github.com/users/octocat/following{/other_user}",
"gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
"starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
"organizations_url": "https://api.github.com/users/octocat/orgs",
"repos_url": "https://api.github.com/users/octocat/repos",
"events_url": "https://api.github.com/users/octocat/events{/privacy}",
"received_events_url": "https://api.github.com/users/octocat/received_events",
"type": "User",
"site_admin": false,
},
"private": false,
"html_url": "https://github.com/octocat/Hello-World",
"description": "This your first repo!",
"fork": false,
"url": "https://api.github.com/repos/octocat/Hello-World",
"archive_url": "https://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref}",
"assignees_url": "https://api.github.com/repos/octocat/Hello-World/assignees{/user}",
"blobs_url": "https://api.github.com/repos/octocat/Hello-World/git/blobs{/sha}",
"branches_url": "https://api.github.com/repos/octocat/Hello-World/branches{/branch}",
"collaborators_url": "https://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator}",
"comments_url": "https://api.github.com/repos/octocat/Hello-World/comments{/number}",
"commits_url": "https://api.github.com/repos/octocat/Hello-World/commits{/sha}",
"compare_url": "https://api.github.com/repos/octocat/Hello-World/compare/{base}...{head}",
"contents_url": "https://api.github.com/repos/octocat/Hello-World/contents/{+path}",
"contributors_url": "https://api.github.com/repos/octocat/Hello-World/contributors",
"deployments_url": "https://api.github.com/repos/octocat/Hello-World/deployments",
"downloads_url": "https://api.github.com/repos/octocat/Hello-World/downloads",
"events_url": "https://api.github.com/repos/octocat/Hello-World/events",
"forks_url": "https://api.github.com/repos/octocat/Hello-World/forks",
"git_commits_url": "https://api.github.com/repos/octocat/Hello-World/git/commits{/sha}",
"git_refs_url": "https://api.github.com/repos/octocat/Hello-World/git/refs{/sha}",
"git_tags_url": "https://api.github.com/repos/octocat/Hello-World/git/tags{/sha}",
"git_url": "git:github.com/octocat/Hello-World.git",
"issue_comment_url": "https://api.github.com/repos/octocat/Hello-World/issues/comments{/number}",
"issue_events_url": "https://api.github.com/repos/octocat/Hello-World/issues/events{/number}",
"issues_url": "https://api.github.com/repos/octocat/Hello-World/issues{/number}",
"keys_url": "https://api.github.com/repos/octocat/Hello-World/keys{/key_id}",
"labels_url": "https://api.github.com/repos/octocat/Hello-World/labels{/name}",
"languages_url": "https://api.github.com/repos/octocat/Hello-World/languages",
"merges_url": "https://api.github.com/repos/octocat/Hello-World/merges",
"milestones_url": "https://api.github.com/repos/octocat/Hello-World/milestones{/number}",
"notifications_url": "https://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating}",
"pulls_url": "https://api.github.com/repos/octocat/Hello-World/pulls{/number}",
"releases_url": "https://api.github.com/repos/octocat/Hello-World/releases{/id}",
"ssh_url": "git@github.com:octocat/Hello-World.git",
"stargazers_url": "https://api.github.com/repos/octocat/Hello-World/stargazers",
"statuses_url": "https://api.github.com/repos/octocat/Hello-World/statuses/{sha}",
"subscribers_url": "https://api.github.com/repos/octocat/Hello-World/subscribers",
"subscription_url": "https://api.github.com/repos/octocat/Hello-World/subscription",
"tags_url": "https://api.github.com/repos/octocat/Hello-World/tags",
"teams_url": "https://api.github.com/repos/octocat/Hello-World/teams",
"trees_url": "https://api.github.com/repos/octocat/Hello-World/git/trees{/sha}",
"clone_url": "https://github.com/octocat/Hello-World.git",
"mirror_url": "git:git.example.com/octocat/Hello-World",
"hooks_url": "https://api.github.com/repos/octocat/Hello-World/hooks",
"svn_url": "https://svn.github.com/octocat/Hello-World",
"homepage": "https://github.com",
"language": null,
"forks_count": 9,
"stargazers_count": 80,
"watchers_count": 80,
"size": 108,
"default_branch": "master",
"open_issues_count": 0,
"is_template": false,
"topics": ["octocat", "atom", "electron", "api"],
"has_issues": true,
"has_projects": true,
"has_wiki": true,
"has_pages": false,
"has_downloads": true,
"has_discussions": false,
"archived": false,
"disabled": false,
"visibility": "public",
"pushed_at": "2011-01-26T19:06:43Z",
"created_at": "2011-01-26T19:01:12Z",
"updated_at": "2011-01-26T19:14:43Z",
"permissions": {
"admin": false,
"push": false,
"pull": true,
},
"security_and_analysis": {
"advanced_security": {
"status": "enabled",
},
"secret_scanning": {
"status": "enabled",
},
"secret_scanning_push_protection": {
"status": "disabled",
},
},
};
// Force the string to be 8bit
const str = String.fromCharCode(
...JSON.stringify(obj)
.split("")
.map(a => a.charCodeAt(0)),
);
var i = 0;
bench("new Response().arrayBuffer() (new string each call, latin1)", async () => {
return await new Response(str + i++).arrayBuffer();
});
bench("new Response().arrayBuffer() (new string each call, utf16)", async () => {
return await new Response(str + i++ + "😊").arrayBuffer();
});
bench("new Response().arrayBuffer() (existing string, latin1)", async () => {
return await new Response(str).arrayBuffer();
});
await run();

View File

@@ -1,123 +0,0 @@
// This snippet mostly exists to reproduce a memory leak
import { bench, run } from "mitata";
const obj = {
"id": 1296269,
"node_id": "MDEwOlJlcG9zaXRvcnkxMjk2MjY5",
"name": "Hello-World",
"full_name": "octocat/Hello-World",
"owner": {
"login": "octocat",
"id": 1,
"node_id": "MDQ6VXNlcjE=",
"avatar_url": "https://github.com/images/error/octocat_happy.gif",
"gravatar_id": "",
"url": "https://api.github.com/users/octocat",
"html_url": "https://github.com/octocat",
"followers_url": "https://api.github.com/users/octocat/followers",
"following_url": "https://api.github.com/users/octocat/following{/other_user}",
"gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
"starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
"organizations_url": "https://api.github.com/users/octocat/orgs",
"repos_url": "https://api.github.com/users/octocat/repos",
"events_url": "https://api.github.com/users/octocat/events{/privacy}",
"received_events_url": "https://api.github.com/users/octocat/received_events",
"type": "User",
"site_admin": false,
},
"private": false,
"html_url": "https://github.com/octocat/Hello-World",
"description": "This your first repo!",
"fork": false,
"url": "https://api.github.com/repos/octocat/Hello-World",
"archive_url": "https://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref}",
"assignees_url": "https://api.github.com/repos/octocat/Hello-World/assignees{/user}",
"blobs_url": "https://api.github.com/repos/octocat/Hello-World/git/blobs{/sha}",
"branches_url": "https://api.github.com/repos/octocat/Hello-World/branches{/branch}",
"collaborators_url": "https://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator}",
"comments_url": "https://api.github.com/repos/octocat/Hello-World/comments{/number}",
"commits_url": "https://api.github.com/repos/octocat/Hello-World/commits{/sha}",
"compare_url": "https://api.github.com/repos/octocat/Hello-World/compare/{base}...{head}",
"contents_url": "https://api.github.com/repos/octocat/Hello-World/contents/{+path}",
"contributors_url": "https://api.github.com/repos/octocat/Hello-World/contributors",
"deployments_url": "https://api.github.com/repos/octocat/Hello-World/deployments",
"downloads_url": "https://api.github.com/repos/octocat/Hello-World/downloads",
"events_url": "https://api.github.com/repos/octocat/Hello-World/events",
"forks_url": "https://api.github.com/repos/octocat/Hello-World/forks",
"git_commits_url": "https://api.github.com/repos/octocat/Hello-World/git/commits{/sha}",
"git_refs_url": "https://api.github.com/repos/octocat/Hello-World/git/refs{/sha}",
"git_tags_url": "https://api.github.com/repos/octocat/Hello-World/git/tags{/sha}",
"git_url": "git:github.com/octocat/Hello-World.git",
"issue_comment_url": "https://api.github.com/repos/octocat/Hello-World/issues/comments{/number}",
"issue_events_url": "https://api.github.com/repos/octocat/Hello-World/issues/events{/number}",
"issues_url": "https://api.github.com/repos/octocat/Hello-World/issues{/number}",
"keys_url": "https://api.github.com/repos/octocat/Hello-World/keys{/key_id}",
"labels_url": "https://api.github.com/repos/octocat/Hello-World/labels{/name}",
"languages_url": "https://api.github.com/repos/octocat/Hello-World/languages",
"merges_url": "https://api.github.com/repos/octocat/Hello-World/merges",
"milestones_url": "https://api.github.com/repos/octocat/Hello-World/milestones{/number}",
"notifications_url": "https://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating}",
"pulls_url": "https://api.github.com/repos/octocat/Hello-World/pulls{/number}",
"releases_url": "https://api.github.com/repos/octocat/Hello-World/releases{/id}",
"ssh_url": "git@github.com:octocat/Hello-World.git",
"stargazers_url": "https://api.github.com/repos/octocat/Hello-World/stargazers",
"statuses_url": "https://api.github.com/repos/octocat/Hello-World/statuses/{sha}",
"subscribers_url": "https://api.github.com/repos/octocat/Hello-World/subscribers",
"subscription_url": "https://api.github.com/repos/octocat/Hello-World/subscription",
"tags_url": "https://api.github.com/repos/octocat/Hello-World/tags",
"teams_url": "https://api.github.com/repos/octocat/Hello-World/teams",
"trees_url": "https://api.github.com/repos/octocat/Hello-World/git/trees{/sha}",
"clone_url": "https://github.com/octocat/Hello-World.git",
"mirror_url": "git:git.example.com/octocat/Hello-World",
"hooks_url": "https://api.github.com/repos/octocat/Hello-World/hooks",
"svn_url": "https://svn.github.com/octocat/Hello-World",
"homepage": "https://github.com",
"language": null,
"forks_count": 9,
"stargazers_count": 80,
"watchers_count": 80,
"size": 108,
"default_branch": "master",
"open_issues_count": 0,
"is_template": false,
"topics": ["octocat", "atom", "electron", "api"],
"has_issues": true,
"has_projects": true,
"has_wiki": true,
"has_pages": false,
"has_downloads": true,
"has_discussions": false,
"archived": false,
"disabled": false,
"visibility": "public",
"pushed_at": "2011-01-26T19:06:43Z",
"created_at": "2011-01-26T19:01:12Z",
"updated_at": "2011-01-26T19:14:43Z",
"permissions": {
"admin": false,
"push": false,
"pull": true,
},
"security_and_analysis": {
"advanced_security": {
"status": "enabled",
},
"secret_scanning": {
"status": "enabled",
},
"secret_scanning_push_protection": {
"status": "disabled",
},
},
};
bench("Response.json(obj)", async () => {
return Response.json(obj);
});
bench("Response.json(obj).json()", async () => {
return await Response.json(obj).json();
});
await run();

View File

@@ -1,128 +0,0 @@
import { serialize, deserialize } from "node:v8";
import { bench, run } from "./runner.mjs";
const obj = {
"id": 1296269,
"node_id": "MDEwOlJlcG9zaXRvcnkxMjk2MjY5",
"name": "Hello-World",
"full_name": "octocat/Hello-World",
"owner": {
"login": "octocat",
"id": 1,
"node_id": "MDQ6VXNlcjE=",
"avatar_url": "https://github.com/images/error/octocat_happy.gif",
"gravatar_id": "",
"url": "https://api.github.com/users/octocat",
"html_url": "https://github.com/octocat",
"followers_url": "https://api.github.com/users/octocat/followers",
"following_url": "https://api.github.com/users/octocat/following{/other_user}",
"gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
"starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
"organizations_url": "https://api.github.com/users/octocat/orgs",
"repos_url": "https://api.github.com/users/octocat/repos",
"events_url": "https://api.github.com/users/octocat/events{/privacy}",
"received_events_url": "https://api.github.com/users/octocat/received_events",
"type": "User",
"site_admin": false,
},
"private": false,
"html_url": "https://github.com/octocat/Hello-World",
"description": "This your first repo!",
"fork": false,
"url": "https://api.github.com/repos/octocat/Hello-World",
"archive_url": "https://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref}",
"assignees_url": "https://api.github.com/repos/octocat/Hello-World/assignees{/user}",
"blobs_url": "https://api.github.com/repos/octocat/Hello-World/git/blobs{/sha}",
"branches_url": "https://api.github.com/repos/octocat/Hello-World/branches{/branch}",
"collaborators_url": "https://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator}",
"comments_url": "https://api.github.com/repos/octocat/Hello-World/comments{/number}",
"commits_url": "https://api.github.com/repos/octocat/Hello-World/commits{/sha}",
"compare_url": "https://api.github.com/repos/octocat/Hello-World/compare/{base}...{head}",
"contents_url": "https://api.github.com/repos/octocat/Hello-World/contents/{+path}",
"contributors_url": "https://api.github.com/repos/octocat/Hello-World/contributors",
"deployments_url": "https://api.github.com/repos/octocat/Hello-World/deployments",
"downloads_url": "https://api.github.com/repos/octocat/Hello-World/downloads",
"events_url": "https://api.github.com/repos/octocat/Hello-World/events",
"forks_url": "https://api.github.com/repos/octocat/Hello-World/forks",
"git_commits_url": "https://api.github.com/repos/octocat/Hello-World/git/commits{/sha}",
"git_refs_url": "https://api.github.com/repos/octocat/Hello-World/git/refs{/sha}",
"git_tags_url": "https://api.github.com/repos/octocat/Hello-World/git/tags{/sha}",
"git_url": "git:github.com/octocat/Hello-World.git",
"issue_comment_url": "https://api.github.com/repos/octocat/Hello-World/issues/comments{/number}",
"issue_events_url": "https://api.github.com/repos/octocat/Hello-World/issues/events{/number}",
"issues_url": "https://api.github.com/repos/octocat/Hello-World/issues{/number}",
"keys_url": "https://api.github.com/repos/octocat/Hello-World/keys{/key_id}",
"labels_url": "https://api.github.com/repos/octocat/Hello-World/labels{/name}",
"languages_url": "https://api.github.com/repos/octocat/Hello-World/languages",
"merges_url": "https://api.github.com/repos/octocat/Hello-World/merges",
"milestones_url": "https://api.github.com/repos/octocat/Hello-World/milestones{/number}",
"notifications_url": "https://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating}",
"pulls_url": "https://api.github.com/repos/octocat/Hello-World/pulls{/number}",
"releases_url": "https://api.github.com/repos/octocat/Hello-World/releases{/id}",
"ssh_url": "git@github.com:octocat/Hello-World.git",
"stargazers_url": "https://api.github.com/repos/octocat/Hello-World/stargazers",
"statuses_url": "https://api.github.com/repos/octocat/Hello-World/statuses/{sha}",
"subscribers_url": "https://api.github.com/repos/octocat/Hello-World/subscribers",
"subscription_url": "https://api.github.com/repos/octocat/Hello-World/subscription",
"tags_url": "https://api.github.com/repos/octocat/Hello-World/tags",
"teams_url": "https://api.github.com/repos/octocat/Hello-World/teams",
"trees_url": "https://api.github.com/repos/octocat/Hello-World/git/trees{/sha}",
"clone_url": "https://github.com/octocat/Hello-World.git",
"mirror_url": "git:git.example.com/octocat/Hello-World",
"hooks_url": "https://api.github.com/repos/octocat/Hello-World/hooks",
"svn_url": "https://svn.github.com/octocat/Hello-World",
"homepage": "https://github.com",
"language": null,
"forks_count": 9,
"stargazers_count": 80,
"watchers_count": 80,
"size": 108,
"default_branch": "master",
"open_issues_count": 0,
"is_template": false,
"topics": ["octocat", "atom", "electron", "api"],
"has_issues": true,
"has_projects": true,
"has_wiki": true,
"has_pages": false,
"has_downloads": true,
"has_discussions": false,
"archived": false,
"disabled": false,
"visibility": "public",
"pushed_at": "2011-01-26T19:06:43Z",
"created_at": "2011-01-26T19:01:12Z",
"updated_at": "2011-01-26T19:14:43Z",
"permissions": {
"admin": false,
"push": false,
"pull": true,
},
"security_and_analysis": {
"advanced_security": {
"status": "enabled",
},
"secret_scanning": {
"status": "enabled",
},
"secret_scanning_push_protection": {
"status": "disabled",
},
},
};
bench("serialize", () => {
serialize(obj);
});
const serialized = serialize(obj);
bench("deserialize", () => {
deserialize(serialized);
});
if (typeof Bun !== "undefined") {
if (!Bun.deepEquals(obj, deserialize(serialized))) {
throw new Error("not equal");
}
}
await run();

View File

@@ -1,39 +0,0 @@
var testArray = [
{
description: "Random description.",
testNumber: 123456789,
testBoolean: true,
testObject: {
testString: "test string",
testNumber: 12345,
},
testArray: [
{
myName: "test name",
myNumber: 123245,
},
],
},
{
description: "Random description.",
testNumber: 123456789,
testBoolean: true,
testObject: {
testString: "test string",
testNumber: 12345,
},
testArray: [
{
myName: "test name",
myNumber: 123245,
},
],
},
];
import { bench, run } from "./runner.mjs";
bench("structuredClone(array)", () => structuredClone(testArray));
bench("structuredClone(123)", () => structuredClone(123));
bench("structuredClone({a: 123})", () => structuredClone({ a: 123 }));
await run();

View File

@@ -1,60 +0,0 @@
import { bench, run } from "./runner.mjs";
function deprecateUsingClosure(fn, msg, code) {
if (process.noDeprecation === true) {
return fn;
}
var realFn = fn;
var wrapper = function () {
// use a function expression so `this` and `arguments` refer to the wrapper's own call
return fnToWrap.apply(this, arguments);
};
var deprecater = function () {
if (process.throwDeprecation) {
var err = new Error(msg);
if (code) err.code = code;
throw err;
} else if (process.traceDeprecation) {
console.trace(msg);
} else {
console.error(msg);
}
fnToWrap = realFn;
return realFn.apply(this, arguments);
};
var fnToWrap = deprecater;
return wrapper;
}
function deprecateOriginal(fn, msg) {
var warned = false;
function deprecated() {
if (!warned) {
if (process.throwDeprecation) {
throw new Error(msg);
} else if (process.traceDeprecation) {
console.trace(msg);
} else {
console.error(msg);
}
warned = true;
}
return fn.apply(this, arguments);
}
return deprecated;
}
const deprecatedy = deprecateUsingClosure(() => {}, "This is deprecated", "DEP0001");
const deprecatedy2 = deprecateOriginal(() => {}, "This is deprecated");
bench("deprecateUsingClosure", () => {
deprecatedy(Math.random() + 1);
});
bench("deprecateOriginal", () => {
deprecatedy2(Math.random() + 1);
});
await run();

View File

@@ -1,20 +0,0 @@
import { group } from "mitata";
import { bench, run } from "./runner.mjs";
const sizes = [
["small (63 bytes)", 63],
["medium (4096 bytes)", 4096],
["large (64 MB)", 64 * 1024 * 1024],
];
for (let [name, size] of sizes) {
group(name, () => {
var buf = new Uint8Array(size);
for (let algorithm of ["SHA-1", "SHA-256", "SHA-384", "SHA-512"]) {
bench(algorithm, async () => {
await crypto.subtle.digest(algorithm, buf);
});
}
});
}
await run();

Binary file not shown.

View File

@@ -1,7 +1,7 @@
{
"name": "bench",
"dependencies": {
"better-sqlite3": "8.5.0"
"better-sqlite3": "^8.0.1"
},
"scripts": {
"build": "exit 0",

View File

@@ -2,7 +2,7 @@
This benchmarks a websocket server intended as a simple but very active chat room.
First, start the server. By default, it will wait for 32 clients which the client script will handle.
First, start the server. By default, it will wait for 16 clients which the client script will handle.
Run in Bun (`Bun.serve`):
@@ -19,10 +19,10 @@ node ./chat-server.node.mjs
Run in Deno (`Deno.serve`):
```bash
deno run -A ./chat-server.deno.mjs
deno run -A --unstable ./chat-server.deno.mjs
```
Then, run the client script. By default, it will connect 32 clients. This client script can run in Bun, Node, or Deno
Then, run the client script. By default, it will connect 16 clients. This client script can run in Bun, Node, or Deno
```bash
node ./chat-client.mjs
@@ -34,4 +34,6 @@ For example, when the client sends `"foo"`, the server sends back `"John: foo"`
The client script waits until it receives all the messages for each client before sending the next batch of messages.
TODO: once Deno lands their performance improvements, increase the client count (it was originally going to be 32 or 64, but that would've excluded Deno from the benchmark)
This project was created using `bun init` in bun v0.2.1. [Bun](https://bun.sh) is a fast all-in-one JavaScript runtime.

Binary file not shown.

View File

@@ -3,7 +3,7 @@ const env = "process" in globalThis ? process.env : "Deno" in globalThis ? Deno.
const SERVER = env.SERVER || "ws://0.0.0.0:4001";
const WebSocket = globalThis.WebSocket || (await import("ws")).WebSocket;
const LOG_MESSAGES = env.LOG_MESSAGES === "1";
const CLIENTS_TO_WAIT_FOR = parseInt(env.CLIENTS_COUNT || "", 10) || 32;
const CLIENTS_TO_WAIT_FOR = parseInt(env.CLIENTS_COUNT || "", 10) || 16;
const DELAY = 64;
const MESSAGES_TO_SEND = Array.from({ length: 32 }, () => [
"Hello World!",

View File

@@ -1,5 +1,5 @@
// See ./README.md for instructions on how to run this benchmark.
const CLIENTS_TO_WAIT_FOR = parseInt(process.env.CLIENTS_COUNT || "", 10) || 32;
const CLIENTS_TO_WAIT_FOR = parseInt(process.env.CLIENTS_COUNT || "", 10) || 16;
var remainingClients = CLIENTS_TO_WAIT_FOR;
const COMPRESS = process.env.COMPRESS === "1";
const port = process.PORT || 4001;
@@ -32,7 +32,6 @@ const server = Bun.serve({
},
perMessageDeflate: false,
publishToSelf: true,
},
fetch(req, server) {

View File

@@ -1,6 +1,6 @@
// See ./README.md for instructions on how to run this benchmark.
const port = Deno.env.get("PORT") || 4001;
const CLIENTS_TO_WAIT_FOR = parseInt(Deno.env.get("CLIENTS_COUNT") || "", 10) || 32;
const CLIENTS_TO_WAIT_FOR = parseInt(Deno.env.get("CLIENTS_COUNT") || "", 10) || 16;
var clients = [];
async function reqHandler(req) {
@@ -42,4 +42,4 @@ function sendReadyMessage() {
console.log(`Waiting for ${CLIENTS_TO_WAIT_FOR} clients to connect..`);
Deno.serve({ port }, reqHandler);
Deno.serve(reqHandler, { port });

View File

@@ -1,6 +1,6 @@
// See ./README.md for instructions on how to run this benchmark.
const port = process.env.PORT || 4001;
const CLIENTS_TO_WAIT_FOR = parseInt(process.env.CLIENTS_COUNT || "", 10) || 32;
const CLIENTS_TO_WAIT_FOR = parseInt(process.env.CLIENTS_COUNT || "", 10) || 16;
import { createRequire } from "module";
const require = createRequire(import.meta.url);

View File

@@ -3,8 +3,8 @@
"module": "index.ts",
"type": "module",
"dependencies": {
"bufferutil": "4.0.7",
"utf-8-validate": "6.0.3",
"ws": "8.13.0"
"bufferutil": "^4.0.7",
"utf-8-validate": "^5.0.10",
"ws": "^8.9.0"
}
}

View File

@@ -1,6 +1,5 @@
const std = @import("std");
const Wyhash = @import("./src/wyhash.zig").Wyhash;
var is_debug_build = false;
fn moduleSource(comptime out: []const u8) FileSource {
if (comptime std.fs.path.dirname(@src().file)) |base| {
const outpath = comptime base ++ std.fs.path.sep_str ++ out;
@@ -9,6 +8,27 @@ fn moduleSource(comptime out: []const u8) FileSource {
return FileSource.relative(out);
}
}
pub fn addPicoHTTP(step: *CompileStep, comptime with_obj: bool) void {
step.addIncludePath("src/deps");
if (with_obj) {
step.addObjectFile("src/deps/picohttpparser.o");
}
step.addIncludePath("src/deps");
if (with_obj) {
step.addObjectFile(panicIfNotFound("src/deps/picohttpparser.o"));
step.addObjectFile(panicIfNotFound("src/deps/libssl.a"));
step.addObjectFile(panicIfNotFound("src/deps/libcrypto.a"));
}
// step.add("/Users/jarred/Code/WebKit/WebKitBuild/Release/lib/libWTF.a");
// ./Tools/Scripts/build-jsc --jsc-only --cmakeargs="-DENABLE_STATIC_JSC=ON"
// set -gx ICU_INCLUDE_DIRS "/usr/local/opt/icu4c/include"
// homebrew-provided icu4c
}
const color_map = std.ComptimeStringMap([]const u8, .{
&.{ "black", "30m" },
@@ -55,31 +75,18 @@ const BunBuildOptions = struct {
fallback_html_version: u64 = 0,
pub fn updateRuntime(this: *BunBuildOptions) anyerror!void {
if (std.fs.cwd().openFile("src/runtime.out.js", .{ .mode = .read_only })) |file| {
defer file.close();
const runtime_hash = Wyhash.hash(
0,
try file.readToEndAlloc(std.heap.page_allocator, try file.getEndPos()),
);
this.runtime_js_version = runtime_hash;
} else |_| {
if (!is_debug_build) {
@panic("Runtime file was not read successfully. Please run `make setup`");
}
}
if (std.fs.cwd().openFile("src/fallback.out.js", .{ .mode = .read_only })) |file| {
defer file.close();
const fallback_hash = Wyhash.hash(
0,
try file.readToEndAlloc(std.heap.page_allocator, try file.getEndPos()),
);
this.fallback_html_version = fallback_hash;
} else |_| {
if (!is_debug_build) {
@panic("Fallback file was not read successfully. Please run `make setup`");
}
}
var runtime_out_file = try std.fs.cwd().openFile("src/runtime.out.js", .{ .mode = .read_only });
const runtime_hash = std.hash.Wyhash.hash(
0,
try runtime_out_file.readToEndAlloc(std.heap.page_allocator, try runtime_out_file.getEndPos()),
);
this.runtime_js_version = runtime_hash;
var fallback_out_file = try std.fs.cwd().openFile("src/fallback.out.js", .{ .mode = .read_only });
const fallback_hash = std.hash.Wyhash.hash(
0,
try fallback_out_file.readToEndAlloc(std.heap.page_allocator, try fallback_out_file.getEndPos()),
);
this.fallback_html_version = fallback_hash;
}
pub fn step(this: BunBuildOptions, b: anytype) *std.build.OptionsStep {
@@ -179,19 +186,18 @@ pub fn build(b: *Build) !void {
}
std.fs.cwd().makePath(output_dir) catch {};
is_debug_build = optimize == OptimizeMode.Debug;
const bun_executable_name = if (optimize == std.builtin.OptimizeMode.Debug) "bun-debug" else "bun";
const root_src = if (target.getOsTag() == std.Target.Os.Tag.freestanding)
"src/main_wasm.zig"
else
"root.zig";
const min_version: std.SemanticVersion = if (target.getOsTag() != .freestanding)
const min_version: std.builtin.Version = if (target.getOsTag() != .freestanding)
target.getOsVersionMin().semver
else
.{ .major = 0, .minor = 0, .patch = 0 };
const max_version: std.SemanticVersion = if (target.getOsTag() != .freestanding)
const max_version: std.builtin.Version = if (target.getOsTag() != .freestanding)
target.getOsVersionMax().semver
else
.{ .major = 0, .minor = 0, .patch = 0 };
@@ -240,6 +246,7 @@ pub fn build(b: *Build) !void {
};
{
addPicoHTTP(obj, false);
obj.setMainPkgPath(b.pathFromRoot("."));
try addInternalPackages(
@@ -460,6 +467,8 @@ pub fn configureObjectStep(b: *std.build.Builder, obj: *CompileStep, comptime Ta
// obj.setTarget(target);
try addInternalPackages(b, obj, std.heap.page_allocator, b.zig_exe, target);
if (target.getOsTag() != .freestanding)
addPicoHTTP(obj, false);
obj.strip = false;

BIN
bun.lockb

Binary file not shown.

View File

@@ -92,10 +92,10 @@ _bun_completions() {
PACKAGE_OPTIONS[REMOVE_OPTIONS_LONG]="";
PACKAGE_OPTIONS[REMOVE_OPTIONS_SHORT]="";
PACKAGE_OPTIONS[SHARED_OPTIONS_LONG]="--config --yarn --production --frozen-lockfile --no-save --dry-run --lockfile --force --cache-dir --no-cache --silent --verbose --global --cwd --backend --link-native-bins --help";
PACKAGE_OPTIONS[SHARED_OPTIONS_LONG]="--config --yarn --production --no-save --dry-run --lockfile --force --cache-dir --no-cache --silent --verbose --global --cwd --backend --link-native-bins --help";
PACKAGE_OPTIONS[SHARED_OPTIONS_SHORT]="-c -y -p -f -g";
PM_OPTIONS[LONG_OPTIONS]="--config --yarn --production --frozen-lockfile --no-save --dry-run --lockfile --force --cache-dir --no-cache --silent --verbose --no-progress --no-summary --no-verify --ignore-scripts --global --cwd --backend --link-native-bins --help"
PM_OPTIONS[LONG_OPTIONS]="--config --yarn --production --no-save --dry-run --lockfile --force --cache-dir --no-cache --silent --verbose --no-progress --no-summary --no-verify --ignore-scripts --global --cwd --backend --link-native-bins --help"
PM_OPTIONS[SHORT_OPTIONS]="-c -y -p -f -g"
local cur_word="${COMP_WORDS[${COMP_CWORD}]}";

View File

@@ -47,7 +47,6 @@ _bun() {
'-g[Add a package globally]' \
'--global[Add a package globally]' \
'--production[Don'"'"'t install devDependencies]' \
'--frozen-lockfile[Disallow changes to lockfile]' \
'--optional[Add dependency to optionalDependencies]' \
'--development[Add dependency to devDependencies]' \
'-d[Add dependency to devDependencies]' \
@@ -89,7 +88,6 @@ _bun() {
'--yarn[Write a yarn.lock file (yarn v1)]' \
'--global[Add a package globally]' \
'--production[Don'"'"'t install devDependencies]' \
'--frozen-lockfile[Disallow changes to lockfile]' \
'--optional[Add dependency to optionalDependencies]' \
'--development[Add dependency to devDependencies]' \
'-d[Add dependency to devDependencies]' \
@@ -125,7 +123,6 @@ _bun() {
'--yarn[Write a yarn.lock file (yarn v1)]' \
'--global[Add a package globally]' \
'--production[Don'"'"'t install devDependencies]' \
'--frozen-lockfile[Disallow changes to lockfile]' \
'--optional[Add dependency to optionalDependencies]' \
'--development[Add dependency to devDependencies]' \
'-d[Add dependency to devDependencies]' \
@@ -281,7 +278,6 @@ _bun() {
'--yarn[Write a yarn.lock file (yarn v1)]'
'-p[Do not install devDependencies]'
'--production[Do not install devDependencies]'
'--frozen-lockfile[Disallow changes to lockfile]' \
'--no-save[Do not save a lockfile]'
'--dry-run[Do not install anything]'
'--lockfile[Store & load a lockfile at a specific filepath]'
@@ -536,7 +532,6 @@ _bun() {
'--yarn[Write a yarn.lock file (yarn v1)]' \
'--production[Don'"'"'t install devDependencies]' \
'-p[Don'"'"'t install devDependencies]' \
'--frozen-lockfile[Disallow changes to lockfile]' \
'--no-save[]' \
'--dry-run[Don'"'"'t install anything]' \
'--force[Always request the latest versions from the registry & reinstall all dependencies]' \
@@ -570,7 +565,6 @@ _bun() {
'--yarn[Write a yarn.lock file (yarn v1)]' \
'--production[Don'"'"'t install devDependencies]' \
'-p[Don'"'"'t install devDependencies]' \
'--frozen-lockfile[Disallow changes to lockfile]' \
'--no-save[]' \
'--dry-run[Don'"'"'t install anything]' \
'-g[Remove a package globally]' \

View File

@@ -115,7 +115,6 @@ subcommands:
- yarn -- "Write a yarn.lock file (yarn v1)"
- production -- "Don't install devDependencies"
- p -- "Don't install devDependencies"
- frozen-lockfile -- "Disallow changes to lockfile"
- no-save --
- dry-run -- "Don't install anything"
- force -- "Always request the latest versions from the registry & reinstall all dependenices"
@@ -153,7 +152,6 @@ subcommands:
- development -- "Add dependency to devDependencies"
- d -- "Add dependency to devDependencies"
- p -- "Don't install devDependencies"
- frozen-lockfile -- "Disallow changes to lockfile"
- no-save --
- dry-run -- "Don't install anything"
- force -- "Always request the latest versions from the registry & reinstall all dependenices"
@@ -194,7 +192,6 @@ subcommands:
- yarn -- "Write a yarn.lock file (yarn v1)"
- production -- "Don't install devDependencies"
- p -- "Don't install devDependencies"
- frozen-lockfile -- "Disallow changes to lockfile"
- no-save --
- dry-run -- "Don't install anything"
- force -- "Always request the latest versions from the registry & reinstall all dependenices"

View File

@@ -202,53 +202,6 @@ const response = await fetch("https://bun.sh");
await Bun.write("index.html", response);
```
## Incremental writing with `FileSink`
Bun provides a native incremental file writing API called `FileSink`. To retrieve a `FileSink` instance from a `BunFile`:
```ts
const file = Bun.file("output.txt");
const writer = file.writer();
```
To incrementally write to the file, call `.write()`.
```ts
const file = Bun.file("output.txt");
const writer = file.writer();
writer.write("it was the best of times\n");
writer.write("it was the worst of times\n");
```
These chunks will be buffered internally. To flush the buffer to disk, use `.flush()`. This returns the number of flushed bytes.
```ts
writer.flush(); // write buffer to disk
```
The buffer will also auto-flush when the `FileSink`'s _high water mark_ is reached; that is, when its internal buffer is full. This value can be configured.
```ts
const file = Bun.file("output.txt");
const writer = file.writer({ highWaterMark: 1024 * 1024 }); // 1MB
```
To flush the buffer and close the file:
```ts
writer.end();
```
Note that, by default, the `bun` process will stay alive until this `FileSink` is explicitly closed with `.end()`. To opt out of this behavior, you can "unref" the instance.
```ts
writer.unref();
// to "re-ref" it later
writer.ref();
```
## Benchmarks
The following is a 3-line implementation of the Linux `cat` command.
@@ -285,13 +238,7 @@ interface Bun {
write(
destination: string | number | BunFile | URL,
input:
| string
| Blob
| ArrayBuffer
| SharedArrayBuffer
| TypedArray
| Response,
input: string | Blob | ArrayBuffer | SharedArrayBuffer | TypedArray | Response,
): Promise<number>;
}
@@ -303,17 +250,5 @@ interface BunFile {
stream(): Promise<ReadableStream>;
arrayBuffer(): Promise<ArrayBuffer>;
json(): Promise<any>;
writer(params: { highWaterMark?: number }): FileSink;
}
export interface FileSink {
write(
chunk: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer,
): number;
flush(): number | Promise<number>;
end(error?: Error): number | Promise<number>;
start(options?: { highWaterMark?: number }): void;
ref(): void;
unref(): void;
}
```

View File

@@ -4,71 +4,6 @@ Bun implements the `createHash` and `createHmac` functions from [`node:crypto`](
{% /callout %}
## `Bun.password`
{% callout %}
**Note** — Added in Bun 0.6.8.
{% /callout %}
`Bun.password` is a collection of utility functions for hashing and verifying passwords with various cryptographically secure algorithms.
```ts
const password = "super-secure-pa$$word";
const hash = await Bun.password.hash(password);
// => $argon2id$v=19$m=65536,t=2,p=1$tFq+9AVr1bfPxQdh6E8DQRhEXg/M/SqYCNu6gVdRRNs$GzJ8PuBi+K+BVojzPfS5mjnC8OpLGtv8KJqF99eP6a4
const isMatch = await Bun.password.verify(password, hash);
// => true
```
The second argument to `Bun.password.hash` accepts a params object that lets you pick and configure the hashing algorithm.
```ts
const password = "super-secure-pa$$word";
// use argon2 (default)
const argonHash = await Bun.password.hash(password, {
algorithm: "argon2id", // "argon2id" | "argon2i" | "argon2d"
memoryCost: 4, // memory usage in kibibytes
timeCost: 3, // the number of iterations
});
// use bcrypt
const bcryptHash = await Bun.password.hash(password, {
algorithm: "bcrypt",
cost: 4, // number between 4-31
});
```
The algorithm used to create the hash is stored in the hash itself. When using `bcrypt`, the returned hash is encoded in [Modular Crypt Format](https://passlib.readthedocs.io/en/stable/modular_crypt_format.html) for compatibility with most existing `bcrypt` implementations; with `argon2` the result is encoded in the newer [PHC format](https://github.com/P-H-C/phc-string-format/blob/master/phc-sf-spec.md).
The `verify` function automatically detects the algorithm based on the input hash and uses the correct verification method. It can correctly infer the algorithm from both PHC- and MCF-encoded hashes.
```ts
const password = "super-secure-pa$$word";
const hash = await Bun.password.hash(password, {
/* config */
});
const isMatch = await Bun.password.verify(password, hash);
// => true
```
Synchronous versions of all functions are also available. Keep in mind that these functions are computationally expensive, so using a blocking API may degrade application performance.
```ts
const password = "super-secure-pa$$word";
const hash = Bun.password.hashSync(password, {
/* config */
});
const isMatch = Bun.password.verifySync(password, hash);
// => true
```
## `Bun.hash`
`Bun.hash` is a collection of utilities for _non-cryptographic_ hashing. Non-cryptographic hashing algorithms are optimized for speed of computation over collision-resistance or security.
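A minimal sketch of the API (the default algorithm and the exact set of per-algorithm helpers are assumptions here and may vary between Bun versions):
```ts
// Hash a string or TypedArray with the default algorithm (Wyhash)
const digest = Bun.hash("hello world");
console.log(digest); // a numeric hash value

// Individual algorithms are exposed as functions on Bun.hash
Bun.hash.crc32("hello world");
Bun.hash.adler32("hello world");
```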

View File

@@ -67,7 +67,7 @@ Bun.serve({
fetch(req) {
throw new Error("woops!");
},
error(error) {
error(error: Error) {
return new Response(`<pre>${error}\n${error.stack}</pre>`, {
headers: {
"Content-Type": "text/html",
@@ -95,37 +95,38 @@ server.stop();
## TLS
Bun supports TLS out of the box, powered by [BoringSSL](https://boringssl.googlesource.com/boringssl). Enable TLS by passing in a value for `key` and `cert`; both are required to enable TLS.
Bun supports TLS out of the box, powered by [OpenSSL](https://www.openssl.org/). Enable TLS by passing in a value for `key` and `cert`; both are required to enable TLS. If needed, supply a `passphrase` to decrypt the `keyFile`.
```ts-diff
Bun.serve({
fetch(req) {
return new Response("Hello!!!");
},
```ts
Bun.serve({
fetch(req) {
return new Response("Hello!!!");
},
+ tls: {
+ key: Bun.file("./key.pem"),
+ cert: Bun.file("./cert.pem"),
+ }
});
// can be string, BunFile, TypedArray, Buffer, or array thereof
key: Bun.file("./key.pem"),
cert: Bun.file("./cert.pem"),
// passphrase, only required if key is encrypted
passphrase: "super-secret",
});
```
The `key` and `cert` fields expect the _contents_ of your TLS key and certificate, _not a path to it_. This can be a string, `BunFile`, `TypedArray`, or `Buffer`.
The `key` and `cert` fields expect the _contents_ of your TLS key and certificate. This can be a string, `BunFile`, `TypedArray`, or `Buffer`.
```ts
Bun.serve({
fetch() {},
tls: {
// BunFile
key: Bun.file("./key.pem"),
// Buffer
key: fs.readFileSync("./key.pem"),
// string
key: fs.readFileSync("./key.pem", "utf8"),
// array of above
key: [Bun.file("./key1.pem"), Bun.file("./key2.pem")],
},
// BunFile
key: Bun.file("./key.pem"),
// Buffer
key: fs.readFileSync("./key.pem"),
// string
key: fs.readFileSync("./key.pem", "utf8"),
// array of above
key: [Bun.file('./key1.pem'), Bun.file('./key2.pem')],
});
```
@@ -135,35 +136,17 @@ Bun.serve({
{% /callout %}
If your private key is encrypted with a passphrase, provide a value for `passphrase` to decrypt it.
```ts-diff
Bun.serve({
fetch(req) {
return new Response("Hello!!!");
},
tls: {
key: Bun.file("./key.pem"),
cert: Bun.file("./cert.pem"),
+ passphrase: "my-secret-passphrase",
}
});
```
Optionally, you can override the trusted CA certificates by passing a value for `ca`. By default, the server will trust the list of well-known CAs curated by Mozilla. When `ca` is specified, the Mozilla list is overwritten.
```ts-diff
Bun.serve({
fetch(req) {
return new Response("Hello!!!");
},
tls: {
key: Bun.file("./key.pem"), // path to TLS key
cert: Bun.file("./cert.pem"), // path to TLS cert
+ ca: Bun.file("./ca.pem"), // path to root CA certificate
}
});
```ts
Bun.serve({
fetch(req) {
return new Response("Hello!!!");
},
key: Bun.file("./key.pem"), // path to TLS key
cert: Bun.file("./cert.pem"), // path to TLS cert
ca: Bun.file("./ca.pem"), // path to root CA certificate
});
```
To override Diffie-Hellman parameters:
@@ -171,10 +154,7 @@ To override Diffie-Helman parameters:
```ts
Bun.serve({
// ...
tls: {
// other config
dhParamsFile: "/path/to/dhparams.pem", // path to Diffie Helman parameters
},
dhParamsFile: "./dhparams.pem", // path to Diffie Helman parameters
});
```
@@ -295,21 +275,11 @@ interface Bun {
port?: number;
development?: boolean;
error?: (error: Error) => Response | Promise<Response>;
tls?: {
key?:
| string
| TypedArray
| BunFile
| Array<string | TypedArray | BunFile>;
cert?:
| string
| TypedArray
| BunFile
| Array<string | TypedArray | BunFile>;
ca?: string | TypedArray | BunFile | Array<string | TypedArray | BunFile>;
passphrase?: string;
dhParamsFile?: string;
};
keyFile?: string;
certFile?: string;
caFile?: string;
dhParamsFile?: string;
passphrase?: string;
maxRequestBodySize?: number;
lowMemoryMode?: boolean;
}): Server;

View File

@@ -12,7 +12,7 @@ The second argument to `Bun.spawn` is a parameters object that can be used to co
```ts
const proc = Bun.spawn(["echo", "hello"], {
cwd: "./path/to/subdir", // specify a working directory
cwd: "./path/to/subdir", // specify a working direcory
env: { ...process.env, FOO: "bar" }, // specify environment variables
onExit(proc, exitCode, signalCode, error) {
// exit handler

View File

@@ -75,7 +75,7 @@ Note: `close()` is called automatically when the database is garbage collected.
```ts
const olddb = new Database("mydb.sqlite");
const contents = olddb.serialize(); // => Uint8Array
const newdb = Database.deserialize(contents);
const newdb = new Database(contents);
```
Internally, `.serialize()` calls [`sqlite3_serialize`](https://www.sqlite.org/c3ref/serialize.html).
@@ -250,7 +250,7 @@ Transactions are a mechanism for executing multiple queries in an _atomic_ way;
```ts
const insertCat = db.prepare("INSERT INTO cats (name) VALUES ($name)");
const insertCats = db.transaction(cats => {
const insertCats = db.transaction((cats) => {
for (const cat of cats) insertCat.run(cat);
});
```
@@ -261,7 +261,7 @@ To execute the transaction, call this function. All arguments will be passed thr
```ts
const insert = db.prepare("INSERT INTO cats (name) VALUES ($name)");
const insertCats = db.transaction(cats => {
const insertCats = db.transaction((cats) => {
for (const cat of cats) insert.run(cat);
return cats.length;
});
@@ -296,11 +296,11 @@ const insertExpense = db.prepare(
"INSERT INTO expenses (note, dollars) VALUES (?, ?)",
);
const insert = db.prepare("INSERT INTO cats (name, age) VALUES ($name, $age)");
const insertCats = db.transaction(cats => {
const insertCats = db.transaction((cats) => {
for (const cat of cats) insert.run(cat);
});
const adopt = db.transaction(cats => {
const adopt = db.transaction((cats) => {
insertExpense.run("adoption fees", 20);
insertCats(cats); // nested transaction
});

View File

@@ -28,6 +28,8 @@ for await (const chunk of stream) {
}
```
For a more complete discussion of streams in Bun, see [API > Streams](/docs/api/streams).
## Direct `ReadableStream`
Bun implements an optimized version of `ReadableStream` that avoids unnecessary data copying & queue management logic. With a traditional `ReadableStream`, chunks of data are _enqueued_. Each chunk is copied into a queue, where it sits until the stream is ready to send more data.
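A minimal sketch of a direct stream, assuming the `type: "direct"` option and `controller.write()` behavior described in the full streams docs, and assuming the stream ends once `pull` completes:
```ts
// In a direct stream, chunks are written straight to the consumer via
// controller.write() instead of being copied into an internal queue.
const stream = new ReadableStream({
  type: "direct",
  pull(controller) {
    controller.write("hello");
    controller.write(" world");
  },
});

console.log(await Bun.readableStreamToText(stream)); // "hello world"
```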
@@ -152,9 +154,7 @@ export class ArrayBufferSink {
stream?: boolean;
}): void;
write(
chunk: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer,
): number;
write(chunk: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer): number;
/**
* Flush the internal buffer
*

View File

@@ -75,6 +75,16 @@ Bun.sleepSync(1000); // blocks thread for one second
console.log("hello one second later!");
```
Alternatively, pass a `Date` object to receive a `Promise` that resolves at that point in time.
```ts
const oneSecondInFuture = new Date(Date.now() + 1000);
console.log("hello");
await Bun.sleep(oneSecondInFuture);
console.log("hello one second later!");
```
## `Bun.which()`
`Bun.which(bin: string)`
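For context, a quick usage sketch (the resolved path shown is illustrative):
```ts
const bin = Bun.which("node");
console.log(bin); // e.g. "/usr/local/bin/node", or null if not found on PATH
```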
@@ -149,9 +159,7 @@ test("peek", () => {
// If we peek a rejected promise, it:
// - returns the error
// - does not mark the promise as handled
const rejected = Promise.reject(
new Error("Successfully tested promise rejection"),
);
const rejected = Promise.reject(new Error("Successfully tested promise rejection"));
expect(peek(rejected).message).toBe("Successfully tested promise rejection");
});
```
@@ -247,7 +255,7 @@ Bun.deepEquals(new Foo(), { a: 1 }, true); // false
## `Bun.escapeHTML()`
`Bun.escapeHTML(value: string | object | number | boolean): string`
`Bun.escapeHTML(value: string | object | number | boolean): boolean`
Escapes the following characters from an input string:
@@ -285,7 +293,7 @@ console.log(url); // "file:///foo/bar.txt"
## `Bun.gzipSync()`
Compresses a `Uint8Array` using zlib's GZIP algorithm.
Compresses a `Uint8Array` using zlib's DEFLATE algorithm.
```ts
const buf = Buffer.from("hello".repeat(100)); // Buffer extends Uint8Array
@@ -374,7 +382,7 @@ export type ZlibCompressionOptions = {
## `Bun.gunzipSync()`
Decompresses a `Uint8Array` using zlib's GUNZIP algorithm.
Uncompresses a `Uint8Array` using zlib's INFLATE algorithm.
```ts
const buf = Buffer.from("hello".repeat(100)); // Buffer extends Uint8Array
@@ -402,15 +410,15 @@ The second argument supports the same set of configuration options as [`Bun.gzip
## `Bun.inflateSync()`
Decompresses a `Uint8Array` using zlib's INFLATE algorithm.
Uncompresses a `Uint8Array` using zlib's INFLATE algorithm.
```ts
const buf = Buffer.from("hello".repeat(100));
const compressed = Bun.deflateSync(buf);
const dec = new TextDecoder();
const decompressed = Bun.inflateSync(compressed);
dec.decode(decompressed);
const uncompressed = Bun.inflateSync(compressed);
dec.decode(uncompressed);
// => "hellohellohello..."
```
@@ -460,12 +468,6 @@ await Bun.readableStreamToText(stream);
// returns all chunks as an array
await Bun.readableStreamToArray(stream);
// => unknown[]
// returns all chunks as a FormData object (encoded as x-www-form-urlencoded)
await Bun.readableStreamToFormData(stream);
// returns all chunks as a FormData object (encoded as multipart/form-data)
await Bun.readableStreamToFormData(stream, multipartFormBoundary);
```
## `Bun.resolveSync()`
@@ -492,17 +494,3 @@ To resolve relative to the directory containing the current file, pass `import.m
```ts
Bun.resolveSync("./foo.ts", import.meta.dir);
```
## `serialize` & `deserialize` in `bun:jsc`
To save a JavaScript value into an ArrayBuffer & back, use `serialize` and `deserialize` from the `"bun:jsc"` module.
```js
import { serialize, deserialize } from "bun:jsc";
const buf = serialize({ foo: "bar" });
const obj = deserialize(buf);
console.log(obj); // => { foo: "bar" }
```
Internally, [`structuredClone`](https://developer.mozilla.org/en-US/docs/Web/API/structuredClone) and [`postMessage`](https://developer.mozilla.org/en-US/docs/Web/API/Window/postMessage) serialize and deserialize the same way. This exposes the underlying [HTML Structured Clone Algorithm](https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Structured_clone_algorithm) to JavaScript as an ArrayBuffer.

View File

@@ -12,7 +12,41 @@
Internally Bun's WebSocket implementation is built on [uWebSockets](https://github.com/uNetworking/uWebSockets).
{% /callout %}
## Start a WebSocket server
## Connect to a WebSocket server
To connect to an external socket server, create an instance of `WebSocket` with the constructor.
```ts
const socket = new WebSocket("ws://localhost:3000");
```
Bun supports setting custom headers. This is a Bun-specific extension of the `WebSocket` standard.
```ts
const socket = new WebSocket("ws://localhost:3000", {
headers: {
// custom headers
},
});
```
To add event listeners to the socket:
```ts
// message is received
socket.addEventListener("message", event => {});
// socket opened
socket.addEventListener("open", event => {});
// socket closed
socket.addEventListener("close", event => {});
// error handler
socket.addEventListener("error", event => {});
```
## Create a WebSocket server
Below is a simple WebSocket server built with `Bun.serve`, in which all incoming requests are [upgraded](https://developer.mozilla.org/en-US/docs/Web/HTTP/Protocol_upgrade_mechanism) to WebSocket connections in the `fetch` handler. The socket handlers are declared in the `websocket` parameter.
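A minimal sketch of that shape (handler bodies and response text here are illustrative):
```ts
Bun.serve({
  fetch(req, server) {
    // attempt to upgrade the incoming HTTP request to a WebSocket connection
    if (server.upgrade(req)) {
      return; // Bun responds with 101 Switching Protocols automatically
    }
    return new Response("Upgrade failed", { status: 400 });
  },
  websocket: {
    open(ws) {}, // a socket was opened
    message(ws, message) {}, // a message was received
    close(ws, code, reason) {}, // a socket was closed
  },
});
```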
@@ -75,7 +109,7 @@ Bun.serve({
});
```
### Sending messages
## Sending messages
Each `ServerWebSocket` instance has a `.send()` method for sending messages to the client. It supports a range of input types.
@@ -85,7 +119,7 @@ ws.send(response.arrayBuffer()); // ArrayBuffer
ws.send(new Uint8Array([1, 2, 3])); // TypedArray | DataView
```
### Headers
## Headers
Once the upgrade succeeds, Bun will send a `101 Switching Protocols` response per the [spec](https://developer.mozilla.org/en-US/docs/Web/HTTP/Protocol_upgrade_mechanism). Additional `headers` can be attached to this `Response` in the call to `server.upgrade()`.
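Roughly, that looks like the following sketch (the cookie value is illustrative):
```ts
Bun.serve({
  fetch(req, server) {
    const upgraded = server.upgrade(req, {
      headers: {
        // attached to the 101 Switching Protocols response
        "Set-Cookie": "session_id=abc123",
      },
    });
    if (!upgraded) return new Response("Upgrade failed", { status: 400 });
  },
  websocket: {
    message(ws, message) {},
  },
});
```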
@@ -103,7 +137,7 @@ Bun.serve({
});
```
### Contextual data
## Contextual data
Contextual `data` can be attached to a new WebSocket in the `.upgrade()` call. This data is made available on the `ws.data` property inside the WebSocket handlers.
@@ -111,20 +145,16 @@ Contextual `data` can be attached to a new WebSocket in the `.upgrade()` call. T
type WebSocketData = {
createdAt: number;
channelId: string;
authToken: string;
};
// TypeScript: specify the type of `data`
Bun.serve<WebSocketData>({
fetch(req, server) {
// use a library to parse cookies
const cookies = parseCookies(req.headers.get("Cookie"));
server.upgrade(req, {
// this object must conform to WebSocketData
// TS: this object must conform to WebSocketData
data: {
createdAt: Date.now(),
channelId: new URL(req.url).searchParams.get("channelId"),
authToken: cookies["X-Token"],
},
});
@@ -133,76 +163,53 @@ Bun.serve<WebSocketData>({
websocket: {
// handler called when a message is received
async message(ws, message) {
const user = getUserFromToken(ws.data.authToken);
ws.data; // WebSocketData
await saveMessageToDatabase({
channel: ws.data.channelId,
message: String(message),
userId: user.id,
});
},
},
});
```
To connect to this server from the browser, create a new `WebSocket`.
```ts#browser.js
const socket = new WebSocket("ws://localhost:3000/chat");
socket.addEventListener("message", event => {
console.log(event.data);
})
```
{% callout %}
**Identifying users** — The cookies that are currently set on the page will be sent with the WebSocket upgrade request and available on `req.headers` in the `fetch` handler. Parse these cookies to determine the identity of the connecting user and set the value of `data` accordingly.
{% /callout %}
### Pub/Sub
## Pub/Sub
Bun's `ServerWebSocket` implementation implements a native publish-subscribe API for topic-based broadcasting. Individual sockets can `.subscribe()` to a topic (specified with a string identifier) and `.publish()` messages to all other subscribers to that topic. This topic-based broadcast API is similar to [MQTT](https://en.wikipedia.org/wiki/MQTT) and [Redis Pub/Sub](https://redis.io/topics/pubsub).
```ts
const server = Bun.serve<{ username: string }>({
const pubsubserver = Bun.serve<{username: string}>({
fetch(req, server) {
const url = new URL(req.url);
if (url.pathname === "/chat") {
console.log(`upgrade!`);
const username = getUsernameFromReq(req);
const success = server.upgrade(req, { data: { username } });
if (req.url === '/chat') {
const cookies = getCookieFromRequest(req);
const success = server.upgrade(req, {
data: {username: cookies.username},
});
return success
? undefined
: new Response("WebSocket upgrade error", { status: 400 });
: new Response('WebSocket upgrade error', {status: 400});
}
return new Response("Hello world");
return new Response('Hello world');
},
websocket: {
open(ws) {
const msg = `${ws.data.username} has entered the chat`;
ws.subscribe("the-group-chat");
ws.publish("the-group-chat", msg);
ws.subscribe('the-group-chat');
ws.publish('the-group-chat', `${ws.data.username} has entered the chat`);
},
message(ws, message) {
// this is a group chat
// so the server re-broadcasts incoming message to everyone
ws.publish("the-group-chat", `${ws.data.username}: ${message}`);
ws.publish('the-group-chat', `${ws.data.username}: ${message}`);
},
close(ws) {
const msg = `${ws.data.username} has left the chat`;
ws.unsubscribe("the-group-chat");
ws.publish("the-group-chat", msg);
ws.unsubscribe('the-group-chat');
ws.publish('the-group-chat', `${ws.data.username} has left the chat`);
},
},
});
console.log(`Listening on ${server.hostname}:${server.port}`);
```
Calling `.publish(data)` will send the message to all subscribers of a topic _except_ the socket that called `.publish()`.
### Compression
## Compression
Per-message [compression](https://websockets.readthedocs.io/en/stable/topics/compression.html) can be enabled with the `perMessageDeflate` parameter.
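A short sketch of the server-level setting (handlers elided; the boolean form shown here is the simplest case):
```ts
Bun.serve({
  fetch(req, server) {
    if (server.upgrade(req)) return;
    return new Response("Upgrade failed", { status: 400 });
  },
  websocket: {
    perMessageDeflate: true, // compress messages for this server's sockets
    message(ws, message) {},
  },
});
```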
@@ -224,7 +231,7 @@ ws.send("Hello world", true);
For fine-grained control over compression characteristics, refer to the [Reference](#reference).
### Backpressure
## Backpressure
The `.send(message)` method of `ServerWebSocket` returns a `number` indicating the result of the operation.
@@ -234,42 +241,6 @@ The `.send(message)` method of `ServerWebSocket` returns a `number` indicating t
This gives you better control over backpressure in your server.
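As a hedged sketch, assuming the conventional return values (`-1` for backpressure, `0` for a dropped message, a positive number for bytes sent):
```ts
Bun.serve({
  fetch(req, server) {
    if (server.upgrade(req)) return;
    return new Response("Upgrade failed", { status: 400 });
  },
  websocket: {
    message(ws, message) {
      const result = ws.send(`echo: ${message}`);
      if (result === -1) {
        // backpressure: the message was enqueued, but the client is not keeping up
      } else if (result === 0) {
        // the message was dropped
      } else {
        // result is the number of bytes sent
      }
    },
  },
});
```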
## Connect to a `WebSocket` server
To connect to an external socket server, either from a browser or from Bun, create an instance of `WebSocket` with the constructor.
```ts
const socket = new WebSocket("ws://localhost:3000");
```
In browsers, the cookies that are currently set on the page will be sent with the WebSocket upgrade request. This is a standard feature of the `WebSocket` API.
For convenience, Bun lets you set custom headers directly in the constructor. This is a Bun-specific extension of the `WebSocket` standard. _This will not work in browsers._
```ts
const socket = new WebSocket("ws://localhost:3000", {
headers: {
// custom headers
},
});
```
To add event listeners to the socket:
```ts
// message is received
socket.addEventListener("message", event => {});
// socket opened
socket.addEventListener("open", event => {});
// socket closed
socket.addEventListener("close", event => {});
// error handler
socket.addEventListener("error", event => {});
```
## Reference
```ts
@@ -277,10 +248,7 @@ namespace Bun {
export function serve(params: {
fetch: (req: Request, server: Server) => Response | Promise<Response>;
websocket?: {
message: (
ws: ServerWebSocket,
message: string | ArrayBuffer | Uint8Array,
) => void;
message: (ws: ServerWebSocket, message: string | ArrayBuffer | Uint8Array) => void;
open?: (ws: ServerWebSocket) => void;
close?: (ws: ServerWebSocket) => void;
error?: (ws: ServerWebSocket, error: Error) => void;
@@ -310,11 +278,7 @@ type Compressor =
interface Server {
pendingWebsockets: number;
publish(
topic: string,
data: string | ArrayBufferView | ArrayBuffer,
compress?: boolean,
): number;
publish(topic: string, data: string | ArrayBufferView | ArrayBuffer, compress?: boolean): number;
upgrade(
req: Request,
options?: {

View File

@@ -1,164 +0,0 @@
{% callout %}
`Worker` support was added in Bun v0.7.0.
{% /callout %}
[`Worker`](https://developer.mozilla.org/en-US/docs/Web/API/Worker) lets you start and communicate with a new JavaScript instance running on a separate thread while sharing I/O resources with the main thread.
Bun implements a minimal version of the [Web Workers API](https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API) with extensions that make it work better for server-side use cases. Like the rest of Bun, `Worker` in Bun supports CommonJS, ES Modules, TypeScript, JSX, TSX, and more out of the box. No extra build steps are necessary.
## Creating a `Worker`
Like in browsers, [`Worker`](https://developer.mozilla.org/en-US/docs/Web/API/Worker) is a global. Use it to create a new worker thread.
From the main thread:
```js#Main_thread
const workerURL = new URL("worker.ts", import.meta.url).href;
const worker = new Worker(workerURL);
worker.postMessage("hello");
worker.onmessage = event => {
console.log(event.data);
};
```
Worker thread:
```ts#worker.ts_(Worker_thread)
self.onmessage = (event: MessageEvent) => {
console.log(event.data);
postMessage("world");
};
```
You can use `import`/`export` syntax in your worker code. Unlike in browsers, there's no need to specify `{type: "module"}` to use ES Modules.
To simplify error handling, the initial script to load is resolved at the time `new Worker(url)` is called.
```js
const worker = new Worker("/not-found.js");
// throws an error immediately
```
The specifier passed to `Worker` is resolved relative to the project root (like typing `bun ./path/to/file.js`).
### `"open"`
The `"open"` event is emitted when a worker is created and ready to receive messages. This can be used to send an initial message to a worker once it's ready. (This event does not exist in browsers.)
```ts
const worker = new Worker(new URL("worker.ts", import.meta.url).href);
worker.addEventListener("open", () => {
console.log("worker is ready");
});
```
Messages are automatically enqueued until the worker is ready, so there is no need to wait for the `"open"` event to send messages.
## Messages with `postMessage`
To send messages, use [`worker.postMessage`](https://developer.mozilla.org/en-US/docs/Web/API/Worker/postMessage) and [`self.postMessage`](https://developer.mozilla.org/en-US/docs/Web/API/Window/postMessage). This leverages the [HTML Structured Clone Algorithm](https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Structured_clone_algorithm).
```js
// On the worker thread, `postMessage` is automatically "routed" to the parent thread.
postMessage({ hello: "world" });
// On the main thread
worker.postMessage({ hello: "world" });
```
To receive messages, use the [`message` event handler](https://developer.mozilla.org/en-US/docs/Web/API/Worker/message_event) on the worker and main thread.
```js
// Worker thread:
self.addEventListener("message", event => {
console.log(event.data);
});
// or use the setter:
// self.onmessage = fn
// if on the main thread
worker.addEventListener("message", event => {
console.log(event.data);
});
// or use the setter:
// worker.onmessage = fn
```
## Terminating a worker
A `Worker` instance terminates automatically when Bun's process exits. To terminate a `Worker` sooner, call `worker.terminate()`.
```ts
const worker = new Worker(new URL("worker.ts", import.meta.url).href);
// ...some time later
worker.terminate();
```
### `process.exit()`
A worker can terminate itself with `process.exit()`. This does not terminate the main process. Like in Node.js, `process.on('beforeExit', callback)` and `process.on('exit', callback)` are emitted on the worker thread (and not on the main thread).
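A small sketch of the worker-side behavior (the file name is illustrative):
```ts
// worker.ts — runs on the worker thread
process.on("exit", () => {
  // emitted on the worker thread only; the main thread does not see this
  console.log("worker is exiting");
});

process.exit(0); // terminates this worker, while the main Bun process keeps running
```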
### `"close"`
The `"close"` event is emitted when a worker has been terminated. It can take some time for the worker to actually terminate, so this event is emitted when the worker has been marked as terminated.
```ts
const worker = new Worker(new URL("worker.ts", import.meta.url).href);
worker.addEventListener("close", () => {
console.log("worker is being closed");
});
```
This event does not exist in browsers.
## Managing lifetime
By default, an active `Worker` will _not_ keep the main (spawning) process alive. Once the main script finishes, the main thread will terminate, shutting down any workers it created.
### `worker.ref`
To keep the process alive until the `Worker` terminates, call `worker.ref()`. This couples the lifetime of the worker to the lifetime of the main process.
```ts
const worker = new Worker(new URL("worker.ts", import.meta.url).href);
worker.ref();
```
Alternatively, you can also pass an `options` object to `Worker`:
```ts
const worker = new Worker(new URL("worker.ts", import.meta.url).href, {
ref: true,
});
```
### `worker.unref`
To stop keeping the process alive, call `worker.unref()`.
```ts
const worker = new Worker(new URL("worker.ts", import.meta.url).href);
worker.ref();
// ...later on
worker.unref();
```
Note: `worker.ref()` and `worker.unref()` do not exist in browsers.
## Memory usage with `smol`
JavaScript instances can use a lot of memory. Bun's `Worker` supports a `smol` mode that reduces memory usage, at a cost of performance. To enable `smol` mode, pass `smol: true` to the `options` object in the `Worker` constructor.
```js
const worker = new Worker("./i-am-smol.ts", {
smol: true,
});
```
{% details summary="What does `smol` mode actually do?" %}
Setting `smol: true` sets `JSC::HeapSize` to be `Small` instead of the default `Large`.
{% /details %}

View File

@@ -307,7 +307,7 @@ Depending on the target, Bun will apply different module resolution rules and op
---
- `browser`
- _Default._ For generating bundles that are intended for execution by a browser. Prioritizes the `"browser"` export condition when resolving imports. Importing any built-in modules, like `node:events` or `node:path` will work, but calling some functions, like `fs.readFile` will not work.
- _Default._ For generating bundles that are intended for execution by a browser. Prioritizes the `"browser"` export condition when resolving imports. An error will be thrown if any Node.js or Bun built-ins are imported or used, e.g. `node:fs` or `Bun.serve`.
---
@@ -1272,17 +1272,7 @@ interface BuildArtifact extends Blob {
sourcemap?: BuildArtifact;
}
type Loader =
| "js"
| "jsx"
| "ts"
| "tsx"
| "json"
| "toml"
| "file"
| "napi"
| "wasm"
| "text";
type Loader = "js" | "jsx" | "ts" | "tsx" | "json" | "toml" | "file" | "napi" | "wasm" | "text";
interface BuildOutput {
outputs: BuildArtifact[];

View File

@@ -10,7 +10,7 @@ Bun uses the file extension to determine which built-in _loader_ should be used
**JavaScript**. Default for `.cjs` and `.mjs`.
Parses the code and applies a set of default transforms, like dead-code elimination, tree shaking, and environment variable inlining. Note that Bun does not attempt to down-convert syntax at the moment.
Parses the code and applies a set if default transforms, like dead-code elimination, tree shaking, and environment variable inlining. Note that Bun does not attempt to down-convert syntax at the moment.
### `jsx`

View File

@@ -276,7 +276,7 @@ import MySvelteComponent from "./component.svelte";
console.log(mySvelteComponent.render());
```
## Reading the config
## Reading `Bun.build`'s config
Plugins can read and write to the [build config](/docs/bundler#api) with `build.config`.
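As a rough sketch of what that looks like (the plugin name and logged field are illustrative), a plugin passed to `Bun.build` can inspect the config it was registered with through `build.config`:

```ts
import type { BunPlugin } from "bun";

// Illustrative plugin that reads the config of the build it is part of.
const configLogger: BunPlugin = {
  name: "config-logger",
  setup(build) {
    // build.config is the configuration object passed to Bun.build()
    console.log("entrypoints:", build.config.entrypoints);
  },
};

await Bun.build({
  entrypoints: ["./index.ts"],
  outdir: "./out",
  plugins: [configLogger],
});
```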
@@ -305,10 +305,7 @@ Bun.build({
```ts
namespace Bun {
function plugin(plugin: {
name: string;
setup: (build: PluginBuilder) => void;
}): void;
function plugin(plugin: { name: string; setup: (build: PluginBuilder) => void }): void;
}
type PluginBuilder = {

View File

@@ -897,7 +897,7 @@ const myPlugin: BunPlugin = {
};
```
The `builder` object provides some methods for hooking into parts of the bundling process. Bun implements `onResolve` and `onLoad`; it does not yet implement the esbuild hooks `onStart`, `onEnd`, and `onDispose`, and `resolve` utilities. `initialOptions` is partially implemented, being read-only and only having a subset of esbuild's options; use [`config`](/docs/bundler/plugins#reading-the-config) (same thing but with Bun's `BuildConfig` format) instead.
The `builder` object provides some methods for hooking into parts of the bundling process. Bun implements `onResolve` and `onLoad`; it does not yet implement the esbuild hooks `onStart`, `onEnd`, and `onDispose`, and `resolve` utilities. `initialOptions` is partially implemented, being read-only and only having a subset of esbuild's options; use [`config`](/docs/bundler/plugins#reading-bunbuilds-config) (same thing but with Bun's `BuildConfig` format) instead.
```ts
import type { BunPlugin } from "bun";

View File

@@ -47,9 +47,6 @@ registry = "https://registry.yarnpkg.com/"
# Install for production? This is the equivalent to the "--production" CLI argument
production = false
# Disallow changes to lockfile? This is the equivalent to the "--frozen-lockfile" CLI argument
frozenLockfile = false
# Don't actually install
dryRun = true
@@ -111,7 +108,6 @@ export interface Install {
scopes: Scopes;
registry: Registry;
production: boolean;
frozenLockfile: boolean;
dryRun: boolean;
optional: boolean;
dev: boolean;

View File

@@ -49,12 +49,6 @@ To install in production mode (i.e. without `devDependencies`):
$ bun install --production
```
To install with reproducible dependencies, use `--frozen-lockfile`. If your `package.json` disagrees with `bun.lockb`, Bun will exit with an error. This is useful for production builds and CI environments.
```bash
$ bun install --frozen-lockfile
```
To perform a dry run (i.e. don't actually install anything):
```bash
@@ -86,9 +80,6 @@ peer = false
# equivalent to `--production` flag
production = false
# equivalent to `--frozen-lockfile` flag
frozenLockfile = false
# equivalent to `--dry-run` flag
dryRun = false
```
@@ -124,26 +115,6 @@ To add a package as an optional dependency (`"optionalDependencies"`):
$ bun add --optional lodash
```
To add a package and pin to the resolved version, use `--exact`. This will resolve the version of the package and add it to your `package.json` with an exact version number instead of a version range.
```bash
$ bun add react --exact
```
This will add the following to your `package.json`:
```jsonc
{
"dependencies": {
// without --exact
"react": "^18.2.0", // this matches >= 18.2.0 < 19.0.0
// with --exact
"react": "18.2.0" // this matches only 18.2.0 exactly
}
}
```
To install a package globally:
```bash
@@ -214,7 +185,7 @@ $ cd /path/to/my-app
$ bun link cool-pkg
```
In addition, the `--save` flag can be used to add `cool-pkg` to the `dependencies` field of your app's package.json with a special version specifier that tells Bun to load from the registered local directory instead of installing from `npm`:
This will add `cool-pkg` to the `dependencies` field of your app's package.json with a special version specifier that tells Bun to load from the registered local directory instead of installing from `npm`.
```json-diff
{
@@ -226,46 +197,6 @@ In addition, the `--save` flag can be used to add `cool-pkg` to the `dependencie
}
```
## Trusted dependencies
Unlike other npm clients, Bun does not execute arbitrary lifecycle scripts for installed dependencies, such as `postinstall`. These scripts represent a potential security risk, as they can execute arbitrary code on your machine.
<!-- Bun maintains an allow-list of popular packages containing `postinstall` scripts that are known to be safe. To run lifecycle scripts for packages that aren't on this list, add the package to `trustedDependencies` in your package.json. -->
To tell Bun to allow lifecycle scripts for a particular package, add the package to `trustedDependencies` in your package.json.
<!-- ```json-diff
{
"name": "my-app",
"version": "1.0.0",
+ "trustedDependencies": {
+ "my-trusted-package": "*"
+ }
}
``` -->
```json-diff
{
"name": "my-app",
"version": "1.0.0",
+ "trustedDependencies": ["my-trusted-package"]
}
```
Bun reads this field and will run lifecycle scripts for `my-trusted-package`.
<!-- If you specify a version range, Bun will only execute lifecycle scripts if the resolved package version matches the range. -->
<!--
```json
{
"name": "my-app",
"version": "1.0.0",
"trustedDependencies": {
"my-trusted-package": "^1.0.0"
}
}
``` -->
## Git dependencies
To add a dependency from a git repository:

View File

@@ -32,26 +32,6 @@ The "naked" `bun` command is equivalent to `bun run`.
$ bun index.tsx
```
### `--watch`
To run a file in watch mode, use the `--watch` flag.
```bash
$ bun --watch run index.tsx
```
### `--smol`
{% callout %}
Added in Bun v0.7.0.
{% /callout %}
In memory-constrained environments, use the `--smol` flag to reduce memory usage at a cost to performance.
```bash
$ bun --smol run index.tsx
```
## Run a `package.json` script
{% note %}
@@ -122,8 +102,8 @@ To debug environment variables, run `bun run env` to view a list of resolved env
Bun is designed to start fast and run fast.
Under the hood Bun uses the [JavaScriptCore engine](https://developer.apple.com/documentation/javascriptcore), which is developed by Apple for Safari. In most cases, the startup and running performance is faster than V8, the engine used by Node.js and Chromium-based browsers. Its transpiler and runtime are written in Zig, a modern, high-performance language. On Linux, this translates into startup times [4x faster](https://twitter.com/jarredsumner/status/1499225725492076544) than Node.js.
Under the hood Bun uses the [JavaScriptCore engine](https://developer.apple.com/documentation/javascriptcore), which is developed by Apple for Safari. In most cases, the startup and running performance is faster than V8, the engine used by Node.js and Chromium-based browsers. Its transpiler and runtime are written in Zig, a modern, high-performance language. On Linux, this translates into startup times [4x faster](https://twitter.com/jarredsumner/status/1499225725492076544) than Node.js.
{% image src="/images/bun-run-speed.jpeg" caption="Bun vs Node.js vs Deno running Hello World" /%}
<!-- If no `node_modules` directory is found in the working directory or above, Bun will abandon Node.js-style module resolution in favor of the `Bun module resolution algorithm`. Under Bun-style module resolution, all packages are _auto-installed_ on the fly into a [global module cache](/docs/install/cache). For full details on this algorithm, refer to [Runtime > Modules](/docs/runtime/modules). -->
<!-- If no `node_modules` directory is found in the working directory or above, Bun will abandon Node.js-style module resolution in favor of the `Bun module resolution algorithm`. Under Bun-style module resolution, all packages are _auto-installed_ on the fly into a [global module cache](/docs/cli/install#global-cache). For full details on this algorithm, refer to [Runtime > Modules](/docs/runtime/modules). -->

View File

@@ -65,24 +65,6 @@ $ bun test --preload ./setup.ts
See [Test > Lifecycle](/docs/test/lifecycle) for complete documentation.
## Mocks
Create mocks with the `mock` function. Mocks are automatically reset between tests.
```ts
import { test, expect, mock } from "bun:test";
const random = mock(() => Math.random());
test("random", async () => {
const val = random();
expect(val).toBeGreaterThan(0);
expect(random).toHaveBeenCalled();
expect(random).toHaveBeenCalledTimes(1);
});
```
See [Test > Mocks](/docs/test/mocks) for complete documentation.
## Snapshot testing
Snapshots are supported by `bun test`. See [Test > Snapshots](/docs/test/snapshots) for complete documentation.
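As a minimal, hedged sketch of what a snapshot test looks like (the test name and object are made up), `.toMatchSnapshot()` records a snapshot on the first run and compares against it afterwards:

```ts
import { test, expect } from "bun:test";

test("user object matches snapshot", () => {
  const user = { name: "Ada", role: "admin" };
  // The first run records a snapshot; subsequent runs compare against it.
  expect(user).toMatchSnapshot();
});
```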
@@ -95,8 +77,6 @@ Bun is compatible with popular UI testing libraries:
- [DOM Testing Library](https://testing-library.com/docs/dom-testing-library/intro/)
- [React Testing Library](https://testing-library.com/docs/react-testing-library/intro)
See [Test > DOM Testing](/docs/test/dom) for complete documentation.
## Performance
Bun's test runner is fast.

View File

@@ -1,20 +1,20 @@
[Stric](https://github.com/bunsvr) is a minimalist, fast web framework for Bun.
```ts#index.ts
import { Router } from '@stricjs/router';
import { App } from "@stricjs/core";
// Export the fetch handler and serve with Bun
export default new Router()
// Return 'Hi' on every request
.get('/', () => new Response('Hi'));
export default new App()
// Return "Hi!" on every request
.use(() => new Response("Hi!"));
```
Stric provides support for [ArrowJS](https://www.arrow-js.com), a library for building reactive interfaces.
Stric provides support for [ArrowJS](https://www.arrow-js.com), a library for building reactive interfaces in **native** JavaScript.
{% codetabs %}
```ts#src/App.ts
import { html } from '@stricjs/arrow/utils';
import { html } from "@stricjs/arrow/utils";
// Code inside this function can use web APIs
export function render() {
@@ -23,10 +23,10 @@ export function render() {
};
// Set the path to handle
export const path = '/';
export const path = "/";
```
```ts#index.ts
import { PageRouter } from '@stricjs/arrow';
import { PageRouter } from "@stricjs/arrow";
// Create a page router, build and serve directly
new PageRouter().serve();

View File

@@ -1,27 +0,0 @@
---
name: Convert an ArrayBuffer to an array of numbers
---
To retrieve the contents of an `ArrayBuffer` as an array of numbers, create a [`Uint8Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array) over the buffer, then use the [`Array.from()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/from) method to convert it to an array.
```ts
const buf = new ArrayBuffer(64);
const arr = new Uint8Array(buf);
arr.length; // 64
arr[0]; // 0 (instantiated with all zeros)
```
---
The `Uint8Array` class supports array indexing and iteration. However, if you wish to convert the instance to a regular `Array`, use `Array.from()`. (This will likely be slower than using the `Uint8Array` directly.)
```ts
const buf = new ArrayBuffer(64);
const uintArr = new Uint8Array(buf);
const regularArr = Array.from(uintArr);
// number[]
```
---
See [Docs > API > Binary Data](/docs/api/binary-data#conversion) for complete documentation on manipulating binary data with Bun.

View File

@@ -1,24 +0,0 @@
---
name: Convert an ArrayBuffer to a Blob
---
A [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob) can be constructed from an array of "chunks", where each chunk is a string, binary data structure, or another `Blob`.
```ts
const buf = new ArrayBuffer(64);
const blob = new Blob([buf]);
```
---
By default the `type` of the resulting `Blob` will be unset. This can be set manually.
```ts
const buf = new ArrayBuffer(64);
const blob = new Blob([buf], { type: "application/octet-stream" });
blob.type; // => "application/octet-stream"
```
---
See [Docs > API > Binary Data](/docs/api/binary-data#conversion) for complete documentation on manipulating binary data with Bun.

View File

@@ -1,25 +0,0 @@
---
name: Convert an ArrayBuffer to a Buffer
---
The Node.js [`Buffer`](https://nodejs.org/api/buffer.html) API predates the introduction of `ArrayBuffer` into the JavaScript language. Bun implements both.
Use the static `Buffer.from()` method to create a `Buffer` from an `ArrayBuffer`.
```ts
const arrBuffer = new ArrayBuffer(64);
const nodeBuffer = Buffer.from(arrBuffer);
```
---
To create a `Buffer` that only views a portion of the underlying buffer, pass the byte offset and length to `Buffer.from()`.
```ts
const arrBuffer = new ArrayBuffer(64);
const nodeBuffer = Buffer.from(arrBuffer, 0, 16); // view first 16 bytes
```
---
See [Docs > API > Binary Data](/docs/api/binary-data#conversion) for complete documentation on manipulating binary data with Bun.

View File

@@ -1,15 +0,0 @@
---
name: Convert an ArrayBuffer to a string
---
Bun implements the Web-standard [`TextDecoder`](https://developer.mozilla.org/en-US/docs/Web/API/TextDecoder) class for converting between binary data types and strings.
```ts
const buf = new ArrayBuffer(64);
const decoder = new TextDecoder();
const str = decoder.decode(buf);
```
---
See [Docs > API > Binary Data](/docs/api/binary-data#conversion) for complete documentation on manipulating binary data with Bun.

View File

@@ -1,39 +0,0 @@
---
name: Convert an ArrayBuffer to a Uint8Array
---
A `Uint8Array` is a _typed array_, meaning it is a mechanism for viewing the data in an underlying `ArrayBuffer`.
```ts
const buffer = new ArrayBuffer(64);
const arr = new Uint8Array(buffer);
```
---
Instances of other typed arrays can be created similarly.
```ts
const buffer = new ArrayBuffer(64);
const arr1 = new Uint8Array(buffer);
const arr2 = new Uint16Array(buffer);
const arr3 = new Uint32Array(buffer);
const arr4 = new Float32Array(buffer);
const arr5 = new Float64Array(buffer);
const arr6 = new BigInt64Array(buffer);
const arr7 = new BigUint64Array(buffer);
```
---
To create a typed array that only views a portion of the underlying buffer, pass the offset and length to the constructor.
```ts
const buffer = new ArrayBuffer(64);
const arr = new Uint8Array(buffer, 0, 16); // view first 16 bytes
```
---
See [Docs > API > Utils](/docs/api/utils) for more useful utilities.

View File

@@ -1,14 +0,0 @@
---
name: Convert a Blob to an ArrayBuffer
---
The [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob) class provides a number of methods for consuming its contents in different formats, including `.arrayBuffer()`.
```ts
const blob = new Blob(["hello world"]);
const buf = await blob.arrayBuffer();
```
---
See [Docs > API > Binary Data](/docs/api/binary-data#conversion) for complete documentation on manipulating binary data with Bun.

View File

@@ -1,14 +0,0 @@
---
name: Convert a Blob to a DataView
---
The [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob) class provides a number of methods for consuming its contents in different formats. This snippet reads the contents to an `ArrayBuffer`, then creates a `DataView` from the buffer.
```ts
const blob = new Blob(["hello world"]);
const arr = new DataView(await blob.arrayBuffer());
```
---
See [Docs > API > Binary Data](/docs/api/binary-data#conversion) for complete documentation on manipulating binary data with Bun.

View File

@@ -1,14 +0,0 @@
---
name: Convert a Blob to a ReadableStream
---
The [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob) class provides a number of methods for consuming its contents in different formats, including `.stream()`, which returns a `ReadableStream`.
```ts
const blob = new Blob(["hello world"]);
const stream = blob.stream();
```
---
See [Docs > API > Binary Data](/docs/api/binary-data#conversion) for complete documentation on manipulating binary data with Bun.

View File

@@ -1,15 +0,0 @@
---
name: Convert a Blob to a string
---
The [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob) class provides a number of methods for consuming its contents in different formats, including `.text()`.
```ts
const blob = new Blob(["hello world"]);
const str = await blob.text();
// => "hello world"
```
---
See [Docs > API > Binary Data](/docs/api/binary-data#conversion) for complete documentation on manipulating binary data with Bun.

View File

@@ -1,14 +0,0 @@
---
name: Convert a Blob to a Uint8Array
---
The [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob) class provides a number of methods for consuming its contents in different formats. This snippet reads the contents to an `ArrayBuffer`, then creates a `Uint8Array` from the buffer.
```ts
const blob = new Blob(["hello world"]);
const arr = new Uint8Array(await blob.arrayBuffer());
```
---
See [Docs > API > Binary Data](/docs/api/binary-data#conversion) for complete documentation on manipulating binary data with Bun.

View File

@@ -1,14 +0,0 @@
---
name: Convert a Buffer to an ArrayBuffer
---
The Node.js [`Buffer`](https://nodejs.org/api/buffer.html) class provides a way to view and manipulate data in an underlying `ArrayBuffer`, which is available via the `buffer` property.
```ts
const nodeBuf = Buffer.alloc(64);
const arrBuf = nodeBuf.buffer;
```
---
See [Docs > API > Binary Data](/docs/api/binary-data#conversion) for complete documentation on manipulating binary data with Bun.

View File

@@ -1,14 +0,0 @@
---
name: Convert a Buffer to a Blob
---
A [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob) can be constructed from an array of "chunks", where each chunk is a string, binary data structure (including `Buffer`), or another `Blob`.
```ts
const buf = Buffer.from("hello");
const blob = new Blob([buf]);
```
---
See [Docs > API > Binary Data](/docs/api/binary-data#conversion) for complete documentation on manipulating binary data with Bun.

View File

@@ -1,41 +0,0 @@
---
name: Convert a Buffer to a ReadableStream
---
The naive approach to creating a [`ReadableStream`](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream) from a [`Buffer`](https://nodejs.org/api/buffer.html) is to use the `ReadableStream` constructor and enqueue the entire buffer as a single chunk. For a large buffer, this may be undesirable because this approach does not actually "stream" the data in smaller chunks.
```ts
const buf = Buffer.from("hello world");
const stream = new ReadableStream({
start(controller) {
controller.enqueue(buf);
controller.close();
},
});
```
---
To stream the data in smaller chunks, first create a `Blob` instance from the `Buffer`. Then use the [`Blob.stream()`](https://developer.mozilla.org/en-US/docs/Web/API/Blob/stream) method to create a `ReadableStream` that streams the data in chunks of a specified size.
```ts
const buf = Buffer.from("hello world");
const blob = new Blob([buf]);
const stream = blob.stream();
```
---
The chunk size can be set by passing a number to the `.stream()` method.
```ts
const buf = Buffer.from("hello world");
const blob = new Blob([buf]);
// set chunk size of 1024 bytes
const stream = blob.stream(1024);
```
---
See [Docs > API > Binary Data](/docs/api/binary-data#conversion) for complete documentation on manipulating binary data with Bun.

View File

@@ -1,25 +0,0 @@
---
name: Convert a Buffer to a string
---
The [`Buffer`](https://nodejs.org/api/buffer.html) class provides a built-in `.toString()` method that converts a `Buffer` to a string.
```ts
const buf = Buffer.from("hello");
const str = buf.toString();
// => "hello"
```
---
You can optionally specify an encoding and byte range.
```ts
const buf = Buffer.from("hello world!");
const str = buf.toString("utf8", 0, 5);
// => "hello"
```
---
See [Docs > API > Binary Data](/docs/api/binary-data#conversion) for complete documentation on manipulating binary data with Bun.

View File

@@ -1,14 +0,0 @@
---
name: Convert a Buffer to a Uint8Array
---
The Node.js [`Buffer`](https://nodejs.org/api/buffer.html) class extends [`Uint8Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array), so no conversion is needed. All properties and methods on `Uint8Array` are available on `Buffer`.
```ts
const buf = Buffer.alloc(64);
buf instanceof Uint8Array; // => true
```
---
See [Docs > API > Binary Data](/docs/api/binary-data#conversion) for complete documentation on manipulating binary data with Bun.

View File

@@ -1,15 +0,0 @@
---
name: Convert a DataView to a string
---
If a [`DataView`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/DataView) contains ASCII-encoded text, you can convert it to a string using the [`TextDecoder`](https://developer.mozilla.org/en-US/docs/Web/API/TextDecoder) class.
```ts
const dv: DataView = ...;
const decoder = new TextDecoder();
const str = decoder.decode(dv);
```
---
See [Docs > API > Binary Data](/docs/api/binary-data#conversion) for complete documentation on manipulating binary data with Bun.

View File

@@ -1,4 +0,0 @@
{
"name": "Binary data",
"description": "A collection of guides for converting between binary data formats with Bun"
}

View File

@@ -1,25 +0,0 @@
---
name: Convert a Uint8Array to an ArrayBuffer
---
A [`Uint8Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array) is a _typed array_ class, meaning it is a mechanism for viewing data in an underlying `ArrayBuffer`. The underlying `ArrayBuffer` is accessible via the `buffer` property.
```ts
const arr = new Uint8Array(64);
arr.buffer; // => ArrayBuffer(64)
```
---
The `Uint8Array` may be a view over a _subset_ of the data in the underlying `ArrayBuffer`. In this case, the `buffer` property will return the entire buffer, and the `byteOffset` and `byteLength` properties will indicate the subset.
```ts
const buf = new ArrayBuffer(64);
const arr = new Uint8Array(buf, 16, 32);
arr.buffer; // => ArrayBuffer(64)
arr.byteOffset; // => 16
arr.byteLength; // => 32
```
---
See [Docs > API > Binary Data](/docs/api/binary-data#conversion) for complete documentation on manipulating binary data with Bun.

View File

@@ -1,16 +0,0 @@
---
name: Convert a Uint8Array to a Blob
---
A [`Blob`](https://developer.mozilla.org/en-US/docs/Web/API/Blob) can be constructed from an array of "chunks", where each chunk is a string, binary data structure (including `Uint8Array`), or another `Blob`.
```ts
const arr = new Uint8Array([0x68, 0x65, 0x6c, 0x6c, 0x6f]);
const blob = new Blob([arr]);
console.log(await blob.text());
// => "hello"
```
---
See [Docs > API > Binary Data](/docs/api/binary-data#conversion) for complete documentation on manipulating binary data with Bun.

View File

@@ -1,14 +0,0 @@
---
name: Convert a Uint8Array to a Buffer
---
The [`Buffer`](https://nodejs.org/api/buffer.html) class extends [`Uint8Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array) with a number of additional methods. Use `Buffer.from()` to create a `Buffer` instance from a `Uint8Array`.
```ts
const arr: Uint8Array = ...
const buf = Buffer.from(arr);
```
---
See [Docs > API > Binary Data](/docs/api/binary-data#conversion) for complete documentation on manipulating binary data with Bun.

View File

@@ -1,14 +0,0 @@
---
name: Convert a Uint8Array to a DataView
---
A [`Uint8Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array) is a _typed array_ class, meaning it is a mechanism for viewing data in an underlying `ArrayBuffer`. The following snippet creates a [`DataView`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/DataView) instance over the same range of data as the `Uint8Array`.
```ts
const arr: Uint8Array = ...
const dv = new DataView(arr.buffer, arr.byteOffset, arr.byteLength);
```
---
See [Docs > API > Binary Data](/docs/api/binary-data#conversion) for complete documentation on manipulating binary data with Bun.

View File

@@ -1,41 +0,0 @@
---
name: Convert a Uint8Array to a ReadableStream
---
The naive approach to creating a [`ReadableStream`](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream) from a [`Uint8Array`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array) is to use the `ReadableStream` constructor and enqueue the entire array as a single chunk. For a large array, this may be undesirable because it isn't actually "streaming" the data.
```ts
const arr = new Uint8Array(64);
const stream = new ReadableStream({
start(controller) {
controller.enqueue(arr);
controller.close();
},
});
```
---
To stream the data in smaller chunks, first create a `Blob` instance from the `Uint8Array`. Then use the [`Blob.stream()`](https://developer.mozilla.org/en-US/docs/Web/API/Blob/stream) method to create a `ReadableStream` that streams the data in chunks of a specified size.
```ts
const arr = new Uint8Array(64);
const blob = new Blob([arr]);
const stream = blob.stream();
```
---
The chunk size can be set by passing a number to the `.stream()` method.
```ts
const arr = new Uint8Array(64);
const blob = new Blob([arr]);
// set chunk size of 1024 bytes
const stream = blob.stream(1024);
```
---
See [Docs > API > Binary Data](/docs/api/binary-data#conversion) for complete documentation on manipulating binary data with Bun.

View File

@@ -1,16 +0,0 @@
---
name: Convert a Uint8Array to a string
---
Bun implements the Web-standard [`TextDecoder`](https://developer.mozilla.org/en-US/docs/Web/API/TextDecoder) class for converting between binary data types like `Uint8Array` and strings.
```ts
const arr = new Uint8Array([104, 101, 108, 108, 111]);
const decoder = new TextDecoder();
const str = decoder.decode(arr);
// => "hello"
```
---
See [Docs > API > Binary Data](/docs/api/binary-data#conversion) for complete documentation on manipulating binary data with Bun.

View File

@@ -1,77 +0,0 @@
---
name: Create a Discord bot
---
Discord.js works [out of the box](https://bun.sh/blog/bun-v0.6.7) with Bun. Let's write a simple bot. First create a directory and initialize it with `bun init`.
```bash
mkdir my-bot
cd my-bot
bun init
```
---
Now install Discord.js.
```bash
bun add discord.js
```
---
Before we go further, we need to go to the [Discord developer portal](https://discord.com/developers/applications), login/signup, create a new _Application_, then create a new _Bot_ within that application. Follow the [official guide](https://discordjs.guide/preparations/setting-up-a-bot-application.html#creating-your-bot) for step-by-step instructions.
---
Once complete, you'll be presented with your bot's _token_. Let's add this to a file called `.env.local`. Bun automatically reads this file and loads it into `process.env`.
{% callout %}
This is an example token that has already been invalidated.
{% /callout %}
```txt#.env.local
DISCORD_TOKEN=NzkyNzE1NDU0MTk2MDg4ODQy.X-hvzA.Ovy4MCQywSkoMRRclStW4xAYK7I
```
---
Be sure to add `.env.local` to your `.gitignore`! It is dangerous to check your bot's private key into version control.
```txt#.gitignore
node_modules
.env.local
```
---
Now let's actually write our bot in a new file called `bot.ts`.
```ts#bot.ts
// import discord.js
import {Client, Events, GatewayIntentBits} from 'discord.js';
// create a new Client instance
const client = new Client({intents: [GatewayIntentBits.Guilds]});
// listen for the client to be ready
client.once(Events.ClientReady, (c) => {
console.log(`Ready! Logged in as ${c.user.tag}`);
});
// login with the token from .env.local
client.login(process.env.DISCORD_TOKEN);
```
---
Now we can run our bot with `bun run`. It may take several seconds for the client to initialize the first time you run the file.
```bash
$ bun run bot.ts
Ready! Logged in as my-bot#1234
```
---
You're up and running with a bare-bones Discord.js bot! This is a basic guide to setting up your bot with Bun; we recommend the [official Discord docs](https://discordjs.guide/) for complete information on the `discord.js` API.

View File

@@ -1,4 +0,0 @@
{
"name": "Ecosystem",
"description": "A collection of guides for using various tools and frameworks with Bun"
}

View File

@@ -1,80 +0,0 @@
---
name: Use MongoDB and Mongoose
---
MongoDB and Mongoose work out of the box with Bun. This guide assumes you've already installed MongoDB and are running it as a background process or service on your development machine. Follow [this guide](https://www.mongodb.com/docs/manual/installation/) for details.
---
Once MongoDB is running, create a directory and initialize it with `bun init`.
```bash
mkdir mongoose-app
cd mongoose-app
bun init
```
---
Then add Mongoose as a dependency.
```bash
bun add mongoose
```
---
In `schema.ts` we'll declare and export a simple `Animal` model.
```ts#schema.ts
import * as mongoose from 'mongoose';
const animalSchema = new mongoose.Schema(
{
name: {type: String, required: true},
sound: {type: String, required: true},
}
);
export type Animal = mongoose.InferSchemaType<typeof animalSchema>;
export const Animal = mongoose.model('Animal', animalSchema);
```
---
Now from `index.ts` we can import `Animal`, connect to MongoDB, and add some data to our database.
```ts#index.ts
import * as mongoose from 'mongoose';
import {Animal} from './schema';
// connect to database
await mongoose.connect('mongodb://127.0.0.1:27017/mongoose-app');
// create new Animal
const cow = new Animal({
name: 'Cow',
sound: 'Moo',
});
await cow.save(); // saves to the database
// read all Animals
const animals = await Animal.find();
console.log(`${animals[0].sound}!`); // logs "Moo!"
// disconnect
await mongoose.disconnect();
```
---
Let's run this with `bun run`.
```bash
$ bun run index.ts
Moo!
```
---
This is a simple introduction to using Mongoose with TypeScript and Bun. As you build your application, refer to the official [MongoDB](https://docs.mongodb.com/) and [Mongoose](https://mongoosejs.com/docs/) sites for complete documentation.

View File

@@ -1,110 +0,0 @@
---
name: Use Prisma
---
Prisma works out of the box with Bun. First, create a directory and initialize it with `bun init`.
```bash
mkdir prisma-app
cd prisma-app
bun init
```
---
Then add Prisma as a dependency.
```bash
bun add prisma
```
---
We'll use the Prisma CLI with `bunx` to initialize our schema and migration directory. For simplicity, we'll be using a local SQLite database.
```bash
bunx prisma init --datasource-provider sqlite
```
---
Open `prisma/schema.prisma` and add a simple `User` model.
```prisma-diff#prisma/schema.prisma
generator client {
provider = "prisma-client-js"
}
datasource db {
provider = "sqlite"
url = env("DATABASE_URL")
}
+ model User {
+ id Int @id @default(autoincrement())
+ email String @unique
+ name String?
+ }
```
---
Then generate and run the initial migration.
This will generate a `.sql` migration file in `prisma/migrations`, create a new SQLite instance, and execute the migration against the new instance.
```bash
bunx prisma migrate dev --name init
```
---
Prisma automatically generates our _Prisma client_ whenever we execute a new migration. The client provides a fully typed API for reading and writing from our database.
It can be imported from `@prisma/client`.
```ts#src/index.ts
import {PrismaClient} from "@prisma/client";
```
---
Let's write a simple script to create a new user, then count the number of users in the database.
```ts#index.ts
import { PrismaClient } from "@prisma/client";
const prisma = new PrismaClient();
// create a new user
const user = await prisma.user.create({
  data: {
    name: "John Dough",
    email: `john-${Math.random()}@example.com`,
  },
});
console.log(`Created ${user.email}`);
// count the number of users
const count = await prisma.user.count();
console.log(`There are ${count} users in the database.`);
```
---
Let's run this script with `bun run`. Each time we run it, a new user is created.
```bash
$ bun run index.ts
Created john-0.12802932895402364@example.com
There are 1 users in the database.
$ bun run index.ts
Created john-0.8671308799782803@example.com
There are 2 users in the database.
$ bun run index.ts
Created john-0.4465968383115295@example.com
There are 3 users in the database.
```
---
That's it! Now that you've set up Prisma using Bun, we recommend referring to the [official Prisma docs](https://www.prisma.io/docs/concepts/components/prisma-client) as you continue to develop your application.

View File

@@ -1,71 +0,0 @@
---
name: Use Vite
---
{% callout %}
While Vite currently works with Bun, it has not been heavily optimized, nor has Vite been adapted to use Bun's bundler, module resolver, or transpiler.
{% /callout %}
---
Vite works out of the box with Bun (v0.7 and later). Get started with one of Vite's templates.
```bash
$ bunx create-vite my-app
✔ Select a framework: React
✔ Select a variant: TypeScript + SWC
Scaffolding project in /path/to/my-app...
```
---
Then `cd` into the project directory and install dependencies.
```bash
cd my-app
bun install
```
---
Start the development server with the `vite` CLI using `bunx`.
The `--bun` flag tells Bun to run Vite's CLI using `bun` instead of `node`; by default Bun respects Vite's `#!/usr/bin/env node` [shebang line](<https://en.wikipedia.org/wiki/Shebang_(Unix)>). After Bun 1.0 this flag will no longer be necessary.
```bash
bunx --bun vite
```
---
To simplify this command, update the `"dev"` script in `package.json` to the following.
```json-diff#package.json
"scripts": {
- "dev": "vite",
+ "dev": "bunx --bun vite",
"build": "vite build",
"serve": "vite preview"
},
// ...
```
---
Now you can start the development server with `bun run dev`.
```bash
bun run dev
```
---
The following command will build your app for production.
```sh
$ bunx --bun vite build
```
---
This is a stripped down guide to get you started with Vite + Bun. For more information, see the [Vite documentation](https://vitejs.dev/guide/).

View File

@@ -1,55 +0,0 @@
---
name: Hot reload an HTTP server
---
Bun supports the [`--hot`](/docs/runtime/hot#hot-mode) flag to run a file with hot reloading enabled. When any module or file changes, Bun re-runs the file.
```sh
bun --hot run index.ts
```
---
To avoid re-running `Bun.serve()` during `--hot` reloads, you should assign the `Server` instance as a property of `globalThis`. The `globalThis` object survives hot reloads.
```ts
import { type Serve, type Server } from "bun";
// make TypeScript happy
declare global {
var server: Server;
}
// define server parameters
const serveOptions: Serve = {
port: 3000,
fetch(req) {
return new Response(`Hello world`);
},
};
if (!globalThis.server) {
globalThis.server = Bun.serve(serveOptions);
} else {
globalThis.server.reload(serveOptions);
}
```
---
To avoid manually calling `server.reload()`, you can instead start a server with Bun's [object syntax](/docs/runtime/hot#http-servers). If you `export default` a plain object with a `fetch` handler defined, then run this file with Bun, Bun will start an HTTP server as if you'd passed this object into `Bun.serve()`.
With this approach, Bun automatically reloads the server when hot reloads occur.
See [HTTP > Hot Reloading](https://bun.sh/docs/api/http#hot-reloading) for full docs.
```ts
import { type Serve } from "bun";
export default {
port: 3000,
fetch(req) {
return new Response(`Hello world`);
},
} satisfies Serve;
```

View File

@@ -1,4 +0,0 @@
{
"name": "HTTP",
"description": "A collection of guides for building HTTP servers with Bun"
}

View File

@@ -1,18 +0,0 @@
---
name: Write a simple HTTP server
---
This starts an HTTP server listening on port `3000`. It responds to all requests with a `Response` with status `200` and body `"Welcome to Bun!"`.
See [`Bun.serve`](/docs/api/http) for details.
```ts
const server = Bun.serve({
port: 3000,
fetch(request) {
return new Response("Welcome to Bun!");
},
});
console.log(`Listening on localhost:${server.port}`);
```

Some files were not shown because too many files have changed in this diff.