Mirror of https://github.com/oven-sh/bun, synced 2026-02-06 00:48:55 +00:00

Compare commits: jarred/str ... debugger-d (14 commits)
| SHA1 |
|---|
| 7c9352392d |
| fef9853d5a |
| 7dae496847 |
| 92b060c6e2 |
| 2a64e8b3bb |
| fde3b7fbb6 |
| c16e769383 |
| 5badc728d0 |
| c4b3b321c2 |
| cf599e77d9 |
| b727689a9b |
| 4a36470588 |
| ca08cf6b0a |
| 10bd0fac3a |
.github/workflows/bun-linux-aarch64.yml (vendored, 2 changes)

@@ -36,7 +36,7 @@ jobs:
 arch: aarch64
 build_arch: arm64
 runner: linux-arm64
-webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-2/bun-webkit-linux-arm64-lto.tar.gz"
+webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-linux-arm64-lto.tar.gz"
 webkit_basename: "bun-webkit-linux-arm64-lto"
 build_machine_arch: aarch64
.github/workflows/bun-linux-build.yml (vendored, 8 changes)

@@ -46,7 +46,7 @@ jobs:
 arch: x86_64
 build_arch: amd64
 runner: big-ubuntu
-webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-2/bun-webkit-linux-amd64-lto.tar.gz"
+webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-linux-amd64-lto.tar.gz"
 webkit_basename: "bun-webkit-linux-amd64-lto"
 build_machine_arch: x86_64
 - cpu: nehalem
@@ -54,7 +54,7 @@ jobs:
 arch: x86_64
 build_arch: amd64
 runner: big-ubuntu
-webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-2/bun-webkit-linux-amd64-lto.tar.gz"
+webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-linux-amd64-lto.tar.gz"
 webkit_basename: "bun-webkit-linux-amd64-lto"
 build_machine_arch: x86_64
@@ -187,8 +187,8 @@ jobs:
 unzip bun-${{matrix.tag}}.zip
 cd bun-${{matrix.tag}}
 chmod +x bun
-pwd >> $GITHUB_PATH
-./bun --version
+sudo mv bun /usr/local/bin/bun
+bun --version
 - id: test
 name: Test (node runner)
 # if: ${{github.event.inputs.use_bun == 'false'}}
.github/workflows/bun-mac-aarch64.yml (vendored, 32 changes)

@@ -117,7 +117,7 @@ jobs:
 # obj: bun-obj-darwin-x64-baseline
 # runner: macos-11
 # artifact: bun-obj-darwin-x64-baseline
-# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-2/bun-webkit-macos-amd64-lto.tar.gz"
+# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
 # dependencies: true
 # compile_obj: false
 # - cpu: haswell
@@ -126,7 +126,7 @@ jobs:
 # obj: bun-obj-darwin-x64
 # runner: macos-11
 # artifact: bun-obj-darwin-x64
-# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-2/bun-webkit-macos-amd64-lto.tar.gz"
+# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
 # dependencies: true
 # compile_obj: false
 # - cpu: nehalem
@@ -135,7 +135,7 @@ jobs:
 # obj: bun-obj-darwin-x64-baseline
 # runner: macos-11
 # artifact: bun-obj-darwin-x64-baseline
-# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-2/bun-webkit-macos-amd64-lto.tar.gz"
+# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
 # dependencies: false
 # compile_obj: true
 # - cpu: haswell
@@ -144,7 +144,7 @@ jobs:
 # obj: bun-obj-darwin-x64
 # runner: macos-11
 # artifact: bun-obj-darwin-x64
-# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-2/bun-webkit-macos-amd64-lto.tar.gz"
+# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
 # dependencies: false
 # compile_obj: true
 - cpu: native
@@ -152,7 +152,7 @@ jobs:
 tag: bun-darwin-aarch64
 obj: bun-obj-darwin-aarch64
 artifact: bun-obj-darwin-aarch64
-webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-2/bun-webkit-macos-arm64-lto.tar.gz"
+webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-arm64-lto.tar.gz"
 runner: macos-arm64
 dependencies: true
 compile_obj: true
@@ -173,9 +173,9 @@ jobs:
 BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
 run: |
 brew install ccache rust llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
-echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
-echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
-echo "$(brew --prefix llvm@15)/bin" >> $GITHUB_PATH
+echo "export PATH=$(brew --prefix ccache)/bin:\$PATH" >> $GITHUB_ENV
+echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
+echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
 brew link --overwrite llvm@15
 - name: ccache
 uses: hendrikmuhs/ccache-action@v1.2
@@ -257,7 +257,7 @@ jobs:
 # package: bun-darwin-x64
 # runner: macos-11
 # artifact: bun-obj-darwin-x64-baseline
-# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-2/bun-webkit-macos-amd64-lto.tar.gz"
+# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
 # - cpu: haswell
 # arch: x86_64
 # tag: bun-darwin-x64
@@ -265,14 +265,14 @@ jobs:
 # package: bun-darwin-x64
 # runner: macos-11
 # artifact: bun-obj-darwin-x64
-# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-2/bun-webkit-macos-amd64-lto.tar.gz"
+# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
 - cpu: native
 arch: aarch64
 tag: bun-darwin-aarch64
 obj: bun-obj-darwin-aarch64
 package: bun-darwin-aarch64
 artifact: bun-obj-darwin-aarch64
-webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-2/bun-webkit-macos-arm64-lto.tar.gz"
+webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-arm64-lto.tar.gz"
 runner: macos-arm64
 steps:
 - uses: actions/checkout@v3
@@ -291,8 +291,8 @@ jobs:
 BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
 run: |
 brew install rust ccache llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
-echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
-echo "$(brew --prefix llvm@15)/bin" >> $GITHUB_PATH
+echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
+echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
 brew link --overwrite llvm@15
 - name: ccache
 uses: hendrikmuhs/ccache-action@v1.2
@@ -397,7 +397,7 @@ jobs:
 runs-on: ${{ matrix.runner }}
 needs: [macOS]
 if: github.event_name == 'pull_request' && github.repository_owner == 'oven-sh'
-timeout-minutes: 30
+timeout-minutes: 10
 outputs:
 failing_tests: ${{ steps.test.outputs.failing_tests }}
 failing_tests_count: ${{ steps.test.outputs.failing_tests_count }}
@@ -426,8 +426,8 @@ jobs:
 unzip ${{matrix.tag}}.zip
 cd ${{matrix.tag}}
 chmod +x bun
-pwd >> $GITHUB_PATH
-./bun --version
+sudo mv bun /usr/local/bin/bun
+bun --version
 - id: test
 name: Test (node runner)
 # if: ${{github.event.inputs.use_bun == 'false'}}
.github/workflows/bun-mac-x64-baseline.yml (vendored, 30 changes)

@@ -117,7 +117,7 @@ jobs:
 obj: bun-obj-darwin-x64-baseline
 runner: macos-11
 artifact: bun-obj-darwin-x64-baseline
-webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-2/bun-webkit-macos-amd64-lto.tar.gz"
+webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
 dependencies: true
 compile_obj: false
 # - cpu: haswell
@@ -126,7 +126,7 @@ jobs:
 # obj: bun-obj-darwin-x64
 # runner: macos-11
 # artifact: bun-obj-darwin-x64
-# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-2/bun-webkit-macos-amd64-lto.tar.gz"
+# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
 # dependencies: true
 # compile_obj: false
 - cpu: nehalem
@@ -135,7 +135,7 @@ jobs:
 obj: bun-obj-darwin-x64-baseline
 runner: macos-11
 artifact: bun-obj-darwin-x64-baseline
-webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-2/bun-webkit-macos-amd64-lto.tar.gz"
+webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
 dependencies: false
 compile_obj: true
 # - cpu: haswell
@@ -144,7 +144,7 @@ jobs:
 # obj: bun-obj-darwin-x64
 # runner: macos-11
 # artifact: bun-obj-darwin-x64
-# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-2/bun-webkit-macos-amd64-lto.tar.gz"
+# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
 # dependencies: false
 # compile_obj: true
 # - cpu: native
@@ -152,7 +152,7 @@ jobs:
 # tag: bun-darwin-aarch64
 # obj: bun-obj-darwin-aarch64
 # artifact: bun-obj-darwin-aarch64
-# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-2/bun-webkit-macos-amd64-lto.tar.gz"
+# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
 # runner: macos-arm64
 # dependencies: true
 # compile_obj: true
@@ -173,9 +173,9 @@ jobs:
 BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
 run: |
 brew install ccache rust llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
-echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
-echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
-echo "$(brew --prefix llvm@15)/bin" >> $GITHUB_PATH
+echo "export PATH=$(brew --prefix ccache)/bin:\$PATH" >> $GITHUB_ENV
+echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
+echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
 brew link --overwrite llvm@15
 - name: ccache (dependencies)
 uses: hendrikmuhs/ccache-action@v1.2
@@ -258,7 +258,7 @@ jobs:
 package: bun-darwin-x64
 runner: macos-11
 artifact: bun-obj-darwin-x64-baseline
-webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-2/bun-webkit-macos-amd64-lto.tar.gz"
+webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
 # - cpu: haswell
 # arch: x86_64
 # tag: bun-darwin-x64
@@ -266,14 +266,14 @@ jobs:
 # package: bun-darwin-x64
 # runner: macos-11
 # artifact: bun-obj-darwin-x64
-# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-2/bun-webkit-macos-amd64-lto.tar.gz"
+# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
 # - cpu: native
 # arch: aarch64
 # tag: bun-darwin-aarch64
 # obj: bun-obj-darwin-aarch64
 # package: bun-darwin-aarch64
 # artifact: bun-obj-darwin-aarch64
-# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-2/bun-webkit-macos-amd64-lto.tar.gz"
+# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
 # runner: macos-arm64
 steps:
 - uses: actions/checkout@v3
@@ -292,8 +292,8 @@ jobs:
 BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
 run: |
 brew install ccache rust llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
-echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
-echo "$(brew --prefix llvm@15)/bin" >> $GITHUB_PATH
+echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
+echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
 brew link --overwrite llvm@15
 - name: ccache (link)
 uses: hendrikmuhs/ccache-action@v1.2
@@ -430,8 +430,8 @@ jobs:
 unzip ${{matrix.tag}}.zip
 cd ${{matrix.tag}}
 chmod +x bun
-pwd >> $GITHUB_PATH
-./bun --version
+sudo mv bun /usr/local/bin/bun
+bun --version
 - id: test
 name: Test (node runner)
 # if: ${{github.event.inputs.use_bun == 'false'}}
.github/workflows/bun-mac-x64.yml (vendored, 28 changes)

@@ -117,7 +117,7 @@ jobs:
 # obj: bun-obj-darwin-x64-baseline
 # runner: macos-11
 # artifact: bun-obj-darwin-x64-baseline
-# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-2/bun-webkit-macos-amd64-lto.tar.gz"
+# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
 # dependencies: true
 # compile_obj: false
 - cpu: haswell
@@ -126,7 +126,7 @@ jobs:
 obj: bun-obj-darwin-x64
 runner: macos-11
 artifact: bun-obj-darwin-x64
-webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-2/bun-webkit-macos-amd64-lto.tar.gz"
+webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
 dependencies: true
 compile_obj: false
 # - cpu: nehalem
@@ -135,7 +135,7 @@ jobs:
 # obj: bun-obj-darwin-x64-baseline
 # runner: macos-11
 # artifact: bun-obj-darwin-x64-baseline
-# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-2/bun-webkit-macos-amd64-lto.tar.gz"
+# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
 # dependencies: false
 # compile_obj: true
 - cpu: haswell
@@ -144,7 +144,7 @@ jobs:
 obj: bun-obj-darwin-x64
 runner: macos-11
 artifact: bun-obj-darwin-x64
-webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-2/bun-webkit-macos-amd64-lto.tar.gz"
+webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
 dependencies: false
 compile_obj: true
 # - cpu: native
@@ -152,7 +152,7 @@ jobs:
 # tag: bun-darwin-aarch64
 # obj: bun-obj-darwin-aarch64
 # artifact: bun-obj-darwin-aarch64
-# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-2/bun-webkit-macos-arm64-lto.tar.gz"
+# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-arm64-lto.tar.gz"
 # runner: macos-arm64
 # dependencies: true
 # compile_obj: true
@@ -173,8 +173,8 @@ jobs:
 BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
 run: |
 brew install rust ccache llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
-echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
-echo "$(brew --prefix llvm@15)/bin" >> $GITHUB_PATH
+echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
+echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
 brew link --overwrite llvm@15
 - name: Download WebKit
 if: matrix.compile_obj
@@ -260,7 +260,7 @@ jobs:
 # package: bun-darwin-x64
 # runner: macos-11
 # artifact: bun-obj-darwin-x64-baseline
-# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-2/bun-webkit-macos-amd64-lto.tar.gz"
+# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
 - cpu: haswell
 arch: x86_64
 tag: bun-darwin-x64
@@ -268,14 +268,14 @@ jobs:
 package: bun-darwin-x64
 runner: macos-11
 artifact: bun-obj-darwin-x64
-webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-2/bun-webkit-macos-amd64-lto.tar.gz"
+webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
 # - cpu: native
 # arch: aarch64
 # tag: bun-darwin-aarch64
 # obj: bun-obj-darwin-aarch64
 # package: bun-darwin-aarch64
 # artifact: bun-obj-darwin-aarch64
-# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-2/bun-webkit-macos-arm64-lto.tar.gz"
+# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-arm64-lto.tar.gz"
 # runner: macos-arm64
 steps:
 - uses: actions/checkout@v3
@@ -294,8 +294,8 @@ jobs:
 BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
 run: |
 brew install rust ccache llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
-echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
-echo "$(brew --prefix llvm@15)/bin" >> $GITHUB_PATH
+echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
+echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
 brew link --overwrite llvm@15
 - name: Download WebKit
 env:
@@ -432,8 +432,8 @@ jobs:
 unzip ${{matrix.tag}}.zip
 cd ${{matrix.tag}}
 chmod +x bun
-pwd >> $GITHUB_PATH
-./bun --version
+sudo mv bun /usr/local/bin/bun
+bun --version
 - id: test
 name: Test (node runner)
 # if: ${{github.event.inputs.use_bun == 'false'}}
@@ -1,4 +1,4 @@
-name: bun-release-types-canary
+name: bun-release-canary
 concurrency: release-canary
 on:
 push:
.github/workflows/bun-release.yml (vendored, 4 changes)

@@ -156,8 +156,8 @@ jobs:
 with:
 images: oven/bun
 tags: |
-type=match,pattern=(bun-v)?(\d+.\d+.\d+),group=2,value=${{ env.TAG }}
-type=match,pattern=(bun-v)?(\d+.\d+),group=2,value=${{ env.TAG }}
+type=match,pattern=(bun-v)?(\d.\d.\d),group=2,value=${{ env.TAG }}
+type=match,pattern=(bun-v)?(\d.\d),group=2,value=${{ env.TAG }}
 - id: login
 name: Login to Docker
 uses: docker/login-action@v2
.github/workflows/zig-fmt.yml (vendored, 4 changes)

@@ -1,7 +1,7 @@
 name: zig-fmt
 env:
-ZIG_VERSION: 0.11.0-dev.3737+9eb008717
+ZIG_VERSION: 0.11.0-dev.2571+31738de28
 on:
 pull_request:
@@ -28,7 +28,7 @@ jobs:
 run: |
 curl https://ziglang.org/builds/zig-linux-x86_64-${{env.ZIG_VERSION}}.tar.xz -L -o zig.tar.xz
 tar -xf zig.tar.xz
-echo "$(pwd)/zig-linux-x86_64-${{env.ZIG_VERSION}}" >> $GITHUB_PATH
+sudo mv zig-linux-x86_64-${{env.ZIG_VERSION}}/zig /usr/local/bin
 - name: Run zig fmt
 id: fmt
 run: |
.gitignore (vendored, 2 changes)

@@ -121,5 +121,3 @@ cold-jsc-start
 cold-jsc-start.d
-/test.ts
 src/js/out/modules_dev
@@ -6,7 +6,7 @@ module.exports = {
 quoteProps: "preserve",
 overrides: [
 {
-files: ["*.md"],
+files: "README.md",
 options: {
 printWidth: 80,
 },
.vscode/c_cpp_properties.json (vendored, 7 changes)

@@ -19,11 +19,11 @@
 "${workspaceFolder}/src/js/out",
 "${workspaceFolder}/src/deps/boringssl/include/",
 "${workspaceFolder}/src/deps",
-"${workspaceFolder}/src/deps/uws/uSockets/src"
+"${workspaceFolder}/src/deps/uws/uSockets/src",
+"${workspaceFolder}/src/deps/uws/src"
 ],
 "browse": {
 "path": [
-"${workspaceFolder}/../webkit-build/include/",
 "${workspaceFolder}/bun-webkit/include/",
 "${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/",
 "${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/ICU/Headers/",
@@ -39,7 +39,8 @@
 "${workspaceFolder}/src/bun.js/modules/*",
 "${workspaceFolder}/src/deps",
 "${workspaceFolder}/src/deps/boringssl/include/",
-"${workspaceFolder}/src/deps/uws/uSockets/src"
+"${workspaceFolder}/src/deps/uws/uSockets/src",
+"${workspaceFolder}/src/deps/uws/src"
 ],
 "limitSymbolsToIncludedHeaders": true,
 "databaseFilename": ".vscode/cppdb"
.vscode/launch.json (generated, vendored, 31 changes)

@@ -14,8 +14,7 @@
 "name": "bun test [file]",
 "program": "bun-debug",
 "args": ["test", "${file}"],
-// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
-"cwd": "${workspaceFolder}/test",
+"cwd": "${fileDirname}",
 "env": {
 "FORCE_COLOR": "1",
 "BUN_DEBUG_QUIET_LOGS": "1",
@@ -30,8 +29,7 @@
 "name": "bun test [file] (fast)",
 "program": "bun-debug",
 "args": ["test", "${file}"],
-// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
-"cwd": "${workspaceFolder}/test",
+"cwd": "${fileDirname}",
 "env": {
 "FORCE_COLOR": "1",
 "BUN_DEBUG_QUIET_LOGS": "1"
@@ -46,8 +44,7 @@
 "name": "bun test [file] (verbose)",
 "program": "bun-debug",
 "args": ["test", "${file}"],
-// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
-"cwd": "${workspaceFolder}/test",
+"cwd": "${fileDirname}",
 "env": {
 "FORCE_COLOR": "1"
 },
@@ -60,8 +57,7 @@
 "name": "bun test [file] --watch",
 "program": "bun-debug",
 "args": ["test", "--watch", "${file}"],
-// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
-"cwd": "${workspaceFolder}/test",
+"cwd": "${fileDirname}",
 "env": {
 "FORCE_COLOR": "1",
 "BUN_DEBUG_QUIET_LOGS": "1"
@@ -75,8 +71,7 @@
 "name": "bun test [file] --only",
 "program": "bun-debug",
 "args": ["test", "--only", "${file}"],
-// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
-"cwd": "${workspaceFolder}/test",
+"cwd": "${fileDirname}",
 "env": {
 "FORCE_COLOR": "1",
 "BUN_DEBUG_QUIET_LOGS": "1"
@@ -105,7 +100,6 @@
 "name": "bun test [*] (fast)",
 "program": "bun-debug",
 "args": ["test"],
-// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
 "cwd": "${workspaceFolder}/test",
 "env": {
 "FORCE_COLOR": "1",
@@ -120,7 +114,6 @@
 "name": "bun test [*] --only",
 "program": "bun-debug",
 "args": ["test", "--only"],
-// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
 "cwd": "${workspaceFolder}/test",
 "env": {
 "FORCE_COLOR": "1",
@@ -143,6 +136,20 @@
 "initCommands": ["process handle -p false -s false -n false SIGHUP"],
 "console": "internalConsole"
 },
+{
+"type": "lldb",
+"request": "launch",
+"name": "bun run [Inspect]",
+"program": "bun-debug",
+"args": ["--inspect-brk", "${file}"],
+"cwd": "${fileDirname}",
+"env": {
+"FORCE_COLOR": "1",
+"BUN_DEBUG_QUIET_LOGS": "1"
+},
+"initCommands": ["process handle -p false -s false -n false SIGHUP"],
+"console": "internalConsole"
+},
 {
 "type": "lldb",
 "request": "launch",
@@ -10,9 +10,9 @@ ARG ARCH=x86_64
 ARG BUILD_MACHINE_ARCH=x86_64
 ARG TRIPLET=${ARCH}-linux-gnu
 ARG BUILDARCH=amd64
-ARG WEBKIT_TAG=may20-2
+ARG WEBKIT_TAG=may20
 ARG ZIG_TAG=jul1
-ARG ZIG_VERSION="0.11.0-dev.3737+9eb008717"
+ARG ZIG_VERSION="0.11.0-dev.2571+31738de28"
 ARG WEBKIT_BASENAME="bun-webkit-linux-$BUILDARCH"
 ARG ZIG_FOLDERNAME=zig-linux-${BUILD_MACHINE_ARCH}-${ZIG_VERSION}
Makefile (19 changes)

@@ -561,9 +561,6 @@ builtins:
 esm:
 NODE_ENV=production bun src/js/build-esm.ts
 esm-debug:
 BUN_DEBUG_QUIET_LOGS=1 NODE_ENV=production bun-debug src/js/build-esm.ts
 .PHONY: generate-builtins
 generate-builtins: builtins
@@ -1088,7 +1085,7 @@ dev-obj-linux:
 $(ZIG) build obj -Dtarget=x86_64-linux-gnu -Dcpu="$(CPU_TARGET)"
 .PHONY: dev
-dev: mkdir-dev esm dev-obj link
+dev: mkdir-dev esm dev-obj bun-link-lld-debug
 mkdir-dev:
 mkdir -p $(DEBUG_PACKAGE_DIR)
@@ -1359,21 +1356,15 @@ mimalloc-wasm:
 cd $(BUN_DEPS_DIR)/mimalloc; emcmake cmake -DMI_BUILD_SHARED=OFF -DMI_BUILD_STATIC=ON -DMI_BUILD_TESTS=OFF -DMI_BUILD_OBJECT=ON ${MIMALLOC_OVERRIDE_FLAG} -DMI_USE_CXX=ON .; emmake make;
 cp $(BUN_DEPS_DIR)/mimalloc/$(MIMALLOC_INPUT_PATH) $(BUN_DEPS_OUT_DIR)/$(MIMALLOC_FILE).wasm
-# alias for link, incase anyone still types that
-.PHONY: bun-link-lld-debug
-bun-link-lld-debug: link
-# link a debug build of bun
-.PHONY: link
-link:
+bun-link-lld-debug:
 $(CXX) $(BUN_LLD_FLAGS_DEBUG) $(DEBUG_FLAGS) $(SYMBOLS) \
 -g \
 $(DEBUG_BIN)/bun-debug.o \
--W \
 -o $(DEBUG_BIN)/bun-debug
-@rm -f $(DEBUG_BIN)/bun-debug.o.o 2> /dev/null # workaround for https://github.com/ziglang/zig/issues/14080
+@rm -f $(DEBUG_BIN)/bun-debug.o.o 2> /dev/null # workaround for https://github.com/ziglang/zig/issues/14080
-link-no-jsc:
+bun-link-lld-debug-no-jsc:
 $(CXX) $(BUN_LLD_FLAGS_WITHOUT_JSC) $(SYMBOLS) \
 -g \
 $(DEBUG_BIN)/bun-debug.o \
@@ -1694,7 +1685,7 @@ sizegen:
 # Linux uses bundled SQLite3
 ifeq ($(OS_NAME),linux)
 sqlite:
-$(CC) $(EMIT_LLVM_FOR_RELEASE) $(CFLAGS) $(INCLUDE_DIRS) -DSQLITE_ENABLE_COLUMN_METADATA= -DSQLITE_MAX_VARIABLE_NUMBER=250000 -DSQLITE_ENABLE_RTREE=1 -DSQLITE_ENABLE_FTS3=1 -DSQLITE_ENABLE_FTS3_PARENTHESIS=1 -DSQLITE_ENABLE_FTS5=1 -DSQLITE_ENABLE_JSON1=1 $(SRC_DIR)/sqlite/sqlite3.c -c -o $(SQLITE_OBJECT)
+$(CC) $(EMIT_LLVM_FOR_RELEASE) $(CFLAGS) $(INCLUDE_DIRS) -DSQLITE_ENABLE_COLUMN_METADATA= -DSQLITE_MAX_VARIABLE_NUMBER=250000 -DSQLITE_ENABLE_RTREE=1 -DSQLITE_ENABLE_FTS3=1 -DSQLITE_ENABLE_FTS3_PARENTHESIS=1 -DSQLITE_ENABLE_JSON1=1 $(SRC_DIR)/sqlite/sqlite3.c -c -o $(SQLITE_OBJECT)
 endif
 picohttp:
@@ -128,27 +128,27 @@ pub fn main() anyerror!void {
 counters[counter].timestamp = @truncate(u64, @intCast(u128, std.time.nanoTimestamp()) / (std.time.ns_per_ms / 10));
 counters[counter].rotate = rotate % 360;
 counters[counter].percent = std.math.mod(f64, std.math.round(((progress_bar + 1.0) / destination_count) * 1000) / 1000, 100) catch 0;
-counters[counter].color_values[0] = @intFromFloat(u32, std.math.round(@floatFromInt(f64, ((colors[0][0] + 1) % 256)) * 0.8));
-counters[counter].color_values[1] = @intFromFloat(u32, std.math.round(@floatFromInt(f64, ((colors[0][1] + 1) % 256)) * 0.8));
-counters[counter].color_values[2] = @intFromFloat(u32, std.math.round(@floatFromInt(f64, ((colors[0][2] + 1) % 256)) * 0.8));
+counters[counter].color_values[0] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[0][0] + 1) % 256)) * 0.8));
+counters[counter].color_values[1] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[0][1] + 1) % 256)) * 0.8));
+counters[counter].color_values[2] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[0][2] + 1) % 256)) * 0.8));
 counters[counter].color_values[3] = (colors[0][0] + 1) % 256;
 counters[counter].color_values[4] = (colors[0][1] + 1) % 256;
 counters[counter].color_values[5] = (colors[0][2] + 1) % 256;
-counters[counter].color_values[6] = @intFromFloat(u32, std.math.round(@floatFromInt(f64, ((colors[1][0] + 1) % 256)) * 0.8));
-counters[counter].color_values[7] = @intFromFloat(u32, std.math.round(@floatFromInt(f64, ((colors[1][1] + 1) % 256)) * 0.8));
-counters[counter].color_values[8] = @intFromFloat(u32, std.math.round(@floatFromInt(f64, ((colors[1][2] + 1) % 256)) * 0.8));
+counters[counter].color_values[6] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[1][0] + 1) % 256)) * 0.8));
+counters[counter].color_values[7] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[1][1] + 1) % 256)) * 0.8));
+counters[counter].color_values[8] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[1][2] + 1) % 256)) * 0.8));
 counters[counter].color_values[9] = (colors[1][0] + 1) % 256;
 counters[counter].color_values[10] = (colors[1][1] + 1) % 256;
 counters[counter].color_values[11] = (colors[1][2] + 1) % 256;
-counters[counter].color_values[12] = @intFromFloat(u32, std.math.round(@floatFromInt(f64, ((colors[2][0] + 1) % 256)) * 0.8));
-counters[counter].color_values[13] = @intFromFloat(u32, std.math.round(@floatFromInt(f64, ((colors[2][1] + 1) % 256)) * 0.8));
-counters[counter].color_values[14] = @intFromFloat(u32, std.math.round(@floatFromInt(f64, ((colors[2][2] + 1) % 256)) * 0.8));
+counters[counter].color_values[12] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[2][0] + 1) % 256)) * 0.8));
+counters[counter].color_values[13] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[2][1] + 1) % 256)) * 0.8));
+counters[counter].color_values[14] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[2][2] + 1) % 256)) * 0.8));
 counters[counter].color_values[15] = (colors[2][0] + 1) % 256;
 counters[counter].color_values[16] = (colors[2][1] + 1) % 256;
 counters[counter].color_values[17] = (colors[2][2] + 1) % 256;
-counters[counter].color_values[18] = @intFromFloat(u32, std.math.round(@floatFromInt(f64, ((colors[3][0] + 1) % 256)) * 0.8));
-counters[counter].color_values[19] = @intFromFloat(u32, std.math.round(@floatFromInt(f64, ((colors[3][1] + 1) % 256)) * 0.8));
-counters[counter].color_values[20] = @intFromFloat(u32, std.math.round(@floatFromInt(f64, ((colors[3][2] + 1) % 256)) * 0.8));
+counters[counter].color_values[18] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[3][0] + 1) % 256)) * 0.8));
+counters[counter].color_values[19] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[3][1] + 1) % 256)) * 0.8));
+counters[counter].color_values[20] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[3][2] + 1) % 256)) * 0.8));
 counters[counter].color_values[21] = (colors[3][0] + 1) % 256;
 counters[counter].color_values[22] = (colors[3][1] + 1) % 256;
 counters[counter].color_values[23] = (colors[3][2] + 1) % 256;
@@ -203,7 +203,7 @@ pub fn main() anyerror!void {
 _ = try recorder.wait();
 all_timestamps[0] = wrote.len;
-for (counters, 0..) |count, i| {
+for (counters) |count, i| {
 all_timestamps[i + 1] = count.timestamp;
 }
@@ -115,27 +115,27 @@ pub fn main() anyerror!void {
 counters[counter].timestamp = @truncate(u64, @intCast(u128, std.time.nanoTimestamp()) / (std.time.ns_per_ms / 10));
 counters[counter].rotate = rotate % 360;
 counters[counter].percent = std.math.mod(f64, std.math.round(((progress_bar + 1.0) / destination_count) * 1000) / 1000, 100) catch 0;
-counters[counter].color_values[0] = @intFromFloat(u32, std.math.round(@floatFromInt(f64, ((colors[0][0] + 1) % 256)) * 0.8));
-counters[counter].color_values[1] = @intFromFloat(u32, std.math.round(@floatFromInt(f64, ((colors[0][1] + 1) % 256)) * 0.8));
-counters[counter].color_values[2] = @intFromFloat(u32, std.math.round(@floatFromInt(f64, ((colors[0][2] + 1) % 256)) * 0.8));
+counters[counter].color_values[0] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[0][0] + 1) % 256)) * 0.8));
+counters[counter].color_values[1] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[0][1] + 1) % 256)) * 0.8));
+counters[counter].color_values[2] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[0][2] + 1) % 256)) * 0.8));
 counters[counter].color_values[3] = (colors[0][0] + 1) % 256;
 counters[counter].color_values[4] = (colors[0][1] + 1) % 256;
 counters[counter].color_values[5] = (colors[0][2] + 1) % 256;
-counters[counter].color_values[6] = @intFromFloat(u32, std.math.round(@floatFromInt(f64, ((colors[1][0] + 1) % 256)) * 0.8));
-counters[counter].color_values[7] = @intFromFloat(u32, std.math.round(@floatFromInt(f64, ((colors[1][1] + 1) % 256)) * 0.8));
-counters[counter].color_values[8] = @intFromFloat(u32, std.math.round(@floatFromInt(f64, ((colors[1][2] + 1) % 256)) * 0.8));
+counters[counter].color_values[6] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[1][0] + 1) % 256)) * 0.8));
+counters[counter].color_values[7] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[1][1] + 1) % 256)) * 0.8));
+counters[counter].color_values[8] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[1][2] + 1) % 256)) * 0.8));
 counters[counter].color_values[9] = (colors[1][0] + 1) % 256;
 counters[counter].color_values[10] = (colors[1][1] + 1) % 256;
 counters[counter].color_values[11] = (colors[1][2] + 1) % 256;
-counters[counter].color_values[12] = @intFromFloat(u32, std.math.round(@floatFromInt(f64, ((colors[2][0] + 1) % 256)) * 0.8));
-counters[counter].color_values[13] = @intFromFloat(u32, std.math.round(@floatFromInt(f64, ((colors[2][1] + 1) % 256)) * 0.8));
-counters[counter].color_values[14] = @intFromFloat(u32, std.math.round(@floatFromInt(f64, ((colors[2][2] + 1) % 256)) * 0.8));
+counters[counter].color_values[12] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[2][0] + 1) % 256)) * 0.8));
+counters[counter].color_values[13] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[2][1] + 1) % 256)) * 0.8));
+counters[counter].color_values[14] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[2][2] + 1) % 256)) * 0.8));
 counters[counter].color_values[15] = (colors[2][0] + 1) % 256;
 counters[counter].color_values[16] = (colors[2][1] + 1) % 256;
 counters[counter].color_values[17] = (colors[2][2] + 1) % 256;
-counters[counter].color_values[18] = @intFromFloat(u32, std.math.round(@floatFromInt(f64, ((colors[3][0] + 1) % 256)) * 0.8));
-counters[counter].color_values[19] = @intFromFloat(u32, std.math.round(@floatFromInt(f64, ((colors[3][1] + 1) % 256)) * 0.8));
-counters[counter].color_values[20] = @intFromFloat(u32, std.math.round(@floatFromInt(f64, ((colors[3][2] + 1) % 256)) * 0.8));
+counters[counter].color_values[18] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[3][0] + 1) % 256)) * 0.8));
+counters[counter].color_values[19] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[3][1] + 1) % 256)) * 0.8));
+counters[counter].color_values[20] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[3][2] + 1) % 256)) * 0.8));
 counters[counter].color_values[21] = (colors[3][0] + 1) % 256;
 counters[counter].color_values[22] = (colors[3][1] + 1) % 256;
 counters[counter].color_values[23] = (colors[3][2] + 1) % 256;
@@ -190,7 +190,7 @@ pub fn main() anyerror!void {
 _ = try recorder.wait();
 all_timestamps[0] = wrote.len;
-for (counters, 0..) |count, i| {
+for (counters) |count, i| {
 all_timestamps[i + 1] = count.timestamp;
 }
@@ -1,6 +1,12 @@
 // so it can run in environments without node module resolution
 import { bench, run } from "../node_modules/mitata/src/cli.mjs";
-import crypto from "node:crypto";
+var crypto = globalThis.crypto;
+if (!crypto) {
+  crypto = await import("node:crypto");
+}
 var foo = new Uint8Array(65536);
 bench("crypto.getRandomValues(65536)", () => {
 crypto.getRandomValues(foo);
@@ -16,8 +22,4 @@ bench("crypto.randomUUID()", () => {
 return crypto.randomUUID()[2];
 });
-bench("crypto.randomInt()", () => {
-  return crypto.randomInt(0, 100);
-});
 await run();
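For context, the added side of the first hunk above is a feature-detection shim: it prefers the `globalThis.crypto` Web Crypto object and falls back to importing `node:crypto` only when the global is missing. A standalone sketch of that pattern (not from the repo; the buffer size is illustrative):

```ts
// Prefer the Web Crypto global; fall back to node:crypto when it is absent.
// Modern Node also exposes getRandomValues() and randomUUID() on node:crypto.
let crypto = globalThis.crypto;
if (!crypto) {
  crypto = await import("node:crypto");
}

const buf = new Uint8Array(65536);
crypto.getRandomValues(buf);      // fill the buffer with random bytes
console.log(crypto.randomUUID()); // e.g. "3b241101-e2bb-4255-8caf-4136c566a962"
```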
@@ -1,65 +0,0 @@
// This is a stress test of some internals in How Bun does the module.exports assignment.
// If it crashes or throws then this fails
import("./runner.mjs").then(({ bench, run }) => {
  bench("Object.defineProperty(module, 'exports', { get() { return 42; } })", () => {
    Object.defineProperty(module, "exports", {
      get() {
        return 42;
      },
      set() {
        throw new Error("bad");
      },
      configurable: true,
    });
    if (module.exports !== 42) throw new Error("bad");
    if (!Object.getOwnPropertyDescriptor(module, "exports").get) throw new Error("bad");
  });

  bench("Object.defineProperty(module.exports = {})", () => {
    Object.defineProperty(module, "exports", {
      value: { abc: 123 },
    });

    if (!module.exports.abc) throw new Error("bad");
    if (Object.getOwnPropertyDescriptor(module, "exports").value !== module.exports) throw new Error("bad");
  });

  bench("module.exports = {}", () => {
    module.exports = { abc: 123 };

    if (!module.exports.abc) throw new Error("bad");
    if (Object.getOwnPropertyDescriptor(module, "exports").value !== module.exports) throw new Error("bad");
  });

  run().then(() => {
    module.exports = {
      a: 1,
    };

    console.log(
      module?.exports,
      require.cache[module.id].exports,
      module?.exports === require.cache[module.id],
      __dirname,
      Object.keys(require(module.id)),
      require(module.id),
    );

    module.exports = function lol() {
      return 42;
    };

    console.log(module.exports, module.exports());

    queueMicrotask(() => {
      console.log(
        module?.exports,
        require.cache[module.id].exports,
        module?.exports === require.cache[module.id]?.exports,
        __dirname,
        Object.keys(require(module.id)),
        require(module.id),
      );
    });
  });
});
@@ -1,10 +1,4 @@
 import { realpathSync } from "node:fs";
 const count = parseInt(process.env.ITERATIONS || "1", 10) || 1;
 const arg = process.argv[process.argv.length - 1];
-import { bench, run } from "./runner.mjs";
-bench("realpathSync x " + count, () => {
-  for (let i = 0; i < count; i++) realpathSync(arg, "utf-8");
-});
-await run();
+for (let i = 0; i < count; i++) realpathSync(arg);
@@ -1,60 +0,0 @@
import { bench, run } from "./runner.mjs";
function deprecateUsingClosure(fn, msg, code) {
  if (process.noDeprecation === true) {
    return fn;
  }

  var realFn = fn;
  var wrapper = () => {
    return fnToWrap.apply(this, arguments);
  };

  var deprecater = () => {
    if (process.throwDeprecation) {
      var err = new Error(msg);
      if (code) err.code = code;
      throw err;
    } else if (process.traceDeprecation) {
      console.trace(msg);
    } else {
      console.error(msg);
    }

    fnToWrap = realFn;
    return realFn.apply(this, arguments);
  };
  var fnToWrap = deprecater;

  return wrapper;
}

function deprecateOriginal(fn, msg) {
  var warned = false;
  function deprecated() {
    if (!warned) {
      if (process.throwDeprecation) {
        throw new Error(msg);
      } else if (process.traceDeprecation) {
        console.trace(msg);
      } else {
        console.error(msg);
      }
      warned = true;
    }
    return fn.apply(this, arguments);
  }
  return deprecated;
}

const deprecatedy = deprecateUsingClosure(() => {}, "This is deprecated", "DEP0001");
const deprecatedy2 = deprecateOriginal(() => {}, "This is deprecated");

bench("deprecateUsingClosure", () => {
  deprecatedy(Math.random() + 1);
});

bench("deprecateOriginal", () => {
  deprecatedy2(Math.random() + 1);
});

await run();
@@ -32,7 +32,6 @@ const server = Bun.serve({
 },
 perMessageDeflate: false,
 publishToSelf: true,
 },
 fetch(req, server) {
@@ -42,4 +42,4 @@ function sendReadyMessage() {
 console.log(`Waiting for ${CLIENTS_TO_WAIT_FOR} clients to connect..`);
-Deno.serve({ port }, reqHandler);
+Deno.serve(reqHandler, { port });
@@ -1,5 +1,4 @@
 const std = @import("std");
-const Wyhash = @import("./src/wyhash.zig").Wyhash;
 fn moduleSource(comptime out: []const u8) FileSource {
 if (comptime std.fs.path.dirname(@src().file)) |base| {
@@ -77,13 +76,13 @@ const BunBuildOptions = struct {
 pub fn updateRuntime(this: *BunBuildOptions) anyerror!void {
 var runtime_out_file = try std.fs.cwd().openFile("src/runtime.out.js", .{ .mode = .read_only });
-const runtime_hash = Wyhash.hash(
+const runtime_hash = std.hash.Wyhash.hash(
 0,
 try runtime_out_file.readToEndAlloc(std.heap.page_allocator, try runtime_out_file.getEndPos()),
 );
 this.runtime_js_version = runtime_hash;
 var fallback_out_file = try std.fs.cwd().openFile("src/fallback.out.js", .{ .mode = .read_only });
-const fallback_hash = Wyhash.hash(
+const fallback_hash = std.hash.Wyhash.hash(
 0,
 try fallback_out_file.readToEndAlloc(std.heap.page_allocator, try fallback_out_file.getEndPos()),
 );
@@ -193,12 +192,12 @@ pub fn build(b: *Build) !void {
 else
 "root.zig";
-const min_version: std.SemanticVersion = if (target.getOsTag() != .freestanding)
+const min_version: std.builtin.Version = if (target.getOsTag() != .freestanding)
 target.getOsVersionMin().semver
 else
 .{ .major = 0, .minor = 0, .patch = 0 };
-const max_version: std.SemanticVersion = if (target.getOsTag() != .freestanding)
+const max_version: std.builtin.Version = if (target.getOsTag() != .freestanding)
 target.getOsVersionMax().semver
 else
 .{ .major = 0, .minor = 0, .patch = 0 };
@@ -92,10 +92,10 @@ _bun_completions() {
 PACKAGE_OPTIONS[REMOVE_OPTIONS_LONG]="";
 PACKAGE_OPTIONS[REMOVE_OPTIONS_SHORT]="";
-PACKAGE_OPTIONS[SHARED_OPTIONS_LONG]="--config --yarn --production --frozen-lockfile --no-save --dry-run --lockfile --force --cache-dir --no-cache --silent --verbose --global --cwd --backend --link-native-bins --help";
+PACKAGE_OPTIONS[SHARED_OPTIONS_LONG]="--config --yarn --production --no-save --dry-run --lockfile --force --cache-dir --no-cache --silent --verbose --global --cwd --backend --link-native-bins --help";
 PACKAGE_OPTIONS[SHARED_OPTIONS_SHORT]="-c -y -p -f -g";
-PM_OPTIONS[LONG_OPTIONS]="--config --yarn --production --frozen-lockfile --no-save --dry-run --lockfile --force --cache-dir --no-cache --silent --verbose --no-progress --no-summary --no-verify --ignore-scripts --global --cwd --backend --link-native-bins --help"
+PM_OPTIONS[LONG_OPTIONS]="--config --yarn --production --no-save --dry-run --lockfile --force --cache-dir --no-cache --silent --verbose --no-progress --no-summary --no-verify --ignore-scripts --global --cwd --backend --link-native-bins --help"
 PM_OPTIONS[SHORT_OPTIONS]="-c -y -p -f -g"
 local cur_word="${COMP_WORDS[${COMP_CWORD}]}";
@@ -47,7 +47,6 @@ _bun() {
 '-g[Add a package globally]' \
 '--global[Add a package globally]' \
 '--production[Don'"'"'t install devDependencies]' \
-'--frozen-lockfile[Disallow changes to lockfile]' \
 '--optional[Add dependency to optionalDependencies]' \
 '--development[Add dependency to devDependencies]' \
 '-d[Add dependency to devDependencies]' \
@@ -89,7 +88,6 @@ _bun() {
 '--yarn[Write a yarn.lock file (yarn v1)]' \
 '--global[Add a package globally]' \
 '--production[Don'"'"'t install devDependencies]' \
-'--frozen-lockfile[Disallow changes to lockfile]' \
 '--optional[Add dependency to optionalDependencies]' \
 '--development[Add dependency to devDependencies]' \
 '-d[Add dependency to devDependencies]' \
@@ -125,7 +123,6 @@ _bun() {
 '--yarn[Write a yarn.lock file (yarn v1)]' \
 '--global[Add a package globally]' \
 '--production[Don'"'"'t install devDependencies]' \
-'--frozen-lockfile[Disallow changes to lockfile]' \
 '--optional[Add dependency to optionalDependencies]' \
 '--development[Add dependency to devDependencies]' \
 '-d[Add dependency to devDependencies]' \
@@ -281,7 +278,6 @@ _bun() {
 '--yarn[Write a yarn.lock file (yarn v1)]'
 '-p[Do not install devDependencies]'
 '--production[Do not install devDependencies]'
-'--frozen-lockfile[Disallow changes to lockfile]' \
 '--no-save[Do not save a lockfile]'
 '--dry-run[Do not install anything]'
 '--lockfile[Store & load a lockfile at a specific filepath]'
@@ -536,7 +532,6 @@ _bun() {
 '--yarn[Write a yarn.lock file (yarn v1)]' \
 '--production[Don'"'"'t install devDependencies]' \
 '-p[Don'"'"'t install devDependencies]' \
-'--frozen-lockfile[Disallow changes to lockfile]' \
 '--no-save[]' \
 '--dry-run[Don'"'"'t install anything]' \
 '--force[Always request the latest versions from the registry & reinstall all dependenices]' \
@@ -570,7 +565,6 @@ _bun() {
 '--yarn[Write a yarn.lock file (yarn v1)]' \
 '--production[Don'"'"'t install devDependencies]' \
 '-p[Don'"'"'t install devDependencies]' \
-'--frozen-lockfile[Disallow changes to lockfile]' \
 '--no-save[]' \
 '--dry-run[Don'"'"'t install anything]' \
 '-g[Remove a package globally]' \
@@ -115,7 +115,6 @@ subcommands:
 - yarn -- "Write a yarn.lock file (yarn v1)"
 - production -- "Don't install devDependencies"
 - p -- "Don't install devDependencies"
-- frozen-lockfile -- "Disallow changes to lockfile"
 - no-save --
 - dry-run -- "Don't install anything"
 - force -- "Always request the latest versions from the registry & reinstall all dependenices"
@@ -153,7 +152,6 @@ subcommands:
 - development -- "Add dependency to devDependencies"
 - d -- "Add dependency to devDependencies"
 - p -- "Don't install devDependencies"
-- frozen-lockfile -- "Disallow changes to lockfile"
 - no-save --
 - dry-run -- "Don't install anything"
 - force -- "Always request the latest versions from the registry & reinstall all dependenices"
@@ -194,7 +192,6 @@ subcommands:
 - yarn -- "Write a yarn.lock file (yarn v1)"
 - production -- "Don't install devDependencies"
 - p -- "Don't install devDependencies"
-- frozen-lockfile -- "Disallow changes to lockfile"
 - no-save --
 - dry-run -- "Don't install anything"
 - force -- "Always request the latest versions from the registry & reinstall all dependenices"
@@ -202,53 +202,6 @@ const response = await fetch("https://bun.sh");
 await Bun.write("index.html", response);
 ```
-## Incremental writing with `FileSink`
-
-Bun provides a native incremental file writing API called `FileSink`. To retrieve a `FileSink` instance from a `BunFile`:
-
-```ts
-const file = Bun.file("output.txt");
-const writer = file.writer();
-```
-
-To incrementally write to the file, call `.write()`.
-
-```ts
-const file = Bun.file("output.txt");
-const writer = file.writer();
-
-writer.write("it was the best of times\n");
-writer.write("it was the worst of times\n");
-```
-
-These chunks will be buffered internally. To flush the buffer to disk, use `.flush()`. This returns the number of flushed bytes.
-
-```ts
-writer.flush(); // write buffer to disk
-```
-
-The buffer will also auto-flush when the `FileSink`'s _high water mark_ is reached; that is, when its internal buffer is full. This value can be configured.
-
-```ts
-const file = Bun.file("output.txt");
-const writer = file.writer({ highWaterMark: 1024 * 1024 }); // 1MB
-```
-
-To flush the buffer and close the file:
-
-```ts
-writer.end();
-```
-
-Note that, by default, the `bun` process will stay alive until this `FileSink` is explicitly closed with `.end()`. To opt out of this behavior, you can "unref" the instance.
-
-```ts
-writer.unref();
-
-// to "re-ref" it later
-writer.ref();
-```
-
 ## Benchmarks
 The following is a 3-line implementation of the Linux `cat` command.
@@ -297,15 +250,5 @@ interface BunFile {
 stream(): Promise<ReadableStream>;
 arrayBuffer(): Promise<ArrayBuffer>;
 json(): Promise<any>;
-writer(params: { highWaterMark?: number }): FileSink;
 }
-
-export interface FileSink {
-  write(chunk: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer): number;
-  flush(): number | Promise<number>;
-  end(error?: Error): number | Promise<number>;
-  start(options?: { highWaterMark?: number }): void;
-  ref(): void;
-  unref(): void;
-}
 ```
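For reference, the incremental-writing calls documented in the section removed above compose end to end as follows; this is a short sketch assembled only from the API shown in that section (file name and chunk text are illustrative):

```ts
const file = Bun.file("output.txt");
const writer = file.writer({ highWaterMark: 1024 * 1024 }); // auto-flush at 1MB

writer.write("it was the best of times\n");  // buffered in memory
writer.write("it was the worst of times\n");

writer.flush(); // force the buffer to disk; returns the number of bytes flushed
writer.end();   // flush any remaining data and close the file
```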
@@ -4,71 +4,6 @@ Bun implements the `createHash` and `createHmac` functions from [`node:crypto`](
 {% /callout %}
-## `Bun.password`
-
-{% callout %}
-**Note** — Added in Bun 0.6.8.
-{% /callout %}
-
-`Bun.password` is a collection of utility functions for hashing and verifying passwords with various cryptographically secure algorithms.
-
-```ts
-const password = "super-secure-pa$$word";
-
-const hash = await Bun.password.hash(password);
-// => $argon2id$v=19$m=65536,t=2,p=1$tFq+9AVr1bfPxQdh6E8DQRhEXg/M/SqYCNu6gVdRRNs$GzJ8PuBi+K+BVojzPfS5mjnC8OpLGtv8KJqF99eP6a4
-
-const isMatch = await Bun.password.verify(password, hash);
-// => true
-```
-
-The second argument to `Bun.password.hash` accepts a params object that lets you pick and configure the hashing algorithm.
-
-```ts
-const password = "super-secure-pa$$word";
-
-// use argon2 (default)
-const argonHash = await Bun.password.hash(password, {
-  algorithm: "argon2id", // "argon2id" | "argon2i" | "argon2d"
-  memoryCost: 4, // memory usage in kibibytes
-  timeCost: 3, // the number of iterations
-});
-
-// use bcrypt
-const bcryptHash = await Bun.password.hash(password, {
-  algorithm: "bcrypt",
-  cost: 4, // number between 4-31
-});
-```
-
-The algorithm used to create the hash is stored in the hash itself. When using `bcrypt`, the returned hash is encoded in [Modular Crypt Format](https://passlib.readthedocs.io/en/stable/modular_crypt_format.html) for compatibility with most existing `bcrypt` implementations; with `argon2` the result is encoded in the newer [PHC format](https://github.com/P-H-C/phc-string-format/blob/master/phc-sf-spec.md).
-
-The `verify` function automatically detects the algorithm based on the input hash and use the correct verification method. It can correctly infer the algorithm from both PHC- or MCF-encoded hashes.
-
-```ts
-const password = "super-secure-pa$$word";
-
-const hash = await Bun.password.hash(password, {
-  /* config */
-});
-
-const isMatch = await Bun.password.verify(password, hash);
-// => true
-```
-
-Synchronous versions of all functions are also available. Keep in mind that these functions are computationally expensive, so using a blocking API may degrade application performance.
-
-```ts
-const password = "super-secure-pa$$word";
-
-const hash = Bun.password.hashSync(password, {
-  /* config */
-});
-
-const isMatch = Bun.password.verifySync(password, hash);
-// => true
-```
-
 ## `Bun.hash`
 `Bun.hash` is a collection of utilities for _non-cryptographic_ hashing. Non-cryptographic hashing algorithms are optimized for speed of computation over collision-resistance or security.
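The algorithm auto-detection described in the removed section can be seen by hashing with one algorithm and then verifying without naming it; a minimal sketch based only on the API shown above:

```ts
const password = "super-secure-pa$$word";

// hash with bcrypt (MCF-encoded result)
const bcryptHash = await Bun.password.hash(password, {
  algorithm: "bcrypt",
  cost: 4,
});

// verify() infers the algorithm from the hash string itself
const isMatch = await Bun.password.verify(password, bcryptHash);
// => true
```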
@@ -125,7 +125,8 @@ Bun.serve({
 // string
 key: fs.readFileSync("./key.pem", "utf8"),
 // array of above
-key: [Bun.file('./key1.pem'), Bun.file('./key2.pem')],
+key: [Bun.file('./key1.pem'), Bun.file('./key2.pem']
 });
 ```
@@ -75,6 +75,16 @@ Bun.sleepSync(1000); // blocks thread for one second
 console.log("hello one second later!");
 ```
+Alternatively, pass a `Date` object to receive a `Promise` that resolves at that point in time.
+
+```ts
+const oneSecondInFuture = new Date(Date.now() + 1000);
+
+console.log("hello");
+await Bun.sleep(oneSecondInFuture);
+console.log("hello one second later!");
+```
+
 ## `Bun.which()`
 `Bun.which(bin: string)`
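The `Bun.which(bin: string)` signature shown above resolves an executable name against `PATH`; a brief usage sketch (the binary name is arbitrary, and the return value is `null` when nothing is found):

```ts
const path = Bun.which("ls");
console.log(path); // e.g. "/bin/ls", or null if the binary is not on PATH
```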
@@ -12,7 +12,41 @@
|
||||
Internally Bun's WebSocket implementation is built on [uWebSockets](https://github.com/uNetworking/uWebSockets).
|
||||
{% /callout %}
|
||||
|
||||
## Start a WebSocket server
|
||||
## Connect to a WebSocket server
|
||||
|
||||
To connect to an external socket server, create an instance of `WebSocket` with the constructor.
|
||||
|
||||
```ts
|
||||
const socket = new WebSocket("ws://localhost:3000");
|
||||
```
|
||||
|
||||
Bun supports setting custom headers. This is a Bun-specific extension of the `WebSocket` standard.
|
||||
|
||||
```ts
|
||||
const socket = new WebSocket("ws://localhost:3000", {
|
||||
headers: {
|
||||
// custom headers
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
To add event listeners to the socket:
|
||||
|
||||
```ts
|
||||
// message is received
|
||||
socket.addEventListener("message", event => {});
|
||||
|
||||
// socket opened
|
||||
socket.addEventListener("open", event => {});
|
||||
|
||||
// socket closed
|
||||
socket.addEventListener("close", event => {});
|
||||
|
||||
// error handler
|
||||
socket.addEventListener("error", event => {});
|
||||
```
|
||||
|
||||
## Create a WebSocket server
|
||||
|
||||
Below is a simple WebSocket server built with `Bun.serve`, in which all incoming requests are [upgraded](https://developer.mozilla.org/en-US/docs/Web/HTTP/Protocol_upgrade_mechanism) to WebSocket connections in the `fetch` handler. The socket handlers are declared in the `websocket` parameter.
|
||||
|
||||
@@ -75,7 +109,7 @@ Bun.serve({
|
||||
});
|
||||
```
|
||||
|
||||
### Sending messages
|
||||
## Sending messages
|
||||
|
||||
Each `ServerWebSocket` instance has a `.send()` method for sending messages to the client. It supports a range of input types.
|
||||
|
||||
@@ -85,7 +119,7 @@ ws.send(response.arrayBuffer()); // ArrayBuffer
|
||||
ws.send(new Uint8Array([1, 2, 3])); // TypedArray | DataView
|
||||
```
|
||||
|
||||
### Headers
|
||||
## Headers
|
||||
|
||||
Once the upgrade succeeds, Bun will send a `101 Switching Protocols` response per the [spec](https://developer.mozilla.org/en-US/docs/Web/HTTP/Protocol_upgrade_mechanism). Additional `headers` can be attached to this `Response` in the call to `server.upgrade()`.
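
A minimal sketch of passing `headers` to `server.upgrade()` (the cookie value is a placeholder):

```ts
Bun.serve({
  fetch(req, server) {
    const upgraded = server.upgrade(req, {
      headers: {
        // attached to the 101 Switching Protocols response
        "Set-Cookie": "SessionId=abc123", // placeholder value
      },
    });
    if (upgraded) return;
    return new Response("Upgrade failed", { status: 400 });
  },
  websocket: {
    message(ws, message) {},
  },
});
```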
|
||||
|
||||
@@ -103,7 +137,7 @@ Bun.serve({
|
||||
});
|
||||
```
|
||||
|
||||
### Contextual data
|
||||
## Contextual data
|
||||
|
||||
Contextual `data` can be attached to a new WebSocket in the `.upgrade()` call. This data is made available on the `ws.data` property inside the WebSocket handlers.
|
||||
|
||||
@@ -111,20 +145,16 @@ Contextual `data` can be attached to a new WebSocket in the `.upgrade()` call. T
|
||||
type WebSocketData = {
|
||||
createdAt: number;
|
||||
channelId: string;
|
||||
authToken: string;
|
||||
};
|
||||
|
||||
// TypeScript: specify the type of `data`
|
||||
Bun.serve<WebSocketData>({
|
||||
fetch(req, server) {
|
||||
// use a library to parse cookies
|
||||
const cookies = parseCookies(req.headers.get("Cookie"));
|
||||
server.upgrade(req, {
|
||||
// this object must conform to WebSocketData
|
||||
// TS: this object must conform to WebSocketData
|
||||
data: {
|
||||
createdAt: Date.now(),
|
||||
channelId: new URL(req.url).searchParams.get("channelId"),
|
||||
authToken: cookies["X-Token"],
|
||||
},
|
||||
});
|
||||
|
||||
@@ -133,76 +163,53 @@ Bun.serve<WebSocketData>({
|
||||
websocket: {
|
||||
// handler called when a message is received
|
||||
async message(ws, message) {
|
||||
const user = getUserFromToken(ws.data.authToken);
|
||||
|
||||
ws.data; // WebSocketData
|
||||
await saveMessageToDatabase({
|
||||
channel: ws.data.channelId,
|
||||
message: String(message),
|
||||
userId: user.id,
|
||||
});
|
||||
},
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
To connect to this server from the browser, create a new `WebSocket`.
|
||||
|
||||
```ts#browser.js
|
||||
const socket = new WebSocket("ws://localhost:3000/chat");
|
||||
|
||||
socket.addEventListener("message", event => {
|
||||
console.log(event.data);
|
||||
})
|
||||
```
|
||||
|
||||
{% callout %}
|
||||
**Identifying users** — The cookies that are currently set on the page will be sent with the WebSocket upgrade request and available on `req.headers` in the `fetch` handler. Parse these cookies to determine the identity of the connecting user and set the value of `data` accordingly.
|
||||
{% /callout %}
|
||||
|
||||
### Pub/Sub
|
||||
## Pub/Sub
|
||||
|
||||
Bun's `ServerWebSocket` implementation implements a native publish-subscribe API for topic-based broadcasting. Individual sockets can `.subscribe()` to a topic (specified with a string identifier) and `.publish()` messages to all other subscribers to that topic. This topic-based broadcast API is similar to [MQTT](https://en.wikipedia.org/wiki/MQTT) and [Redis Pub/Sub](https://redis.io/topics/pubsub).
|
||||
|
||||
```ts
|
||||
const server = Bun.serve<{ username: string }>({
|
||||
const pubsubserver = Bun.serve<{username: string}>({
|
||||
fetch(req, server) {
|
||||
const url = new URL(req.url);
|
||||
if (url.pathname === "/chat") {
|
||||
console.log(`upgrade!`);
|
||||
const username = getUsernameFromReq(req);
|
||||
const success = server.upgrade(req, { data: { username } });
|
||||
if (req.url === '/chat') {
|
||||
const cookies = getCookieFromRequest(req);
|
||||
const success = server.upgrade(req, {
|
||||
data: {username: cookies.username},
|
||||
});
|
||||
return success
|
||||
? undefined
|
||||
: new Response("WebSocket upgrade error", { status: 400 });
|
||||
: new Response('WebSocket upgrade error', {status: 400});
|
||||
}
|
||||
|
||||
return new Response("Hello world");
|
||||
return new Response('Hello world');
|
||||
},
|
||||
websocket: {
|
||||
open(ws) {
|
||||
const msg = `${ws.data.username} has entered the chat`;
|
||||
ws.subscribe("the-group-chat");
|
||||
ws.publish("the-group-chat", msg);
|
||||
ws.subscribe('the-group-chat');
|
||||
ws.publish('the-group-chat', `${ws.data.username} has entered the chat`);
|
||||
},
|
||||
message(ws, message) {
|
||||
// this is a group chat
|
||||
// so the server re-broadcasts incoming message to everyone
|
||||
ws.publish("the-group-chat", `${ws.data.username}: ${message}`);
|
||||
ws.publish('the-group-chat', `${ws.data.username}: ${message}`);
|
||||
},
|
||||
close(ws) {
|
||||
const msg = `${ws.data.username} has left the chat`;
|
||||
ws.unsubscribe("the-group-chat");
|
||||
ws.publish("the-group-chat", msg);
|
||||
ws.unsubscribe('the-group-chat');
|
||||
ws.publish('the-group-chat', `${ws.data.username} has left the chat`);
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
console.log(`Listening on ${server.hostname}:${server.port}`);
|
||||
```
|
||||
|
||||
Calling `.publish(data)` will send the message to all subscribers of a topic _except_ the socket that called `.publish()`.
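
To reach every subscriber, including the sender, a message can instead be published from the `Server` instance returned by `Bun.serve` (its `publish` method appears in the [Reference](#reference) below). A rough sketch, with placeholder `data`:

```ts
const server = Bun.serve<{ username: string }>({
  fetch(req, server) {
    // placeholder contextual data for illustration
    if (server.upgrade(req, { data: { username: "guest" } })) return;
    return new Response("Expected a WebSocket request", { status: 400 });
  },
  websocket: {
    open(ws) {
      ws.subscribe("the-group-chat");
    },
    message(ws, message) {
      // published from the server, so the sending socket receives it too
      server.publish("the-group-chat", `${ws.data.username}: ${message}`);
    },
  },
});
```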
|
||||
|
||||
### Compression
|
||||
## Compression
|
||||
|
||||
Per-message [compression](https://websockets.readthedocs.io/en/stable/topics/compression.html) can be enabled with the `perMessageDeflate` parameter.
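
A minimal sketch of enabling it, with all other options left at their defaults:

```ts
Bun.serve({
  fetch(req, server) {
    if (server.upgrade(req)) return;
    return new Response("Expected a WebSocket request", { status: 400 });
  },
  websocket: {
    // enable per-message compression for incoming and outgoing messages
    perMessageDeflate: true,
    message(ws, message) {
      ws.send(message);
    },
  },
});
```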
|
||||
|
||||
@@ -224,7 +231,7 @@ ws.send("Hello world", true);
|
||||
|
||||
For fine-grained control over compression characteristics, refer to the [Reference](#reference).
|
||||
|
||||
### Backpressure
|
||||
## Backpressure
|
||||
|
||||
The `.send(message)` method of `ServerWebSocket` returns a `number` indicating the result of the operation.
|
||||
|
||||
@@ -234,42 +241,6 @@ The `.send(message)` method of `ServerWebSocket` returns a `number` indicating t
|
||||
|
||||
This gives you better control over backpressure in your server.
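
A sketch of acting on that return value; the meaning of the specific codes (`-1` for backpressure, `0` for a dropped message, a positive byte count otherwise) is an assumption here, since the table describing them is elided from this diff:

```ts
Bun.serve({
  fetch(req, server) {
    if (server.upgrade(req)) return;
    return new Response("Expected a WebSocket request", { status: 400 });
  },
  websocket: {
    message(ws, message) {
      const result = ws.send(message);
      if (result === -1) {
        // assumed: message enqueued, but the socket is applying backpressure
      } else if (result === 0) {
        // assumed: message dropped (e.g. the connection is closed)
      } else {
        // assumed: `result` is the number of bytes sent
      }
    },
  },
});
```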
|
||||
|
||||
## Connect to a `WebSocket` server
|
||||
|
||||
To connect to an external socket server, either from a browser or from Bun, create an instance of `WebSocket` with the constructor.
|
||||
|
||||
```ts
|
||||
const socket = new WebSocket("ws://localhost:3000");
|
||||
```
|
||||
|
||||
In browsers, the cookies that are currently set on the page will be sent with the WebSocket upgrade request. This is a standard feature of the `WebSocket` API.
|
||||
|
||||
For convenience, Bun lets you set custom headers directly in the constructor. This is a Bun-specific extension of the `WebSocket` standard. _This will not work in browsers._
|
||||
|
||||
```ts
|
||||
const socket = new WebSocket("ws://localhost:3000", {
|
||||
headers: {
|
||||
// custom headers
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
To add event listeners to the socket:
|
||||
|
||||
```ts
|
||||
// message is received
|
||||
socket.addEventListener("message", event => {});
|
||||
|
||||
// socket opened
|
||||
socket.addEventListener("open", event => {});
|
||||
|
||||
// socket closed
|
||||
socket.addEventListener("close", event => {});
|
||||
|
||||
// error handler
|
||||
socket.addEventListener("error", event => {});
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
```ts
|
||||
@@ -277,10 +248,7 @@ namespace Bun {
|
||||
export function serve(params: {
|
||||
fetch: (req: Request, server: Server) => Response | Promise<Response>;
|
||||
websocket?: {
|
||||
message: (
|
||||
ws: ServerWebSocket,
|
||||
message: string | ArrayBuffer | Uint8Array,
|
||||
) => void;
|
||||
message: (ws: ServerWebSocket, message: string | ArrayBuffer | Uint8Array) => void;
|
||||
open?: (ws: ServerWebSocket) => void;
|
||||
close?: (ws: ServerWebSocket) => void;
|
||||
error?: (ws: ServerWebSocket, error: Error) => void;
|
||||
@@ -310,11 +278,7 @@ type Compressor =
|
||||
|
||||
interface Server {
|
||||
pendingWebsockets: number;
|
||||
publish(
|
||||
topic: string,
|
||||
data: string | ArrayBufferView | ArrayBuffer,
|
||||
compress?: boolean,
|
||||
): number;
|
||||
publish(topic: string, data: string | ArrayBufferView | ArrayBuffer, compress?: boolean): number;
|
||||
upgrade(
|
||||
req: Request,
|
||||
options?: {
|
||||
|
||||
@@ -47,9 +47,6 @@ registry = "https://registry.yarnpkg.com/"
|
||||
# Install for production? This is the equivalent to the "--production" CLI argument
|
||||
production = false
|
||||
|
||||
# Disallow changes to lockfile? This is the equivalent to the "--frozen-lockfile" CLI argument
|
||||
frozenLockfile = false
|
||||
|
||||
# Don't actually install
|
||||
dryRun = true
|
||||
|
||||
@@ -111,7 +108,6 @@ export interface Install {
|
||||
scopes: Scopes;
|
||||
registry: Registry;
|
||||
production: boolean;
|
||||
frozenLockfile: boolean;
|
||||
dryRun: boolean;
|
||||
optional: boolean;
|
||||
dev: boolean;
|
||||
|
||||
@@ -49,12 +49,6 @@ To install in production mode (i.e. without `devDependencies`):
|
||||
$ bun install --production
|
||||
```
|
||||
|
||||
To install dependencies without allowing changes to lockfile (useful on CI):
|
||||
|
||||
```bash
|
||||
$ bun install --frozen-lockfile
|
||||
```
|
||||
|
||||
To perform a dry run (i.e. don't actually install anything):
|
||||
|
||||
```bash
|
||||
@@ -86,9 +80,6 @@ peer = false
|
||||
# equivalent to `--production` flag
|
||||
production = false
|
||||
|
||||
# equivalent to `--frozen-lockfile` flag
|
||||
frozenLockfile = false
|
||||
|
||||
# equivalent to `--dry-run` flag
|
||||
dryRun = false
|
||||
```
|
||||
@@ -194,7 +185,7 @@ $ cd /path/to/my-app
|
||||
$ bun link cool-pkg
|
||||
```
|
||||
|
||||
In addition, the `--save` flag can be used to add `cool-pkg` to the `dependencies` field of your app's package.json with a special version specifier that tells Bun to load from the registered local directory instead of installing from `npm`:
|
||||
This will add `cool-pkg` to the `dependencies` field of your app's package.json with a special version specifier that tells Bun to load from the registered local directory instead of installing from `npm`.
|
||||
|
||||
```json-diff
|
||||
{
|
||||
|
||||
@@ -102,7 +102,7 @@ To debug environment variables, run `bun run env` to view a list of resolved env
|
||||
|
||||
Bun is designed to start fast and run fast.
|
||||
|
||||
Under the hood Bun uses the [JavaScriptCore engine](https://developer.apple.com/documentation/javascriptcore), which is developed by Apple for Safari. In most cases, the startup and running performance is faster than V8, the engine used by Node.js and Chromium-based browsers. Its transpiler and runtime are written in Zig, a modern, high-performance language. On Linux, this translates into startup times [4x faster](https://twitter.com/jarredsumner/status/1499225725492076544) than Node.js.
|
||||
Under the hood Bun uses the [JavaScriptCore engine](https://developer.apple.com/documentation/javascriptcore), which is developed by Apple for Safari. In most cases, the startup and running performance is faster than V8, the engine used by Node.js and Chromium-based browsers. It's transpiler and runtime are written in Zig, a modern, high-performance language. On Linux, this translates into startup times [4x faster](https://twitter.com/jarredsumner/status/1499225725492076544) than Node.js.
|
||||
|
||||
{% image src="/images/bun-run-speed.jpeg" caption="Bun vs Node.js vs Deno running Hello World" /%}
|
||||
|
||||
|
||||
@@ -77,8 +77,6 @@ Bun is compatible with popular UI testing libraries:
|
||||
- [DOM Testing Library](https://testing-library.com/docs/dom-testing-library/intro/)
|
||||
- [React Testing Library](https://testing-library.com/docs/react-testing-library/intro)
|
||||
|
||||
See [Test > DOM Testing](/docs/test/dom) for complete documentation.
|
||||
|
||||
## Performance
|
||||
|
||||
Bun's test runner is fast.
|
||||
|
||||
@@ -1,20 +1,20 @@
|
||||
[Stric](https://github.com/bunsvr) is a minimalist, fast web framework for Bun.
|
||||
|
||||
```ts#index.ts
|
||||
import { Router } from '@stricjs/router';
|
||||
import { App } from "@stricjs/core";
|
||||
|
||||
// Export the fetch handler and serve with Bun
|
||||
export default new Router()
|
||||
// Return 'Hi' on every request
|
||||
.get('/', () => new Response('Hi'));
|
||||
export default new App()
|
||||
// Return "Hi!" on every request
|
||||
.use(() => new Response("Hi!"));
|
||||
```
|
||||
|
||||
Stric provides support for [ArrowJS](https://www.arrow-js.com), a library for building reactive interfaces.
|
||||
Stric provides support for [ArrowJS](https://www.arrow-js.com), a library for building reactive interfaces in **native** JavaScript.
|
||||
|
||||
{% codetabs %}
|
||||
|
||||
```ts#src/App.ts
|
||||
import { html } from '@stricjs/arrow/utils';
|
||||
import { html } from "@stricjs/arrow/utils";
|
||||
|
||||
// Code inside this function can use web APIs
|
||||
export function render() {
|
||||
@@ -23,10 +23,10 @@ export function render() {
|
||||
};
|
||||
|
||||
// Set the path to handle
|
||||
export const path = '/';
|
||||
export const path = "/";
|
||||
```
|
||||
```ts#index.ts
|
||||
import { PageRouter } from '@stricjs/arrow';
|
||||
import { PageRouter } from "@stricjs/arrow";
|
||||
|
||||
// Create a page router, build and serve directly
|
||||
new PageRouter().serve();
|
||||
|
||||
@@ -49,12 +49,6 @@ To install in production mode (i.e. without `devDependencies`):
|
||||
$ bun install --production
|
||||
```
|
||||
|
||||
To install dependencies without allowing changes to lockfile (useful on CI):
|
||||
|
||||
```bash
|
||||
$ bun install --frozen-lockfile
|
||||
```
|
||||
|
||||
To perform a dry run (i.e. don't actually install anything):
|
||||
|
||||
```bash
|
||||
@@ -86,9 +80,6 @@ peer = false
|
||||
# equivalent to `--production` flag
|
||||
production = false
|
||||
|
||||
# equivalent to `--frozen-lockfile` flag
|
||||
frozenLockfile = false
|
||||
|
||||
# equivalent to `--dry-run` flag
|
||||
dryRun = false
|
||||
```
|
||||
|
||||
@@ -3,19 +3,19 @@ Bun ships as a single executable that can be installed a few different ways.
|
||||
{% callout %}
|
||||
**Windows users** — Bun does not currently provide a native Windows build. We're working on this; progress can be tracked at [this issue](https://github.com/oven-sh/bun/issues/43). In the meantime, use one of the installation methods below for Windows Subsystem for Linux.
|
||||
|
||||
**Linux users** — The `unzip` package is required to install Bun. Kernel version 5.6 or higher is strongly recommended, but the minimum is 5.1.
|
||||
**Linux users** — Kernel version 5.6 or higher is strongly recommended, but the minimum is 5.1.
|
||||
{% /callout %}
|
||||
|
||||
{% codetabs %}
|
||||
|
||||
```bash#NPM
|
||||
$ npm install -g bun # the last `npm` command you'll ever need
|
||||
```
|
||||
|
||||
```bash#Native
|
||||
$ curl -fsSL https://bun.sh/install | bash # for macOS, Linux, and WSL
|
||||
```
|
||||
|
||||
```bash#npm
|
||||
$ npm install -g bun # the last `npm` command you'll ever need
|
||||
```
|
||||
|
||||
```bash#Homebrew
|
||||
$ brew tap oven-sh/bun # for macOS and Linux
|
||||
$ brew install bun
|
||||
@@ -26,7 +26,7 @@ $ docker pull oven/bun
|
||||
$ docker run --rm --init --ulimit memlock=-1:-1 oven/bun
|
||||
```
|
||||
|
||||
```bash#Proto
|
||||
```bash#proto
|
||||
$ proto install bun
|
||||
```
|
||||
|
||||
|
||||
@@ -190,9 +190,6 @@ export default {
|
||||
page("test/snapshots", "Snapshots", {
|
||||
description: "Add lifecycle hooks to your tests that run before/after each test or test run",
|
||||
}),
|
||||
page("test/dom", "DOM testing", {
|
||||
description: "Write headless tests for UI and React/Vue/Svelte/Lit components with happy-dom",
|
||||
}),
|
||||
page("test/hot", "Watch mode", {
|
||||
description: "Reload your tests automatically on change.",
|
||||
}),
|
||||
|
||||
@@ -42,7 +42,7 @@ $ brew install llvm@15
|
||||
|
||||
```bash#Ubuntu/Debian
|
||||
# On Ubuntu 22.04 and newer, LLVM 15 is available in the default repositories
|
||||
$ sudo apt install llvm-15 lld-15 clang-15
|
||||
$ sudo apt install llvm-15 lld-15
|
||||
# On older versions,
|
||||
$ wget https://apt.llvm.org/llvm.sh -O - | sudo bash -s -- 15 all
|
||||
```
|
||||
@@ -102,7 +102,7 @@ Zig can be installed either with our npm package [`@oven/zig`](https://www.npmjs
|
||||
|
||||
```bash
|
||||
$ bun install -g @oven/zig
|
||||
$ zigup 0.11.0-dev.3737+9eb008717
|
||||
$ zigup master
|
||||
```
|
||||
|
||||
## Building
|
||||
|
||||
@@ -129,9 +129,6 @@ peer = false
|
||||
# equivalent to `--production` flag
|
||||
production = false
|
||||
|
||||
# equivalent to `--frozen-lockfile` flag
|
||||
frozenLockfile = false
|
||||
|
||||
# equivalent to `--dry-run` flag
|
||||
dryRun = false
|
||||
```
|
||||
|
||||
@@ -108,8 +108,8 @@ Once it finds the `foo` package, Bun reads the `package.json` to determine how t
|
||||
"worker": "./index.js",
|
||||
"module": "./index.js",
|
||||
"node": "./index.js",
|
||||
"default": "./index.js",
|
||||
"browser": "./index.js" // lowest priority
|
||||
"browser": "./index.js",
|
||||
"default": "./index.js" // lowest priority
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
@@ -1,75 +0,0 @@
|
||||
Bun's test runner plays well with existing component and DOM testing libraries, including React Testing Library and [`happy-dom`](https://github.com/capricorn86/happy-dom).
|
||||
|
||||
## `happy-dom`
|
||||
|
||||
For writing headless tests for your frontend code and components, we recommend [`happy-dom`](https://github.com/capricorn86/happy-dom). Happy DOM implements a complete set of HTML and DOM APIs in plain JavaScript, making it possible to simulate a browser environment with high fidelity.
|
||||
|
||||
To get started install the `@happy-dom/global-registrator` package as a dev dependency.
|
||||
|
||||
```bash
|
||||
$ bun add -d @happy-dom/global-registrator
|
||||
```
|
||||
|
||||
We'll be using Bun's _preload_ functionality to register the `happy-dom` globals before running our tests. This step will make browser APIs like `document` available in the global scope. Create a file called `happydom.ts` in the root of your project and add the following code:
|
||||
|
||||
```ts
|
||||
import { GlobalRegistrator } from "@happy-dom/global-registrator";
|
||||
|
||||
GlobalRegistrator.register();
|
||||
```
|
||||
|
||||
To preload this file before `bun test`, open or create a `bunfig.toml` file and add the following lines.
|
||||
|
||||
```toml
|
||||
[test]
|
||||
preload = "./happydom.ts"
|
||||
```
|
||||
|
||||
This will execute `happydom.ts` when you run `bun test`. Now you can write tests that use browser APIs like `document` and `window`.
|
||||
|
||||
```ts#dom.test.ts
|
||||
import {test, expect} from 'bun:test';
|
||||
|
||||
test('dom test', () => {
|
||||
document.body.innerHTML = `<button>My button</button>`;
|
||||
const button = document.querySelector('button');
|
||||
expect(button?.innerText).toEqual('My button');
|
||||
});
|
||||
```
|
||||
|
||||
Depending on your `tsconfig.json` setup, you may see a `"Cannot find name 'document'"` type error in the code above. To "inject" the types for `document` and other browser APIs, add the following [triple-slash directive](https://www.typescriptlang.org/docs/handbook/triple-slash-directives.html) to the top of any test file.
|
||||
|
||||
```ts-diff#dom.test.ts
|
||||
+ /// <reference lib="dom" />
|
||||
|
||||
import {test, expect} from 'bun:test';
|
||||
|
||||
test('dom test', () => {
|
||||
document.body.innerHTML = `<button>My button</button>`;
|
||||
const button = document.querySelector('button');
|
||||
expect(button?.innerText).toEqual('My button');
|
||||
});
|
||||
```
|
||||
|
||||
Let's run this test with `bun test`:
|
||||
|
||||
```bash
|
||||
$ bun test
|
||||
bun test v0.x.y
|
||||
|
||||
dom.test.ts:
|
||||
✓ dom test [0.82ms]
|
||||
|
||||
1 pass
|
||||
0 fail
|
||||
1 expect() calls
|
||||
Ran 1 tests across 1 files. 1 total [125.00ms]
|
||||
```
|
||||
|
||||
<!-- ## React Testing Library
|
||||
|
||||
Once you've set up `happy-dom` as described above, you can use it with React Testing Library. To get started, install the `@testing-library/react` package as a dev dependency.
|
||||
|
||||
```bash
|
||||
$ bun add -d @testing-library/react
|
||||
``` -->
|
||||
@@ -167,12 +167,12 @@ Bun implements the following matchers. Full Jest compatibility is on the roadmap
|
||||
|
||||
---
|
||||
|
||||
- 🟢
|
||||
- 🔴
|
||||
- [`.anything()`](https://jestjs.io/docs/expect#expectanything)
|
||||
|
||||
---
|
||||
|
||||
- 🟢
|
||||
- 🔴
|
||||
- [`.any()`](https://jestjs.io/docs/expect#expectanyconstructor)
|
||||
|
||||
---
|
||||
@@ -202,12 +202,12 @@ Bun implements the following matchers. Full Jest compatibility is on the roadmap
|
||||
|
||||
---
|
||||
|
||||
- 🟢
|
||||
- 🔴
|
||||
- [`.stringContaining()`](https://jestjs.io/docs/expect#expectstringcontainingstring)
|
||||
|
||||
---
|
||||
|
||||
- 🟢
|
||||
- 🔴
|
||||
- [`.stringMatching()`](https://jestjs.io/docs/expect#expectstringmatchingstring--regexp)
|
||||
|
||||
---
|
||||
@@ -227,12 +227,12 @@ Bun implements the following matchers. Full Jest compatibility is on the roadmap
|
||||
|
||||
---
|
||||
|
||||
- 🟢
|
||||
- 🔴
|
||||
- [`.toHaveBeenCalled()`](https://jestjs.io/docs/expect#tohavebeencalled)
|
||||
|
||||
---
|
||||
|
||||
- 🟢
|
||||
- 🔴
|
||||
- [`.toHaveBeenCalledTimes()`](https://jestjs.io/docs/expect#tohavebeencalledtimesnumber)
|
||||
|
||||
---
|
||||
@@ -312,12 +312,12 @@ Bun implements the following matchers. Full Jest compatibility is on the roadmap
|
||||
|
||||
---
|
||||
|
||||
- 🟢
|
||||
- 🔴
|
||||
- [`.toMatch()`](https://jestjs.io/docs/expect#tomatchregexp--string)
|
||||
|
||||
---
|
||||
|
||||
- 🟢
|
||||
- 🔴
|
||||
- [`.toMatchObject()`](https://jestjs.io/docs/expect#tomatchobjectobject)
|
||||
|
||||
---
|
||||
|
||||
@@ -3,8 +3,7 @@ import { parse } from "querystring";
|
||||
|
||||
export default {
|
||||
fetch(req) {
|
||||
const url = new URL(req.url);
|
||||
if (url.pathname === "/favicon.ico") return new Response("nooo dont open favicon in editor", { status: 404 });
|
||||
if (req.url === "/favicon.ico") return new Response("nooo dont open favicon in editor", { status: 404 });
|
||||
|
||||
var pathname = req.url.substring(1);
|
||||
const q = pathname.indexOf("?");
|
||||
|
||||
@@ -91,7 +91,7 @@ pub const Arguments = struct {
|
||||
var raw_args: std.ArrayListUnmanaged(string) = undefined;
|
||||
|
||||
if (positionals.len > 0) {
|
||||
raw_args = .{ .capacity = positionals.len, .items = @ptrFromInt([*][]const u8, @intFromPtr(positionals.ptr))[0..positionals.len] };
|
||||
raw_args = .{ .capacity = positionals.len, .items = @intToPtr([*][]const u8, @ptrToInt(positionals.ptr))[0..positionals.len] };
|
||||
} else {
|
||||
raw_args = .{};
|
||||
}
|
||||
|
||||
@@ -95,7 +95,7 @@ pub const Arguments = struct {
|
||||
var raw_args: std.ArrayListUnmanaged(string) = undefined;
|
||||
|
||||
if (positionals.len > 0) {
|
||||
raw_args = .{ .capacity = positionals.len, .items = @ptrFromInt([*][]const u8, @intFromPtr(positionals.ptr))[0..positionals.len] };
|
||||
raw_args = .{ .capacity = positionals.len, .items = @intToPtr([*][]const u8, @ptrToInt(positionals.ptr))[0..positionals.len] };
|
||||
} else {
|
||||
raw_args = .{};
|
||||
}
|
||||
@@ -244,7 +244,7 @@ pub fn main() anyerror!void {
|
||||
const http = channel.readItem() catch continue;
|
||||
read_count += 1;
|
||||
|
||||
Output.printElapsed(@floatCast(f64, @floatFromInt(f128, http.elapsed) / std.time.ns_per_ms));
|
||||
Output.printElapsed(@floatCast(f64, @intToFloat(f128, http.elapsed) / std.time.ns_per_ms));
|
||||
if (http.response) |resp| {
|
||||
if (resp.status_code == 200) {
|
||||
success_count += 1;
|
||||
@@ -270,7 +270,7 @@ pub fn main() anyerror!void {
|
||||
http.client.url.href,
|
||||
http.response_buffer.list.items.len,
|
||||
});
|
||||
Output.printElapsed(@floatCast(f64, @floatFromInt(f128, http.gzip_elapsed) / std.time.ns_per_ms));
|
||||
Output.printElapsed(@floatCast(f64, @intToFloat(f128, http.gzip_elapsed) / std.time.ns_per_ms));
|
||||
Output.prettyError("<d> gzip)<r>\n", .{});
|
||||
} else {
|
||||
Output.prettyError(" <d>{s}<r><d> - {s}<r> <d>({d} bytes)<r>\n", .{
|
||||
@@ -295,7 +295,7 @@ pub fn main() anyerror!void {
|
||||
fail_count,
|
||||
});
|
||||
|
||||
Output.printElapsed(@floatCast(f64, @floatFromInt(f128, timer.read()) / std.time.ns_per_ms));
|
||||
Output.printElapsed(@floatCast(f64, @intToFloat(f128, timer.read()) / std.time.ns_per_ms));
|
||||
Output.prettyErrorln(" {d} requests", .{
|
||||
read_count,
|
||||
});
|
||||
|
||||
@@ -24,7 +24,7 @@
|
||||
"@types/react": "^18.0.25",
|
||||
"@typescript-eslint/eslint-plugin": "^5.31.0",
|
||||
"@typescript-eslint/parser": "^5.31.0",
|
||||
"bun-webkit": "0.0.1-8a03cf746abef8a48c932ab25f8821390632f2e2"
|
||||
"bun-webkit": "0.0.1-b2f1006a06f81bc860c89dd4c7cec3e7117c4c4c"
|
||||
},
|
||||
"version": "0.0.0",
|
||||
"prettier": "./.prettierrc.cjs"
|
||||
|
||||
169
packages/bun-devtools/.gitignore
vendored
Normal file
169
packages/bun-devtools/.gitignore
vendored
Normal file
@@ -0,0 +1,169 @@
|
||||
# Based on https://raw.githubusercontent.com/github/gitignore/main/Node.gitignore
|
||||
|
||||
# Logs
|
||||
|
||||
logs
|
||||
*.log
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
lerna-debug.log*
|
||||
.pnpm-debug.log*
|
||||
|
||||
# Diagnostic reports (https://nodejs.org/api/report.html)
|
||||
|
||||
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
|
||||
|
||||
# Runtime data
|
||||
|
||||
pids
|
||||
*.pid
|
||||
*.seed
|
||||
*.pid.lock
|
||||
|
||||
# Directory for instrumented libs generated by jscoverage/JSCover
|
||||
|
||||
lib-cov
|
||||
|
||||
# Coverage directory used by tools like istanbul
|
||||
|
||||
coverage
|
||||
*.lcov
|
||||
|
||||
# nyc test coverage
|
||||
|
||||
.nyc_output
|
||||
|
||||
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
|
||||
|
||||
.grunt
|
||||
|
||||
# Bower dependency directory (https://bower.io/)
|
||||
|
||||
bower_components
|
||||
|
||||
# node-waf configuration
|
||||
|
||||
.lock-wscript
|
||||
|
||||
# Compiled binary addons (https://nodejs.org/api/addons.html)
|
||||
|
||||
build/Release
|
||||
|
||||
# Dependency directories
|
||||
|
||||
node_modules/
|
||||
jspm_packages/
|
||||
|
||||
# Snowpack dependency directory (https://snowpack.dev/)
|
||||
|
||||
web_modules/
|
||||
|
||||
# TypeScript cache
|
||||
|
||||
*.tsbuildinfo
|
||||
|
||||
# Optional npm cache directory
|
||||
|
||||
.npm
|
||||
|
||||
# Optional eslint cache
|
||||
|
||||
.eslintcache
|
||||
|
||||
# Optional stylelint cache
|
||||
|
||||
.stylelintcache
|
||||
|
||||
# Microbundle cache
|
||||
|
||||
.rpt2_cache/
|
||||
.rts2_cache_cjs/
|
||||
.rts2_cache_es/
|
||||
.rts2_cache_umd/
|
||||
|
||||
# Optional REPL history
|
||||
|
||||
.node_repl_history
|
||||
|
||||
# Output of 'npm pack'
|
||||
|
||||
*.tgz
|
||||
|
||||
# Yarn Integrity file
|
||||
|
||||
.yarn-integrity
|
||||
|
||||
# dotenv environment variable files
|
||||
|
||||
.env
|
||||
.env.development.local
|
||||
.env.test.local
|
||||
.env.production.local
|
||||
.env.local
|
||||
|
||||
# parcel-bundler cache (https://parceljs.org/)
|
||||
|
||||
.cache
|
||||
.parcel-cache
|
||||
|
||||
# Next.js build output
|
||||
|
||||
.next
|
||||
out
|
||||
|
||||
# Nuxt.js build / generate output
|
||||
|
||||
.nuxt
|
||||
dist
|
||||
|
||||
# Gatsby files
|
||||
|
||||
.cache/
|
||||
|
||||
# Comment in the public line in if your project uses Gatsby and not Next.js
|
||||
|
||||
# https://nextjs.org/blog/next-9-1#public-directory-support
|
||||
|
||||
# public
|
||||
|
||||
# vuepress build output
|
||||
|
||||
.vuepress/dist
|
||||
|
||||
# vuepress v2.x temp and cache directory
|
||||
|
||||
.temp
|
||||
.cache
|
||||
|
||||
# Docusaurus cache and generated files
|
||||
|
||||
.docusaurus
|
||||
|
||||
# Serverless directories
|
||||
|
||||
.serverless/
|
||||
|
||||
# FuseBox cache
|
||||
|
||||
.fusebox/
|
||||
|
||||
# DynamoDB Local files
|
||||
|
||||
.dynamodb/
|
||||
|
||||
# TernJS port file
|
||||
|
||||
.tern-port
|
||||
|
||||
# Stores VSCode versions used for testing VSCode extensions
|
||||
|
||||
.vscode-test
|
||||
|
||||
# yarn v2
|
||||
|
||||
.yarn/cache
|
||||
.yarn/unplugged
|
||||
.yarn/build-state.yml
|
||||
.yarn/install-state.gz
|
||||
.pnp.*
|
||||
3
packages/bun-devtools/README.md
Normal file
3
packages/bun-devtools/README.md
Normal file
@@ -0,0 +1,3 @@
|
||||
# bun-devtools
|
||||
|
||||
A set of auto-generated TypeScript types for the WebKit debugger protocol.
|
||||
BIN
packages/bun-devtools/bun.lockb
Executable file
BIN
packages/bun-devtools/bun.lockb
Executable file
Binary file not shown.
1
packages/bun-devtools/heap/bun.heapsnapshot
Normal file
1
packages/bun-devtools/heap/bun.heapsnapshot
Normal file
File diff suppressed because one or more lines are too long
14
packages/bun-devtools/heap/jsc.d.ts
vendored
Normal file
14
packages/bun-devtools/heap/jsc.d.ts
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
export namespace JSC {
|
||||
/**
|
||||
* @link https://github.com/WebKit/webkit/blob/main/Source/JavaScriptCore/heap/HeapSnapshotBuilder.h
|
||||
*/
|
||||
export type HeapSnapshot = {
|
||||
version: 2;
|
||||
type: "Inspector";
|
||||
nodes: number[];
|
||||
nodeClassNames: string[];
|
||||
edges: number[];
|
||||
edgeTypes: string[];
|
||||
edgeNames: string[];
|
||||
};
|
||||
}
|
||||
30
packages/bun-devtools/heap/v8.d.ts
vendored
Normal file
30
packages/bun-devtools/heap/v8.d.ts
vendored
Normal file
@@ -0,0 +1,30 @@
|
||||
export namespace V8 {
|
||||
/**
|
||||
* @link https://github.com/julianburr/chrome-heap-snapshot-parser/blob/master/index.js#L72
|
||||
* @link https://stackoverflow.com/questions/69802133/chrome-heap-snapshot-structure-explanation
|
||||
*/
|
||||
export type HeapSnapshot = {
|
||||
snapshot: {
|
||||
meta: {
|
||||
node_fields: string[];
|
||||
node_types: [string[], ...string[]]; // ?
|
||||
edge_fields: string[];
|
||||
edge_types: [string[], ...string[]]; // ?
|
||||
trace_function_info_fields: string[];
|
||||
trace_node_fields: string[];
|
||||
sample_fields: string[];
|
||||
location_fields: string[];
|
||||
node_count: number;
|
||||
edge_count: number;
|
||||
trace_function_count: number;
|
||||
};
|
||||
};
|
||||
nodes: number[];
|
||||
edges: number[];
|
||||
trace_tree: unknown[];
|
||||
trace_function_infos: unknown[];
|
||||
samples: unknown[];
|
||||
locations: number[];
|
||||
strings: string[];
|
||||
};
|
||||
}
|
||||
48430
packages/bun-devtools/heap/workerd.heapsnapshot
Normal file
48430
packages/bun-devtools/heap/workerd.heapsnapshot
Normal file
File diff suppressed because it is too large
Load Diff
2
packages/bun-devtools/index.ts
Normal file
2
packages/bun-devtools/index.ts
Normal file
@@ -0,0 +1,2 @@
|
||||
export * from "./protocol/jsc";
|
||||
export * from "./protocol/v8";
|
||||
24
packages/bun-devtools/package.json
Normal file
24
packages/bun-devtools/package.json
Normal file
@@ -0,0 +1,24 @@
|
||||
{
|
||||
"name": "bun-devtools",
|
||||
"module": "./index.ts",
|
||||
"version": "0.0.2",
|
||||
"type": "module",
|
||||
"exports": {
|
||||
".": {
|
||||
"import": "./index.ts",
|
||||
"require": "./index.ts"
|
||||
}
|
||||
},
|
||||
"scripts": {
|
||||
"generate-protocol": "bun run scripts/generate-protocol.ts"
|
||||
},
|
||||
"files": [
|
||||
"index.ts",
|
||||
"package.json",
|
||||
"tsconfig.json",
|
||||
"protocol"
|
||||
],
|
||||
"peerDependencies": {
|
||||
"typescript": "^5.0.0"
|
||||
}
|
||||
}
|
||||
1953
packages/bun-devtools/protocol/jsc.d.ts
vendored
Normal file
1953
packages/bun-devtools/protocol/jsc.d.ts
vendored
Normal file
File diff suppressed because it is too large
Load Diff
3422
packages/bun-devtools/protocol/v8.d.ts
vendored
Normal file
3422
packages/bun-devtools/protocol/v8.d.ts
vendored
Normal file
File diff suppressed because it is too large
Load Diff
129
packages/bun-devtools/scripts/client.ts
Normal file
129
packages/bun-devtools/scripts/client.ts
Normal file
@@ -0,0 +1,129 @@
|
||||
// A DevTools client for JavaScriptCore.
|
||||
|
||||
import type { JSC } from "..";
|
||||
|
||||
type ClientOptions = {
|
||||
url: string | URL;
|
||||
event?: (event: JSC.Event<keyof JSC.EventMap>) => void;
|
||||
request?: (request: JSC.Request<keyof JSC.RequestMap>) => void;
|
||||
response?: (response: JSC.Response<keyof JSC.ResponseMap>) => void;
|
||||
};
|
||||
|
||||
class Client {
|
||||
#webSocket: WebSocket;
|
||||
#requestId: number;
|
||||
#pendingMessages: string[];
|
||||
#pendingRequests: Map<number, AbortController>;
|
||||
#ready: Promise<void>;
|
||||
|
||||
constructor(options: ClientOptions) {
|
||||
this.#webSocket = new WebSocket(options.url);
|
||||
this.#requestId = 1;
|
||||
this.#pendingMessages = [];
|
||||
this.#pendingRequests = new Map();
|
||||
this.#ready = new Promise((resolve, reject) => {
|
||||
this.#webSocket.addEventListener("open", () => {
|
||||
for (const message of this.#pendingMessages) {
|
||||
this.#send(message);
|
||||
}
|
||||
this.#pendingMessages.length = 0;
|
||||
resolve();
|
||||
});
|
||||
this.#webSocket.addEventListener("message", ({ data }) => {
|
||||
let response;
|
||||
try {
|
||||
response = { ...JSON.parse(data) };
|
||||
} catch {
|
||||
console.error("Received an invalid message:", data);
|
||||
return;
|
||||
}
|
||||
const { id, error, result, method, params } = response;
|
||||
if (method && params) {
|
||||
options.event?.(response);
|
||||
} else if (id && (result || error)) {
|
||||
try {
|
||||
options.response?.(response);
|
||||
} finally {
|
||||
const abort = this.#pendingRequests.get(id ?? -1);
|
||||
if (!abort) {
|
||||
console.error("Received an unexpected message:", response);
|
||||
return;
|
||||
}
|
||||
if (error) {
|
||||
abort.abort(new Error(JSON.stringify(error)));
|
||||
} else {
|
||||
abort.abort(result);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
console.error("Received an unexpected message:", response);
|
||||
}
|
||||
});
|
||||
this.#webSocket.addEventListener("error", (error) => {
|
||||
reject(error);
|
||||
});
|
||||
this.#webSocket.addEventListener("close", ({ code, reason = ""}) => {
|
||||
reject(new Error(`WebSocket closed: ${code} ${reason}`.trimEnd()));
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
get ready(): Promise<void> {
|
||||
return this.#ready;
|
||||
}
|
||||
|
||||
#send(message: string): void {
|
||||
const { readyState } = this.#webSocket;
|
||||
if (readyState === WebSocket.OPEN) {
|
||||
this.#webSocket.send(message);
|
||||
} else if (readyState === WebSocket.CONNECTING) {
|
||||
this.#pendingMessages.push(message);
|
||||
} else {
|
||||
const closed = readyState === WebSocket.CLOSING ? "closing" : "closed";
|
||||
throw new Error(`WebSocket is ${closed}`);
|
||||
}
|
||||
}
|
||||
|
||||
async fetch<T extends keyof JSC.RequestMap>(method: T, params: JSC.Request<T>["params"]): Promise<JSC.Response<T>> {
|
||||
const request: JSC.Request<T> = {
|
||||
id: this.#requestId++,
|
||||
method,
|
||||
params,
|
||||
};
|
||||
return new Promise((resolve, reject) => {
|
||||
const abort = new AbortController();
|
||||
abort.signal.addEventListener("abort", () => {
|
||||
this.#pendingRequests.delete(request.id);
|
||||
const { reason } = abort.signal;
|
||||
if (reason instanceof Error) {
|
||||
reject(reason);
|
||||
} else {
|
||||
resolve(reason);
|
||||
}
|
||||
});
|
||||
this.#pendingRequests.set(request.id, abort);
|
||||
this.#send(JSON.stringify(request));
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const client = new Client({
|
||||
url: "ws://localhost:9229",
|
||||
event: (event) => console.log("EVENT:", event),
|
||||
request: (request) => console.log("REQUEST:", request),
|
||||
response: (response) => console.log("RESPONSE:", response),
|
||||
});
|
||||
await client.ready;
|
||||
|
||||
while (true) {
|
||||
const [method, ...param] = prompt(">")?.split(" ") ?? [];
|
||||
if (!method.trim()) {
|
||||
continue;
|
||||
}
|
||||
const params = !param?.length ? {} : JSON.parse(eval(`JSON.stringify(${param.join(" ")})`));
|
||||
try {
|
||||
await client.fetch(method.trim() as any, params);
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
}
|
||||
}
|
||||
255
packages/bun-devtools/scripts/generate-protocol.ts
Normal file
255
packages/bun-devtools/scripts/generate-protocol.ts
Normal file
@@ -0,0 +1,255 @@
|
||||
import { join } from "node:path";
|
||||
import { writeFileSync, mkdirSync } from "node:fs";
|
||||
import { spawnSync } from "node:child_process";
|
||||
|
||||
async function download<V>(url: string): Promise<V> {
|
||||
const response = await fetch(url);
|
||||
if (!response.ok) {
|
||||
throw new Error(`${response.status}: ${url}`);
|
||||
}
|
||||
return response.json();
|
||||
}
|
||||
|
||||
type Protocol = {
|
||||
name: string;
|
||||
version: {
|
||||
major: number;
|
||||
minor: number;
|
||||
};
|
||||
domains: Domain[];
|
||||
};
|
||||
|
||||
type Domain = {
|
||||
domain: string;
|
||||
types: Property[];
|
||||
commands?: {
|
||||
name: string;
|
||||
description?: string;
|
||||
parameters?: Property[];
|
||||
returns?: Property[];
|
||||
}[];
|
||||
events?: {
|
||||
name: string;
|
||||
description?: string;
|
||||
parameters: Property[];
|
||||
}[];
|
||||
};
|
||||
|
||||
type Property = {
|
||||
id?: string;
|
||||
type?: string;
|
||||
name?: string;
|
||||
description?: string;
|
||||
optional?: boolean;
|
||||
} & (
|
||||
| {
|
||||
type: "array";
|
||||
items?: Property;
|
||||
}
|
||||
| {
|
||||
type: "object";
|
||||
properties?: Property[];
|
||||
}
|
||||
| {
|
||||
type: "string";
|
||||
enum?: string[];
|
||||
}
|
||||
| {
|
||||
$ref: string;
|
||||
}
|
||||
);
|
||||
|
||||
function format(property: Property): string {
|
||||
if (property.id) {
|
||||
const comment = property.description
|
||||
? `/** ${property.description} */\n`
|
||||
: "";
|
||||
const body = format({ ...property, id: undefined });
|
||||
return `${comment}export type ${property.id} = ${body};\n`;
|
||||
}
|
||||
if (property.type === "array") {
|
||||
const type = "items" in property ? format(property.items!) : "unknown";
|
||||
return `Array<${type}>`;
|
||||
}
|
||||
if (property.type === "object") {
|
||||
if (!("properties" in property)) {
|
||||
return "Record<string, unknown>";
|
||||
}
|
||||
if (property.properties!.length === 0) {
|
||||
return "{}";
|
||||
}
|
||||
const properties = property
|
||||
.properties!.map((property) => {
|
||||
const comment = property.description
|
||||
? `/** ${property.description} */\n`
|
||||
: "";
|
||||
const name = `${property.name}${property.optional ? "?" : ""}`;
|
||||
return `${comment} ${name}: ${format(property)};`;
|
||||
})
|
||||
.join("\n");
|
||||
return `{\n${properties}}`;
|
||||
}
|
||||
if (property.type === "string") {
|
||||
if (!("enum" in property)) {
|
||||
return "string";
|
||||
}
|
||||
return property.enum!.map((v) => `"${v}"`).join(" | ");
|
||||
}
|
||||
if ("$ref" in property) {
|
||||
if (/^Page|DOM|Security|CSS|IO|Emulation\./.test(property.$ref)) {
|
||||
return "unknown";
|
||||
}
|
||||
return property.$ref;
|
||||
}
|
||||
if (property.type === "integer") {
|
||||
return "number";
|
||||
}
|
||||
return property.type;
|
||||
}
|
||||
|
||||
function formatAll(protocol: Protocol): string {
|
||||
let body = "";
|
||||
const append = (property: Property) => {
|
||||
body += format(property);
|
||||
};
|
||||
const titlize = (name: string) =>
|
||||
name.charAt(0).toUpperCase() + name.slice(1);
|
||||
const events = new Map();
|
||||
const commands = new Map();
|
||||
for (const domain of protocol.domains) {
|
||||
body += `export namespace ${domain.domain} {`;
|
||||
for (const type of domain.types ?? []) {
|
||||
append(type);
|
||||
}
|
||||
for (const event of domain.events ?? []) {
|
||||
const symbol = `${domain.domain}.${event.name}`;
|
||||
const title = titlize(event.name);
|
||||
events.set(symbol, `${domain.domain}.${title}`);
|
||||
append({
|
||||
id: `${title}Event`,
|
||||
type: "object",
|
||||
description: `\`${symbol}\``,
|
||||
properties: event.parameters ?? [],
|
||||
});
|
||||
}
|
||||
for (const command of domain.commands ?? []) {
|
||||
const symbol = `${domain.domain}.${command.name}`;
|
||||
const title = titlize(command.name);
|
||||
commands.set(symbol, `${domain.domain}.${title}`);
|
||||
append({
|
||||
id: `${title}Request`,
|
||||
type: "object",
|
||||
description: `\`${symbol}\``,
|
||||
properties: command.parameters ?? [],
|
||||
});
|
||||
append({
|
||||
id: `${title}Response`,
|
||||
type: "object",
|
||||
description: `\`${symbol}\``,
|
||||
properties: command.returns ?? [],
|
||||
});
|
||||
}
|
||||
body += "};";
|
||||
}
|
||||
for (const type of ["Event", "Request", "Response"]) {
|
||||
const source = type === "Event" ? events : commands;
|
||||
append({
|
||||
id: `${type}Map`,
|
||||
type: "object",
|
||||
properties: [...source.entries()].map(([name, title]) => ({
|
||||
name: `"${name}"`,
|
||||
$ref: `${title}${type}`,
|
||||
})),
|
||||
});
|
||||
}
|
||||
body += `export type Event<T extends keyof EventMap> = {
|
||||
method: T;
|
||||
params: EventMap[T];
|
||||
};
|
||||
export type Request<T extends keyof RequestMap> = {
|
||||
id: number;
|
||||
method: T;
|
||||
params: RequestMap[T];
|
||||
};
|
||||
export type Response<T extends keyof ResponseMap> = {
|
||||
id: number;
|
||||
} & ({
|
||||
method?: T;
|
||||
result: ResponseMap[T];
|
||||
} | {
|
||||
error: {
|
||||
code?: string;
|
||||
message: string;
|
||||
};
|
||||
});`;
|
||||
return `export namespace ${protocol.name.toUpperCase()} {${body}};`;
|
||||
}
|
||||
|
||||
async function downloadV8(): Promise<Protocol> {
|
||||
const baseUrl =
|
||||
"https://raw.githubusercontent.com/ChromeDevTools/devtools-protocol/master/json";
|
||||
const filter = [
|
||||
"Runtime",
|
||||
"Network",
|
||||
"Console",
|
||||
"Debugger",
|
||||
"Profiler",
|
||||
"HeapProfiler",
|
||||
];
|
||||
return Promise.all([
|
||||
download<Protocol>(`${baseUrl}/js_protocol.json`),
|
||||
download<Protocol>(`${baseUrl}/browser_protocol.json`),
|
||||
]).then(([js, browser]) => ({
|
||||
name: "v8",
|
||||
version: js.version,
|
||||
domains: [...js.domains, ...browser.domains]
|
||||
.filter((domain) => filter.includes(domain.domain))
|
||||
.sort((a, b) => a.domain.localeCompare(b.domain)),
|
||||
}));
|
||||
}
|
||||
|
||||
async function downloadJsc(): Promise<Protocol> {
|
||||
const baseUrl =
|
||||
"https://raw.githubusercontent.com/WebKit/WebKit/main/Source/JavaScriptCore/inspector/protocol";
|
||||
return {
|
||||
name: "jsc",
|
||||
version: {
|
||||
major: 1,
|
||||
minor: 3,
|
||||
},
|
||||
domains: await Promise.all([
|
||||
download<Domain>(`${baseUrl}/Debugger.json`),
|
||||
download<Domain>(`${baseUrl}/Heap.json`),
|
||||
download<Domain>(`${baseUrl}/ScriptProfiler.json`),
|
||||
download<Domain>(`${baseUrl}/Runtime.json`),
|
||||
download<Domain>(`${baseUrl}/Network.json`),
|
||||
download<Domain>(`${baseUrl}/Console.json`),
|
||||
download<Domain>(`${baseUrl}/GenericTypes.json`),
|
||||
]).then((domains) =>
|
||||
domains.sort((a, b) => a.domain.localeCompare(b.domain))
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
async function run(cwd: string) {
|
||||
const [jsc, v8] = await Promise.all([downloadJsc(), downloadV8()]);
|
||||
try {
|
||||
mkdirSync(cwd);
|
||||
} catch (error) {
|
||||
if (error.code !== "EEXIST") {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
const write = (name: string, data: string) => {
|
||||
writeFileSync(join(cwd, name), data);
|
||||
spawnSync("bunx", ["prettier", "--write", name], { cwd, stdio: "ignore" });
|
||||
};
|
||||
// Note: Can be uncommented to inspect the JSON protocol files.
|
||||
// write("devtools/jsc.json", JSON.stringify(jsc));
|
||||
// write("devtools/v8.json", JSON.stringify(v8));
|
||||
write("jsc.d.ts", "// GENERATED - DO NOT EDIT\n" + formatAll(jsc));
|
||||
write("v8.d.ts", "// GENERATED - DO NOT EDIT\n" + formatAll(v8));
|
||||
}
|
||||
|
||||
run(join(__dirname, "..", "protocol"))
|
||||
.catch(console.error);
|
||||
18
packages/bun-devtools/tsconfig.json
Normal file
18
packages/bun-devtools/tsconfig.json
Normal file
@@ -0,0 +1,18 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"lib": ["ESNext"],
|
||||
"module": "esnext",
|
||||
"target": "esnext",
|
||||
"moduleResolution": "bundler",
|
||||
"moduleDetection": "force",
|
||||
"allowImportingTsExtensions": true,
|
||||
"strict": true,
|
||||
"downlevelIteration": true,
|
||||
"skipLibCheck": true,
|
||||
"jsx": "preserve",
|
||||
"allowSyntheticDefaultImports": true,
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"allowJs": true,
|
||||
"noEmit": true
|
||||
}
|
||||
}
|
||||
@@ -4,17 +4,9 @@ import { fsyncSync, rmSync, writeFileSync, writeSync } from "fs";
|
||||
import { readdirSync } from "node:fs";
|
||||
import { resolve } from "node:path";
|
||||
import { StringDecoder } from "node:string_decoder";
|
||||
import { totalmem } from "os";
|
||||
import { relative } from "path";
|
||||
import { fileURLToPath } from "url";
|
||||
|
||||
const nativeMemory = totalmem();
|
||||
const BUN_JSC_forceRAMSizeNumber = parseInt(process.env["BUN_JSC_forceRAMSize"] || "0", 10);
|
||||
let BUN_JSC_forceRAMSize = Number(BigInt(nativeMemory) >> BigInt(2)) + "";
|
||||
if (!(Number.isSafeInteger(BUN_JSC_forceRAMSizeNumber) && BUN_JSC_forceRAMSizeNumber > 0)) {
|
||||
BUN_JSC_forceRAMSize = BUN_JSC_forceRAMSizeNumber + "";
|
||||
}
|
||||
|
||||
const cwd = resolve(fileURLToPath(import.meta.url), "../../../../");
|
||||
process.chdir(cwd);
|
||||
|
||||
@@ -47,8 +39,6 @@ async function runTest(path) {
|
||||
env: {
|
||||
...process.env,
|
||||
FORCE_COLOR: "1",
|
||||
BUN_GARBAGE_COLLECTOR_LEVEL: "1",
|
||||
BUN_JSC_forceRAMSize,
|
||||
},
|
||||
});
|
||||
} catch (e) {
|
||||
|
||||
589
packages/bun-types/bun-test.d.ts
vendored
589
packages/bun-types/bun-test.d.ts
vendored
@@ -15,77 +15,6 @@
|
||||
*/
|
||||
|
||||
declare module "bun:test" {
|
||||
type AnyFunction = (...args: any) => any;
|
||||
/**
|
||||
* -- Mocks --
|
||||
*/
|
||||
export interface Mock<T extends AnyFunction>
|
||||
extends JestMock.MockInstance<T> {
|
||||
(...args: Parameters<T>): ReturnType<T>;
|
||||
}
|
||||
type _Mock<T extends AnyFunction> = Mock<T>;
|
||||
|
||||
export const mock: {
|
||||
<T extends AnyFunction>(Function: T): Mock<T>;
|
||||
};
|
||||
|
||||
interface Jest {
|
||||
restoreAllMocks(): void;
|
||||
fn<T extends AnyFunction>(func?: T): Mock<T>;
|
||||
}
|
||||
export const jest: Jest;
|
||||
export namespace jest {
|
||||
/**
|
||||
* Constructs the type of a mock function, e.g. the return type of `jest.fn()`.
|
||||
*/
|
||||
type Mock<T extends AnyFunction = AnyFunction> = _Mock<T>;
|
||||
/**
|
||||
* Wraps a class, function or object type with Jest mock type definitions.
|
||||
*/
|
||||
// type Mocked<T extends object> = JestMock.Mocked<T>;
|
||||
/**
|
||||
* Wraps a class type with Jest mock type definitions.
|
||||
*/
|
||||
// type MockedClass<T extends JestMock.ClassLike> = JestMock.MockedClass<T>;
|
||||
/**
|
||||
* Wraps a function type with Jest mock type definitions.
|
||||
*/
|
||||
// type MockedFunction<T extends AnyFunction> = JestMock.MockedFunction<T>;
|
||||
/**
|
||||
* Wraps an object type with Jest mock type definitions.
|
||||
*/
|
||||
// type MockedObject<T extends object> = JestMock.MockedObject<T>;
|
||||
/**
|
||||
* Constructs the type of a replaced property.
|
||||
*/
|
||||
type Replaced<T> = JestMock.Replaced<T>;
|
||||
/**
|
||||
* Constructs the type of a spied class or function.
|
||||
*/
|
||||
type Spied<T extends JestMock.ClassLike | AnyFunction> = JestMock.Spied<T>;
|
||||
/**
|
||||
* Constructs the type of a spied class.
|
||||
*/
|
||||
type SpiedClass<T extends JestMock.ClassLike> = JestMock.SpiedClass<T>;
|
||||
/**
|
||||
* Constructs the type of a spied function.
|
||||
*/
|
||||
type SpiedFunction<T extends AnyFunction> = JestMock.SpiedFunction<T>;
|
||||
/**
|
||||
* Constructs the type of a spied getter.
|
||||
*/
|
||||
type SpiedGetter<T> = JestMock.SpiedGetter<T>;
|
||||
/**
|
||||
* Constructs the type of a spied setter.
|
||||
*/
|
||||
type SpiedSetter<T> = JestMock.SpiedSetter<T>;
|
||||
}
|
||||
|
||||
export function spyOn<T extends object, K extends keyof T>(
|
||||
obj: T,
|
||||
methodOrPropertyValue: K,
|
||||
): Mock<() => T[K]>;
|
||||
|
||||
/**
|
||||
* Describes a group of related tests.
|
||||
*
|
||||
@@ -400,9 +329,6 @@ declare module "bun:test" {
|
||||
any: (
|
||||
constructor: ((..._: any[]) => any) | { new (..._: any[]): any },
|
||||
) => Expect;
|
||||
anything: () => Expect;
|
||||
stringContaining: (str: string) => Expect;
|
||||
stringMatching: (regex: RegExp | string) => Expect;
|
||||
};
|
||||
/**
|
||||
* Asserts that a value matches some criteria.
|
||||
@@ -424,20 +350,6 @@ declare module "bun:test" {
|
||||
* expect(null).not.toBeNull();
|
||||
*/
|
||||
not: Expect<unknown>;
|
||||
/**
|
||||
* Expects the value to be a promise that resolves.
|
||||
*
|
||||
* @example
|
||||
* expect(Promise.resolve(1)).resolves.toBe(1);
|
||||
*/
|
||||
resolves: Expect<unknown>;
|
||||
/**
|
||||
* Expects the value to be a promise that rejects.
|
||||
*
|
||||
* @example
|
||||
* expect(Promise.reject("error")).rejects.toBe("error");
|
||||
*/
|
||||
rejects: Expect<unknown>;
|
||||
/**
|
||||
* Asserts that a value equals what is expected.
|
||||
*
|
||||
@@ -454,24 +366,6 @@ declare module "bun:test" {
|
||||
* @param expected the expected value
|
||||
*/
|
||||
toBe(expected: T): void;
|
||||
/**
|
||||
* Asserts that a number is odd.
|
||||
*
|
||||
* @link https://jest-extended.jestcommunity.dev/docs/matchers/number/#tobeodd
|
||||
* @example
|
||||
* expect(1).toBeOdd();
|
||||
* expect(2).not.toBeOdd();
|
||||
*/
|
||||
toBeOdd(): void;
|
||||
/**
|
||||
* Asserts that a number is even.
|
||||
*
|
||||
* @link https://jest-extended.jestcommunity.dev/docs/matchers/number/#tobeeven
|
||||
* @example
|
||||
* expect(2).toBeEven();
|
||||
* expect(1).not.toBeEven();
|
||||
*/
|
||||
toBeEven(): void;
|
||||
/**
|
||||
* Asserts that value is close to the expected by floating point precision.
|
||||
*
|
||||
@@ -724,16 +618,6 @@ declare module "bun:test" {
|
||||
* @param hint Hint used to identify the snapshot in the snapshot file.
|
||||
*/
|
||||
toMatchSnapshot(propertyMatchers?: Object, hint?: string): void;
|
||||
/**
|
||||
* Asserts that an object matches a subset of properties.
|
||||
*
|
||||
* @example
|
||||
* expect({ a: 1, b: 2 }).toMatchObject({ b: 2 });
|
||||
* expect({ c: new Date(), d: 2 }).toMatchObject({ d: 2 });
|
||||
*
|
||||
* @param subset Subset of properties to match with.
|
||||
*/
|
||||
toMatchObject(subset: Object): void;
|
||||
/**
|
||||
* Asserts that a value is empty.
|
||||
*
|
||||
@@ -752,27 +636,6 @@ declare module "bun:test" {
|
||||
* expect(undefined).toBeNil();
|
||||
*/
|
||||
toBeNil(): void;
|
||||
/**
|
||||
* Asserts that a value is a `array`.
|
||||
*
|
||||
* @link https://jest-extended.jestcommunity.dev/docs/matchers/array/#tobearray
|
||||
* @example
|
||||
* expect([1]).toBeArray();
|
||||
* expect(new Array(1)).toBeArray();
|
||||
* expect({}).not.toBeArray();
|
||||
*/
|
||||
toBeArray(): void;
|
||||
/**
|
||||
* Asserts that a value is a `array` of a certain length.
|
||||
*
|
||||
* @link https://jest-extended.jestcommunity.dev/docs/matchers/array/#tobearrayofsize
|
||||
* @example
|
||||
* expect([]).toBeArrayOfSize(0);
|
||||
* expect([1]).toBeArrayOfSize(1);
|
||||
* expect(new Array(1)).toBeArrayOfSize(1);
|
||||
* expect({}).not.toBeArrayOfSize(0);
|
||||
*/
|
||||
toBeArrayOfSize(size: number): void;
|
||||
/**
|
||||
* Asserts that a value is a `boolean`.
|
||||
*
|
||||
@@ -792,26 +655,6 @@ declare module "bun:test" {
|
||||
* expect(1).not.toBeTrue();
|
||||
*/
|
||||
toBeTrue(): void;
|
||||
/**
|
||||
* Asserts that a value matches a specific type.
|
||||
*
|
||||
* @link https://vitest.dev/api/expect.html#tobetypeof
|
||||
* @example
|
||||
* expect(1).toBeTypeOf("number");
|
||||
* expect("hello").toBeTypeOf("string");
|
||||
* expect([]).not.toBeTypeOf("boolean");
|
||||
*/
|
||||
toBeTypeOf(
|
||||
type:
|
||||
| "bigint"
|
||||
| "boolean"
|
||||
| "function"
|
||||
| "number"
|
||||
| "object"
|
||||
| "string"
|
||||
| "symbol"
|
||||
| "undefined",
|
||||
): void;
|
||||
/**
|
||||
* Asserts that a value is `false`.
|
||||
*
|
||||
@@ -939,18 +782,6 @@ declare module "bun:test" {
|
||||
* @param expected the string to end with
|
||||
*/
|
||||
toEndWith(expected: string): void;
|
||||
/**
|
||||
* Ensures that a mock function is called.
|
||||
*/
|
||||
toHaveBeenCalled(): void;
|
||||
/**
|
||||
* Ensures that a mock function is called an exact number of times.
|
||||
*/
|
||||
toHaveBeenCalledTimes(expected: number): void;
|
||||
/**
|
||||
* Ensure that a mock function is called with specific arguments.
|
||||
*/
|
||||
// toHaveBeenCalledWith(...expected: Array<unknown>): void;
|
||||
};
|
||||
}
|
||||
|
||||
@@ -958,423 +789,3 @@ declare module "test" {
|
||||
import BunTestModule = require("bun:test");
|
||||
export = BunTestModule;
|
||||
}
|
||||
|
||||
declare namespace JestMock {
|
||||
/**
|
||||
* Copyright (c) Meta Platforms, Inc. and affiliates.
|
||||
*
|
||||
* This source code is licensed under the MIT license found in the
|
||||
* LICENSE file in the root directory of this source tree.
|
||||
*/
|
||||
export type ClassLike = {
|
||||
new (...args: any): any;
|
||||
};
|
||||
|
||||
export type ConstructorLikeKeys<T> = keyof {
|
||||
[K in keyof T as Required<T>[K] extends ClassLike ? K : never]: T[K];
|
||||
};
|
||||
|
||||
// export const fn: <T extends FunctionLike = UnknownFunction>(
|
||||
// implementation?: T | undefined,
|
||||
// ) => Mock<T>;
|
||||
|
||||
export type FunctionLike = (...args: any) => any;
|
||||
|
||||
export type MethodLikeKeys<T> = keyof {
|
||||
[K in keyof T as Required<T>[K] extends FunctionLike ? K : never]: T[K];
|
||||
};
|
||||
|
||||
/**
|
||||
* All what the internal typings need is to be sure that we have any-function.
|
||||
* `FunctionLike` type ensures that and helps to constrain the type as well.
|
||||
* The default of `UnknownFunction` makes sure that `any`s do not leak to the
|
||||
* user side. For instance, calling `fn()` without implementation will return
|
||||
* a mock of `(...args: Array<unknown>) => unknown` type. If implementation
|
||||
* is provided, its typings are inferred correctly.
|
||||
*/
|
||||
// export interface Mock<T extends FunctionLike = UnknownFunction>
|
||||
// extends Function,
|
||||
// MockInstance<T> {
|
||||
// new (...args: Parameters<T>): ReturnType<T>;
|
||||
// (...args: Parameters<T>): ReturnType<T>;
|
||||
// }
|
||||
|
||||
// export type Mocked<T> = T extends ClassLike
|
||||
// ? MockedClass<T>
|
||||
// : T extends FunctionLike
|
||||
// ? MockedFunction<T>
|
||||
// : T extends object
|
||||
// ? MockedObject<T>
|
||||
// : T;
|
||||
|
||||
// export const mocked: {
|
||||
// <T extends object>(
|
||||
// source: T,
|
||||
// options?: {
|
||||
// shallow: false;
|
||||
// },
|
||||
// ): Mocked<T>;
|
||||
// <T_1 extends object>(
|
||||
// source: T_1,
|
||||
// options: {
|
||||
// shallow: true;
|
||||
// },
|
||||
// ): MockedShallow<T_1>;
|
||||
// };
|
||||
|
||||
// export type MockedClass<T extends ClassLike> = MockInstance<
|
||||
// (...args: ConstructorParameters<T>) => Mocked<InstanceType<T>>
|
||||
// > &
|
||||
// MockedObject<T>;
|
||||
|
||||
// export type MockedFunction<T extends FunctionLike> = MockInstance<T> &
|
||||
// MockedObject<T>;
|
||||
|
||||
// type MockedFunctionShallow<T extends FunctionLike> = MockInstance<T> & T;
|
||||
|
||||
// export type MockedObject<T extends object> = {
|
||||
// [K in keyof T]: T[K] extends ClassLike
|
||||
// ? MockedClass<T[K]>
|
||||
// : T[K] extends FunctionLike
|
||||
// ? MockedFunction<T[K]>
|
||||
// : T[K] extends object
|
||||
// ? MockedObject<T[K]>
|
||||
// : T[K];
|
||||
// } & T;
|
||||
|
||||
// type MockedObjectShallow<T extends object> = {
|
||||
// [K in keyof T]: T[K] extends ClassLike
|
||||
// ? MockedClass<T[K]>
|
||||
// : T[K] extends FunctionLike
|
||||
// ? MockedFunctionShallow<T[K]>
|
||||
// : T[K];
|
||||
// } & T;
|
||||
|
||||
// export type MockedShallow<T> = T extends ClassLike
|
||||
// ? MockedClass<T>
|
||||
// : T extends FunctionLike
|
||||
// ? MockedFunctionShallow<T>
|
||||
// : T extends object
|
||||
// ? MockedObjectShallow<T>
|
||||
// : T;
|
||||
|
||||
// export type MockFunctionMetadata<
|
||||
// T = unknown,
|
||||
// MetadataType = MockMetadataType,
|
||||
// > = MockMetadata<T, MetadataType>;
|
||||
|
||||
// export type MockFunctionMetadataType = MockMetadataType;
|
||||
|
||||
type MockFunctionResult<T extends FunctionLike = UnknownFunction> =
|
||||
| MockFunctionResultIncomplete
|
||||
| MockFunctionResultReturn<T>
|
||||
| MockFunctionResultThrow;
|
||||
|
||||
type MockFunctionResultIncomplete = {
|
||||
type: "incomplete";
|
||||
/**
|
||||
* Result of a single call to a mock function that has not yet completed.
|
||||
* This occurs if you test the result from within the mock function itself,
|
||||
* or from within a function that was called by the mock.
|
||||
*/
|
||||
value: undefined;
|
||||
};
|
||||
|
||||
type MockFunctionResultReturn<T extends FunctionLike = UnknownFunction> = {
|
||||
type: "return";
|
||||
/**
|
||||
* Result of a single call to a mock function that returned.
|
||||
*/
|
||||
value: ReturnType<T>;
|
||||
};
|
||||
|
||||
type MockFunctionResultThrow = {
|
||||
type: "throw";
|
||||
/**
|
||||
* Result of a single call to a mock function that threw.
|
||||
*/
|
||||
value: unknown;
|
||||
};
|
||||
|
||||
type MockFunctionState<T extends FunctionLike = FunctionLike> = {
|
||||
/**
|
||||
* List of the call arguments of all calls that have been made to the mock.
|
||||
*/
|
||||
calls: Array<Parameters<T>>;
|
||||
/**
|
||||
* List of all the object instances that have been instantiated from the mock.
|
||||
*/
|
||||
instances: Array<ReturnType<T>>;
|
||||
/**
|
||||
* List of all the function contexts that have been applied to calls to the mock.
|
||||
*/
|
||||
contexts: Array<ThisParameterType<T>>;
|
||||
/**
|
||||
* List of the call order indexes of the mock. Jest indexes the order of
* invocations of all mocks in a test file. The index starts at `1`.
|
||||
*/
|
||||
invocationCallOrder: Array<number>;
|
||||
/**
|
||||
* List of the call arguments of the last call that was made to the mock.
|
||||
* If the function was not called, it will return `undefined`.
|
||||
*/
|
||||
lastCall?: Parameters<T>;
|
||||
/**
|
||||
* List of the results of all calls that have been made to the mock.
|
||||
*/
|
||||
results: Array<MockFunctionResult<T>>;
|
||||
};
|
||||
|
||||
export interface MockInstance<T extends FunctionLike = UnknownFunction> {
_isMockFunction: true;
_protoImpl: Function;
getMockImplementation(): T | undefined;
getMockName(): string;
mock: MockFunctionState<T>;
mockClear(): this;
mockReset(): this;
mockRestore(): void;
mockImplementation(fn: T): this;
mockImplementationOnce(fn: T): this;
withImplementation(fn: T, callback: () => Promise<unknown>): Promise<void>;
withImplementation(fn: T, callback: () => void): void;
mockName(name: string): this;
mockReturnThis(): this;
mockReturnValue(value: ReturnType<T>): this;
mockReturnValueOnce(value: ReturnType<T>): this;
mockResolvedValue(value: ResolveType<T>): this;
mockResolvedValueOnce(value: ResolveType<T>): this;
mockRejectedValue(value: RejectType<T>): this;
mockRejectedValueOnce(value: RejectType<T>): this;
}
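/**
* A minimal usage sketch of the MockInstance surface declared above, written
* against the mock() helper from "bun:test". The test name and the add()
* function are illustrative assumptions, not part of the typings.
*
* ```ts
* import { test, expect, mock } from "bun:test";
*
* test("mock state is tracked on .mock", () => {
*   const add = mock((a: number, b: number) => a + b);
*
*   add(1, 2);
*   add.mockReturnValueOnce(42);
*   add(3, 4); // returns 42 once, then falls back to the implementation
*
*   expect(add.mock.calls.length).toBe(2);
*   expect(add.mock.calls[0]).toEqual([1, 2]);
*   expect(add.mock.results[1]).toEqual({ type: "return", value: 42 });
*   expect(add.mock.lastCall).toEqual([3, 4]);
*
*   add.mockClear(); // resets calls/results but keeps the implementation
* });
* ```
*/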
|
||||
// export type MockMetadata<T, MetadataType = MockMetadataType> = {
|
||||
// ref?: number;
|
||||
// members?: Record<string, MockMetadata<T>>;
|
||||
// mockImpl?: T;
|
||||
// name?: string;
|
||||
// refID?: number;
|
||||
// type?: MetadataType;
|
||||
// value?: T;
|
||||
// length?: number;
|
||||
// };
|
||||
|
||||
// export type MockMetadataType =
|
||||
// | "object"
|
||||
// | "array"
|
||||
// | "regexp"
|
||||
// | "function"
|
||||
// | "constant"
|
||||
// | "collection"
|
||||
// | "null"
|
||||
// | "undefined";
|
||||
|
||||
// export class ModuleMocker {
|
||||
// private readonly _environmentGlobal;
|
||||
// private _mockState;
|
||||
// private _mockConfigRegistry;
|
||||
// private _spyState;
|
||||
// private _invocationCallCounter;
|
||||
// /**
|
||||
// * @see README.md
|
||||
// * @param global Global object of the test environment, used to create
|
||||
// * mocks
|
||||
// */
|
||||
// constructor(global: typeof globalThis);
|
||||
// private _getSlots;
|
||||
// private _ensureMockConfig;
|
||||
// private _ensureMockState;
|
||||
// private _defaultMockConfig;
|
||||
// private _defaultMockState;
|
||||
// private _makeComponent;
|
||||
// private _createMockFunction;
|
||||
// private _generateMock;
|
||||
// /**
|
||||
// * Check whether the given property of an object has been already replaced.
|
||||
// */
|
||||
// private _findReplacedProperty;
|
||||
// /**
|
||||
// * @see README.md
|
||||
// * @param metadata Metadata for the mock in the schema returned by the
|
||||
// * getMetadata method of this module.
|
||||
// */
|
||||
// generateFromMetadata<T>(metadata: MockMetadata<T>): Mocked<T>;
|
||||
// /**
|
||||
// * @see README.md
|
||||
// * @param component The component for which to retrieve metadata.
|
||||
// */
|
||||
// getMetadata<T = unknown>(
|
||||
// component: T,
|
||||
// _refs?: Map<T, number>,
|
||||
// ): MockMetadata<T> | null;
|
||||
// isMockFunction<T extends FunctionLike = UnknownFunction>(
|
||||
// fn: MockInstance<T>,
|
||||
// ): fn is MockInstance<T>;
|
||||
// isMockFunction<P extends Array<unknown>, R>(
|
||||
// fn: (...args: P) => R,
|
||||
// ): fn is Mock<(...args: P) => R>;
|
||||
// isMockFunction(fn: unknown): fn is Mock<UnknownFunction>;
|
||||
// fn<T extends FunctionLike = UnknownFunction>(implementation?: T): Mock<T>;
|
||||
// private _attachMockImplementation;
|
||||
// spyOn<
|
||||
// T extends object,
|
||||
// K extends PropertyLikeKeys<T>,
|
||||
// A extends "get" | "set",
|
||||
// >(
|
||||
// object: T,
|
||||
// methodKey: K,
|
||||
// accessType: A,
|
||||
// ): A extends "get"
|
||||
// ? SpiedGetter<T[K]>
|
||||
// : A extends "set"
|
||||
// ? SpiedSetter<T[K]>
|
||||
// : never;
|
||||
// spyOn<
|
||||
// T extends object,
|
||||
// K extends ConstructorLikeKeys<T> | MethodLikeKeys<T>,
|
||||
// V extends Required<T>[K],
|
||||
// >(
|
||||
// object: T,
|
||||
// methodKey: K,
|
||||
// ): V extends ClassLike | FunctionLike ? Spied<V> : never;
|
||||
// private _spyOnProperty;
|
||||
// replaceProperty<
|
||||
// T extends object,
|
||||
// K extends PropertyLikeKeys<T>,
|
||||
// V extends T[K],
|
||||
// >(object: T, propertyKey: K, value: V): Replaced<T[K]>;
|
||||
// clearAllMocks(): void;
|
||||
// resetAllMocks(): void;
|
||||
// restoreAllMocks(): void;
|
||||
// private _typeOf;
|
||||
// mocked<T extends object>(
|
||||
// source: T,
|
||||
// options?: {
|
||||
// shallow: false;
|
||||
// },
|
||||
// ): Mocked<T>;
|
||||
// mocked<T extends object>(
|
||||
// source: T,
|
||||
// options: {
|
||||
// shallow: true;
|
||||
// },
|
||||
// ): MockedShallow<T>;
|
||||
// }
|
||||
|
||||
export type PropertyLikeKeys<T> = Exclude<
|
||||
keyof T,
|
||||
ConstructorLikeKeys<T> | MethodLikeKeys<T>
|
||||
>;
|
||||
|
||||
export type RejectType<T extends FunctionLike> =
|
||||
ReturnType<T> extends PromiseLike<any> ? unknown : never;
|
||||
|
||||
export interface Replaced<T = unknown> {
|
||||
/**
|
||||
* Restore property to its original value known at the time of mocking.
|
||||
*/
|
||||
restore(): void;
|
||||
/**
|
||||
* Change the value of the property.
|
||||
*/
|
||||
replaceValue(value: T): this;
|
||||
}
|
||||
|
||||
export const replaceProperty: <
|
||||
T extends object,
|
||||
K_2 extends Exclude<
|
||||
keyof T,
|
||||
| keyof {
|
||||
[K in keyof T as Required<T>[K] extends ClassLike ? K : never]: T[K];
|
||||
}
|
||||
| keyof {
|
||||
[K_1 in keyof T as Required<T>[K_1] extends FunctionLike
|
||||
? K_1
|
||||
: never]: T[K_1];
|
||||
}
|
||||
>,
|
||||
V extends T[K_2],
|
||||
>(
|
||||
object: T,
|
||||
propertyKey: K_2,
|
||||
value: V,
|
||||
) => Replaced<T[K_2]>;
|
||||
|
||||
export type ResolveType<T extends FunctionLike> =
|
||||
ReturnType<T> extends PromiseLike<infer U> ? U : never;
|
||||
|
||||
export type Spied<T extends ClassLike | FunctionLike> = T extends ClassLike
|
||||
? SpiedClass<T>
|
||||
: T extends FunctionLike
|
||||
? SpiedFunction<T>
|
||||
: never;
|
||||
|
||||
export type SpiedClass<T extends ClassLike = UnknownClass> = MockInstance<
|
||||
(...args: ConstructorParameters<T>) => InstanceType<T>
|
||||
>;
|
||||
|
||||
export type SpiedFunction<T extends FunctionLike = UnknownFunction> =
|
||||
MockInstance<(...args: Parameters<T>) => ReturnType<T>>;
|
||||
|
||||
export type SpiedGetter<T> = MockInstance<() => T>;
|
||||
|
||||
export type SpiedSetter<T> = MockInstance<(arg: T) => void>;
|
||||
|
||||
export interface SpyInstance<T extends FunctionLike = UnknownFunction>
|
||||
extends MockInstance<T> {}
|
||||
|
||||
export const spyOn: {
|
||||
<
|
||||
T extends object,
|
||||
K_2 extends Exclude<
|
||||
keyof T,
|
||||
| keyof {
|
||||
[K in keyof T as Required<T>[K] extends ClassLike
|
||||
? K
|
||||
: never]: T[K];
|
||||
}
|
||||
| keyof {
|
||||
[K_1 in keyof T as Required<T>[K_1] extends FunctionLike
|
||||
? K_1
|
||||
: never]: T[K_1];
|
||||
}
|
||||
>,
|
||||
V extends Required<T>[K_2],
|
||||
A extends "set" | "get",
|
||||
>(
|
||||
object: T,
|
||||
methodKey: K_2,
|
||||
accessType: A,
|
||||
): A extends "get"
|
||||
? SpiedGetter<V>
|
||||
: A extends "set"
|
||||
? SpiedSetter<V>
|
||||
: never;
|
||||
<
|
||||
T_1 extends object,
|
||||
K_5 extends
|
||||
| keyof {
|
||||
[K_3 in keyof T_1 as Required<T_1>[K_3] extends ClassLike
|
||||
? K_3
|
||||
: never]: T_1[K_3];
|
||||
}
|
||||
| keyof {
|
||||
[K_4 in keyof T_1 as Required<T_1>[K_4] extends FunctionLike
|
||||
? K_4
|
||||
: never]: T_1[K_4];
|
||||
},
|
||||
V_1 extends Required<T_1>[K_5],
|
||||
>(
|
||||
object: T_1,
|
||||
methodKey: K_5,
|
||||
): V_1 extends ClassLike | FunctionLike ? Spied<V_1> : never;
|
||||
};
|
||||
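/**
* A short sketch of how the spyOn overloads above are meant to be used from
* "bun:test". The math object and its double() method are invented for the
* example; only the spyOn/mockRestore calls come from the declarations.
*
* ```ts
* import { test, expect, spyOn } from "bun:test";
*
* const math = {
*   double(n: number) {
*     return n * 2;
*   },
* };
*
* test("spyOn wraps a method and records calls", () => {
*   const spy = spyOn(math, "double"); // second overload: method key only
*   expect(math.double(2)).toBe(4); // original implementation still runs
*   expect(spy.mock.calls).toEqual([[2]]);
*
*   spy.mockReturnValueOnce(99);
*   expect(math.double(2)).toBe(99);
*
*   spy.mockRestore(); // put the original method back
* });
* ```
*/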
|
||||
export type UnknownClass = {
|
||||
new (...args: Array<unknown>): unknown;
|
||||
};
|
||||
|
||||
export type UnknownFunction = (...args: Array<unknown>) => unknown;
|
||||
|
||||
export {};
|
||||
}
|
||||
|
||||
packages/bun-types/bun.d.ts (289 changed lines, vendored)
@@ -51,10 +51,6 @@ declare module "bun" {
|
||||
*
|
||||
*/
|
||||
export const env: Env;
|
||||
/**
|
||||
* The raw arguments passed to the process, including flags passed to Bun. If you want to easily read flags passed to your script, consider using `process.argv` instead.
|
||||
*/
|
||||
export const argv: string[];
|
||||
export const origin: string;
|
||||
|
||||
/**
|
||||
@@ -673,29 +669,7 @@ declare module "bun" {
|
||||
/**
|
||||
* The name or path of the file, as specified in the constructor.
|
||||
*/
|
||||
readonly name?: string;
|
||||
|
||||
/**
|
||||
* Does the file exist?
|
||||
*
|
||||
* This returns true for regular files and FIFOs. It returns false for
|
||||
* directories. Note that a race condition can occur where the file is
|
||||
* deleted or renamed after this is called but before you open it.
|
||||
*
|
||||
* This does a system call to check if the file exists, which can be
|
||||
* slow.
|
||||
*
|
||||
* If using this in an HTTP server, it's faster to instead use `return new
|
||||
* Response(Bun.file(path))` and then an `error` handler to handle
|
||||
* exceptions.
|
||||
*
|
||||
* Instead of checking for a file's existence and then performing the
|
||||
* operation, it is faster to just perform the operation and handle the
|
||||
* error.
|
||||
*
|
||||
* For empty Blob, this always returns true.
|
||||
*/
|
||||
exists(): Promise<boolean>;
|
||||
name?: number;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -770,14 +744,6 @@ declare module "bun" {
|
||||
strict?: boolean,
|
||||
): boolean;
|
||||
|
||||
/**
|
||||
* Returns true if all properties in the subset exist in the
|
||||
* other and have equal values.
|
||||
*
|
||||
* This also powers expect().toMatchObject in `bun:test`
|
||||
*/
|
||||
export function deepMatch(subset: unknown, a: unknown): boolean;
|
||||
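/**
* A tiny sketch of deepMatch() as declared in this hunk; the objects are
* invented example data, and the subset comes first per the signature.
*
* ```ts
* import { deepMatch } from "bun";
*
* deepMatch({ a: 1 }, { a: 1, b: 2 }); // true: every subset property matches
* deepMatch({ a: 2 }, { a: 1, b: 2 }); // false: values differ
* ```
*/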
|
||||
/**
|
||||
* tsconfig.json options supported by Bun
|
||||
*/
|
||||
@@ -1046,226 +1012,6 @@ declare module "bun" {
|
||||
// importSource?: string; // default: "react"
|
||||
// };
|
||||
}
|
||||
namespace Password {
|
||||
export type AlgorithmLabel = "bcrypt" | "argon2id" | "argon2d" | "argon2i";
|
||||
|
||||
export interface Argon2Algorithm {
|
||||
algorithm: "argon2id" | "argon2d" | "argon2i";
|
||||
/**
|
||||
* Memory cost, which defines the memory usage, given in kibibytes.
|
||||
*/
|
||||
memoryCost?: number;
|
||||
/**
|
||||
* Defines the amount of computation realized and therefore the execution
|
||||
* time, given in number of iterations.
|
||||
*/
|
||||
timeCost?: number;
|
||||
}
|
||||
|
||||
export interface BCryptAlgorithm {
|
||||
algorithm: "bcrypt";
|
||||
/**
|
||||
* A number between 4 and 31. The default is 10.
|
||||
*/
|
||||
cost?: number;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Hash and verify passwords using argon2 or bcrypt. The default is argon2.
|
||||
* Password hashing functions are necessarily slow, and this object will
|
||||
* automatically run in a worker thread.
|
||||
*
|
||||
* The underlying implementation of these functions is provided by the Zig
* Standard Library. Thanks to @jedisct1 and other Zig contributors for their
|
||||
* work on this.
|
||||
*
|
||||
* ### Example with argon2
|
||||
*
|
||||
* ```ts
|
||||
* import {password} from "bun";
|
||||
*
|
||||
* const hash = await password.hash("hello world");
|
||||
* const verify = await password.verify("hello world", hash);
|
||||
* console.log(verify); // true
|
||||
* ```
|
||||
*
|
||||
* ### Example with bcrypt
|
||||
* ```ts
|
||||
* import {password} from "bun";
|
||||
*
|
||||
* const hash = await password.hash("hello world", "bcrypt");
|
||||
* // algorithm is optional, will be inferred from the hash if not specified
|
||||
* const verify = await password.verify("hello world", hash, "bcrypt");
|
||||
*
|
||||
* console.log(verify); // true
|
||||
* ```
|
||||
*/
|
||||
export const password: {
|
||||
/**
|
||||
* Verify a password against a previously hashed password.
|
||||
*
|
||||
* @returns true if the password matches, false otherwise
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* import {password} from "bun";
|
||||
* await password.verify("hey", "$argon2id$v=19$m=65536,t=2,p=1$ddbcyBcbAcagei7wSkZFiouX6TqnUQHmTyS5mxGCzeM$+3OIaFatZ3n6LtMhUlfWbgJyNp7h8/oIsLK+LzZO+WI");
|
||||
* // true
|
||||
* ```
|
||||
*
|
||||
* @throws If the algorithm is specified and does not match the hash
|
||||
* @throws If the algorithm is invalid
|
||||
* @throws if the hash is invalid
|
||||
*
|
||||
*/
|
||||
verify(
|
||||
/**
|
||||
* The password to verify.
|
||||
*
|
||||
* If empty, always returns false
|
||||
*/
|
||||
password: StringOrBuffer,
|
||||
/**
|
||||
* Previously hashed password.
|
||||
* If empty, always returns false
|
||||
*/
|
||||
hash: StringOrBuffer,
|
||||
/**
|
||||
* If not specified, the algorithm will be inferred from the hash.
|
||||
*
|
||||
* If specified and the algorithm does not match the hash, this function
|
||||
* throws an error.
|
||||
*/
|
||||
algorithm?: Password.AlgorithmLabel,
|
||||
): Promise<boolean>;
|
||||
/**
|
||||
* Asynchronously hash a password using argon2 or bcrypt. The default is argon2.
|
||||
*
|
||||
* @returns A promise that resolves to the hashed password
|
||||
*
|
||||
* ## Example with argon2
|
||||
* ```ts
|
||||
* import {password} from "bun";
|
||||
* const hash = await password.hash("hello world");
|
||||
* console.log(hash); // $argon2id$v=1...
|
||||
* const verify = await password.verify("hello world", hash);
|
||||
* ```
|
||||
* ## Example with bcrypt
|
||||
* ```ts
|
||||
* import {password} from "bun";
|
||||
* const hash = await password.hash("hello world", "bcrypt");
|
||||
* console.log(hash); // $2b$10$...
|
||||
* const verify = await password.verify("hello world", hash);
|
||||
* ```
|
||||
*/
|
||||
hash(
|
||||
/**
|
||||
* The password to hash
|
||||
*
|
||||
* If empty, this function throws an error. It is usually a programming
|
||||
* mistake to hash an empty password.
|
||||
*/
|
||||
password: StringOrBuffer,
|
||||
/**
|
||||
* @default "argon2id"
|
||||
*
|
||||
* When using bcrypt, passwords exceeding 72 characters will be SHA512'd before
|
||||
*/
|
||||
algorithm?:
|
||||
| Password.AlgorithmLabel
|
||||
| Password.Argon2Algorithm
|
||||
| Password.BCryptAlgorithm,
|
||||
): Promise<string>;
|
||||
|
||||
/**
|
||||
* Synchronously hash and verify passwords using argon2 or bcrypt. The default is argon2.
|
||||
* Warning: password hashing is slow, consider using {@link Bun.password.verify}
|
||||
* instead which runs in a worker thread.
|
||||
*
|
||||
* The underlying implementation of these functions is provided by the Zig
* Standard Library. Thanks to @jedisct1 and other Zig contributors for their
|
||||
* work on this.
|
||||
*
|
||||
* ### Example with argon2
|
||||
*
|
||||
* ```ts
|
||||
* import {password} from "bun";
|
||||
*
|
||||
* const hash = password.hashSync("hello world");
* const verify = password.verifySync("hello world", hash);
|
||||
* console.log(verify); // true
|
||||
* ```
|
||||
*
|
||||
* ### Example with bcrypt
|
||||
* ```ts
|
||||
* import {password} from "bun";
|
||||
*
|
||||
* const hash = password.hashSync("hello world", "bcrypt");
* // algorithm is optional, will be inferred from the hash if not specified
* const verify = password.verifySync("hello world", hash, "bcrypt");
|
||||
*
|
||||
* console.log(verify); // true
|
||||
* ```
|
||||
*/
|
||||
verifySync(
|
||||
password: StringOrBuffer,
|
||||
hash: StringOrBuffer,
|
||||
/**
|
||||
* If not specified, the algorithm will be inferred from the hash.
|
||||
*/
|
||||
algorithm?: Password.AlgorithmLabel,
|
||||
): boolean;
|
||||
|
||||
/**
|
||||
* Synchronously hash and verify passwords using argon2 or bcrypt. The default is argon2.
|
||||
* Warning: password hashing is slow, consider using {@link Bun.password.hash}
|
||||
* instead which runs in a worker thread.
|
||||
*
|
||||
* The underlying implementation of these functions is provided by the Zig
* Standard Library. Thanks to @jedisct1 and other Zig contributors for their
|
||||
* work on this.
|
||||
*
|
||||
* ### Example with argon2
|
||||
*
|
||||
* ```ts
|
||||
* import {password} from "bun";
|
||||
*
|
||||
* const hash = password.hashSync("hello world");
* const verify = password.verifySync("hello world", hash);
|
||||
* console.log(verify); // true
|
||||
* ```
|
||||
*
|
||||
* ### Example with bcrypt
|
||||
* ```ts
|
||||
* import {password} from "bun";
|
||||
*
|
||||
* const hash = password.hashSync("hello world", "bcrypt");
* // algorithm is optional, will be inferred from the hash if not specified
* const verify = password.verifySync("hello world", hash, "bcrypt");
|
||||
*
|
||||
* console.log(verify); // true
|
||||
* ```
|
||||
*/
|
||||
hashSync(
|
||||
/**
|
||||
* The password to hash
|
||||
*
|
||||
* If empty, this function throws an error. It is usually a programming
|
||||
* mistake to hash an empty password.
|
||||
*/
|
||||
password: StringOrBuffer,
|
||||
/**
|
||||
* @default "argon2id"
|
||||
*
|
||||
* When using bcrypt, passwords exceeding 72 characters will be SHA256'd before
|
||||
*/
|
||||
algorithm?:
|
||||
| Password.AlgorithmLabel
|
||||
| Password.Argon2Algorithm
|
||||
| Password.BCryptAlgorithm,
|
||||
): string;
|
||||
};
|
||||
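/**
* A small sketch of passing an options object instead of a plain algorithm
* label, based on the Argon2Algorithm and BCryptAlgorithm shapes declared
* above. The cost values are arbitrary example numbers, not recommended
* defaults.
*
* ```ts
* import { password } from "bun";
*
* const hash = await password.hash("hunter2", {
*   algorithm: "argon2id",
*   memoryCost: 65536, // kibibytes
*   timeCost: 3, // iterations
* });
* console.log(await password.verify("hunter2", hash)); // true
*
* const bcryptHash = password.hashSync("hunter2", { algorithm: "bcrypt", cost: 12 });
* console.log(password.verifySync("hunter2", bcryptHash)); // true
* ```
*/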
|
||||
interface BuildArtifact extends Blob {
|
||||
path: string;
|
||||
@@ -1633,9 +1379,9 @@ declare module "bun" {
|
||||
* ```ts
|
||||
* import { websocket, serve } from "bun";
|
||||
*
|
||||
* serve<{name: string}>({
|
||||
* serve({
|
||||
* port: 3000,
|
||||
* websocket: {
|
||||
* websocket: websocket<{name: string}>({
|
||||
* open: (ws) => {
|
||||
* console.log("Client connected");
|
||||
* },
|
||||
@@ -1645,11 +1391,10 @@ declare module "bun" {
|
||||
* close: (ws) => {
|
||||
* console.log("Client disconnected");
|
||||
* },
|
||||
* },
|
||||
* }),
|
||||
*
|
||||
* fetch(req, server) {
|
||||
* const url = new URL(req.url);
|
||||
* if (url.pathname === "/chat") {
|
||||
* if (req.url === "/chat") {
|
||||
* const upgraded = server.upgrade(req, {
|
||||
* data: {
|
||||
* name: new URL(req.url).searchParams.get("name"),
|
||||
@@ -1864,9 +1609,9 @@ declare module "bun" {
|
||||
*
|
||||
* @example
|
||||
* ```js
|
||||
*import { serve } from "bun";
|
||||
*import { serve, websocket } from "bun";
|
||||
*serve({
|
||||
* websocket: {
|
||||
* websocket: websocket({
|
||||
* open: (ws) => {
|
||||
* console.log("Client connected");
|
||||
* },
|
||||
@@ -1876,10 +1621,9 @@ declare module "bun" {
|
||||
* close: (ws) => {
|
||||
* console.log("Client disconnected");
|
||||
* },
|
||||
* },
|
||||
* }),
|
||||
* fetch(req, server) {
|
||||
* const url = new URL(req.url);
|
||||
* if (url.pathname === "/chat") {
|
||||
* if (req.url === "/chat") {
|
||||
* const upgraded = server.upgrade(req);
|
||||
* if (!upgraded) {
|
||||
* return new Response("Upgrade failed", { status: 400 });
|
||||
@@ -1912,9 +1656,7 @@ declare module "bun" {
|
||||
|
||||
export interface TLSWebSocketServeOptions<WebSocketDataType = undefined>
|
||||
extends WebSocketServeOptions<WebSocketDataType>,
|
||||
TLSOptions {
|
||||
tls?: TLSOptions;
|
||||
}
|
||||
TLSOptions {}
|
||||
export interface Errorlike extends Error {
|
||||
code?: string;
|
||||
errno?: number;
|
||||
@@ -2025,8 +1767,6 @@ declare module "bun" {
|
||||
* The values are SSL options objects.
|
||||
*/
|
||||
serverNames?: Record<string, TLSOptions>;
|
||||
|
||||
tls?: TLSOptions;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -2099,9 +1839,9 @@ declare module "bun" {
|
||||
*
|
||||
* @example
|
||||
* ```js
|
||||
* import { serve } from "bun";
|
||||
* import { serve, websocket } from "bun";
|
||||
* serve({
|
||||
* websocket: {
|
||||
* websocket: websocket({
|
||||
* open: (ws) => {
|
||||
* console.log("Client connected");
|
||||
* },
|
||||
@@ -2111,10 +1851,9 @@ declare module "bun" {
|
||||
* close: (ws) => {
|
||||
* console.log("Client disconnected");
|
||||
* },
|
||||
* },
|
||||
* }),
|
||||
* fetch(req, server) {
|
||||
* const url = new URL(req.url);
|
||||
* if (url.pathname === "/chat") {
|
||||
* if (req.url === "/chat") {
|
||||
* const upgraded = server.upgrade(req);
|
||||
* if (!upgraded) {
|
||||
* return new Response("Upgrade failed", { status: 400 });
|
||||
|
||||
packages/bun-types/fs.d.ts (97 changed lines, vendored)
@@ -19,7 +19,6 @@
|
||||
*/
|
||||
declare module "fs" {
|
||||
import * as stream from "stream";
|
||||
import type EventEmitter from "events";
|
||||
import type { SystemError, ArrayBufferView } from "bun";
|
||||
interface ObjectEncodingOptions {
|
||||
encoding?: BufferEncoding | null | undefined;
|
||||
@@ -3930,102 +3929,6 @@ declare module "fs" {
|
||||
*/
|
||||
recursive?: boolean;
|
||||
}
|
||||
|
||||
export interface FSWatcher extends EventEmitter {
|
||||
/**
|
||||
* Stop watching for changes on the given `fs.FSWatcher`. Once stopped, the `fs.FSWatcher` object is no longer usable.
|
||||
* @since v0.6.8
|
||||
*/
|
||||
close(): void;
|
||||
|
||||
/**
|
||||
* When called, requests that the Node.js event loop not exit so long as the <fs.FSWatcher> is active. Calling watcher.ref() multiple times will have no effect.
|
||||
*/
|
||||
ref(): void;
|
||||
|
||||
/**
|
||||
* When called, the active <fs.FSWatcher> object will not require the Node.js event loop to remain active. If there is no other activity keeping the event loop running, the process may exit before the <fs.FSWatcher> object's callback is invoked. Calling watcher.unref() multiple times will have no effect.
|
||||
*/
|
||||
unref(): void;
|
||||
|
||||
/**
|
||||
* events.EventEmitter
|
||||
* 1. change
|
||||
* 2. error
|
||||
*/
|
||||
addListener(event: string, listener: (...args: any[]) => void): this;
|
||||
addListener(event: 'change', listener: (eventType: string, filename: string | Buffer) => void): this;
|
||||
addListener(event: 'error', listener: (error: Error) => void): this;
|
||||
addListener(event: 'close', listener: () => void): this;
|
||||
on(event: string, listener: (...args: any[]) => void): this;
|
||||
on(event: 'change', listener: (eventType: string, filename: string | Buffer) => void): this;
|
||||
on(event: 'error', listener: (error: Error) => void): this;
|
||||
on(event: 'close', listener: () => void): this;
|
||||
once(event: string, listener: (...args: any[]) => void): this;
|
||||
once(event: 'change', listener: (eventType: string, filename: string | Buffer) => void): this;
|
||||
once(event: 'error', listener: (error: Error) => void): this;
|
||||
once(event: 'close', listener: () => void): this;
|
||||
prependListener(event: string, listener: (...args: any[]) => void): this;
|
||||
prependListener(event: 'change', listener: (eventType: string, filename: string | Buffer) => void): this;
|
||||
prependListener(event: 'error', listener: (error: Error) => void): this;
|
||||
prependListener(event: 'close', listener: () => void): this;
|
||||
prependOnceListener(event: string, listener: (...args: any[]) => void): this;
|
||||
prependOnceListener(event: 'change', listener: (eventType: string, filename: string | Buffer) => void): this;
|
||||
prependOnceListener(event: 'error', listener: (error: Error) => void): this;
|
||||
prependOnceListener(event: 'close', listener: () => void): this;
|
||||
}
|
||||
/**
|
||||
* Watch for changes on `filename`, where `filename` is either a file or a
|
||||
* directory.
|
||||
*
|
||||
* The second argument is optional. If `options` is provided as a string, it
|
||||
* specifies the `encoding`. Otherwise `options` should be passed as an object.
|
||||
*
|
||||
* The listener callback gets two arguments `(eventType, filename)`. `eventType` is either `'rename'` or `'change'`, and `filename` is the name of the file
|
||||
* which triggered the event.
|
||||
*
|
||||
* On most platforms, `'rename'` is emitted whenever a filename appears or
|
||||
* disappears in the directory.
|
||||
*
|
||||
* The listener callback is attached to the `'change'` event fired by `fs.FSWatcher`, but it is not the same thing as the `'change'` value of `eventType`.
|
||||
*
|
||||
* If a `signal` is passed, aborting the corresponding AbortController will close
|
||||
* the returned `fs.FSWatcher`.
|
||||
* @since v0.6.8
|
||||
* @param listener
|
||||
*/
|
||||
export function watch(
|
||||
filename: PathLike,
|
||||
options:
|
||||
| (WatchOptions & {
|
||||
encoding: 'buffer';
|
||||
})
|
||||
| 'buffer',
|
||||
listener?: WatchListener<Buffer>
|
||||
): FSWatcher;
|
||||
/**
|
||||
* Watch for changes on `filename`, where `filename` is either a file or a directory, returning an `FSWatcher`.
|
||||
* @param filename A path to a file or directory. If a URL is provided, it must use the `file:` protocol.
|
||||
* @param options Either the encoding for the filename provided to the listener, or an object optionally specifying encoding, persistent, and recursive options.
|
||||
* If `encoding` is not supplied, the default of `'utf8'` is used.
|
||||
* If `persistent` is not supplied, the default of `true` is used.
|
||||
* If `recursive` is not supplied, the default of `false` is used.
|
||||
*/
|
||||
export function watch(filename: PathLike, options?: WatchOptions | BufferEncoding | null, listener?: WatchListener<string>): FSWatcher;
|
||||
/**
|
||||
* Watch for changes on `filename`, where `filename` is either a file or a directory, returning an `FSWatcher`.
|
||||
* @param filename A path to a file or directory. If a URL is provided, it must use the `file:` protocol.
|
||||
* @param options Either the encoding for the filename provided to the listener, or an object optionally specifying encoding, persistent, and recursive options.
|
||||
* If `encoding` is not supplied, the default of `'utf8'` is used.
|
||||
* If `persistent` is not supplied, the default of `true` is used.
|
||||
* If `recursive` is not supplied, the default of `false` is used.
|
||||
*/
|
||||
export function watch(filename: PathLike, options: WatchOptions | string, listener?: WatchListener<string | Buffer>): FSWatcher;
|
||||
/**
|
||||
* Watch for changes on `filename`, where `filename` is either a file or a directory, returning an `FSWatcher`.
|
||||
* @param filename A path to a file or directory. If a URL is provided, it must use the `file:` protocol.
|
||||
*/
|
||||
export function watch(filename: PathLike, listener?: WatchListener<string>): FSWatcher;
|
||||
}
|
||||
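/**
* A brief sketch of the watch() overloads declared above, using an
* AbortController to stop watching. The "./logs" path is a placeholder for
* the example.
*
* ```ts
* import { watch } from "node:fs";
*
* const controller = new AbortController();
* const watcher = watch(
*   "./logs",
*   { recursive: false, signal: controller.signal },
*   (eventType, filename) => {
*     // eventType is either "rename" or "change"
*     console.log(eventType, filename);
*   },
* );
*
* // Stop watching either by aborting the signal or by closing the watcher.
* controller.abort(); // or: watcher.close();
* ```
*/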
|
||||
declare module "node:fs" {
|
||||
|
||||
packages/bun-types/fs/promises.d.ts (58 changed lines, vendored)
@@ -26,7 +26,6 @@ declare module "fs/promises" {
|
||||
Abortable,
|
||||
RmOptions,
|
||||
RmDirOptions,
|
||||
WatchOptions,
|
||||
} from "node:fs";
|
||||
|
||||
const constants: typeof import("node:fs")["constants"];
|
||||
@@ -710,63 +709,6 @@ declare module "fs/promises" {
|
||||
* To remove a directory recursively, use `fs.promises.rm()` instead, with the `recursive` option set to `true`.
|
||||
*/
|
||||
function rmdir(path: PathLike, options?: RmDirOptions): Promise<void>;
|
||||
|
||||
/**
|
||||
* Returns an async iterator that watches for changes on `filename`, where `filename` is either a file or a directory.
|
||||
*
|
||||
* ```js
|
||||
* const { watch } = require('node:fs/promises');
|
||||
*
|
||||
* const ac = new AbortController();
|
||||
* const { signal } = ac;
|
||||
* setTimeout(() => ac.abort(), 10000);
|
||||
*
|
||||
* (async () => {
|
||||
* try {
|
||||
* const watcher = watch(__filename, { signal });
|
||||
* for await (const event of watcher)
|
||||
* console.log(event);
|
||||
* } catch (err) {
|
||||
* if (err.name === 'AbortError')
|
||||
* return;
|
||||
* throw err;
|
||||
* }
|
||||
* })();
|
||||
* ```
|
||||
*
|
||||
* On most platforms, `'rename'` is emitted whenever a filename appears or
|
||||
* disappears in the directory.
|
||||
*
|
||||
* All the `caveats` for `fs.watch()` also apply to `fsPromises.watch()`.
|
||||
* @since v0.6.8
|
||||
* @return of objects with the properties:
|
||||
*/
|
||||
function watch(
|
||||
filename: PathLike,
|
||||
options:
|
||||
| (WatchOptions & {
|
||||
encoding: 'buffer';
|
||||
})
|
||||
| 'buffer'
|
||||
): AsyncIterable<FileChangeInfo<Buffer>>;
|
||||
/**
|
||||
* Watch for changes on `filename`, where `filename` is either a file or a directory, returning an `FSWatcher`.
|
||||
* @param filename A path to a file or directory. If a URL is provided, it must use the `file:` protocol.
|
||||
* @param options Either the encoding for the filename provided to the listener, or an object optionally specifying encoding, persistent, and recursive options.
|
||||
* If `encoding` is not supplied, the default of `'utf8'` is used.
|
||||
* If `persistent` is not supplied, the default of `true` is used.
|
||||
* If `recursive` is not supplied, the default of `false` is used.
|
||||
*/
|
||||
function watch(filename: PathLike, options?: WatchOptions | BufferEncoding): AsyncIterable<FileChangeInfo<string>>;
|
||||
/**
|
||||
* Watch for changes on `filename`, where `filename` is either a file or a directory, returning an `FSWatcher`.
|
||||
* @param filename A path to a file or directory. If a URL is provided, it must use the `file:` protocol.
|
||||
* @param options Either the encoding for the filename provided to the listener, or an object optionally specifying encoding, persistent, and recursive options.
|
||||
* If `encoding` is not supplied, the default of `'utf8'` is used.
|
||||
* If `persistent` is not supplied, the default of `true` is used.
|
||||
* If `recursive` is not supplied, the default of `false` is used.
|
||||
*/
|
||||
function watch(filename: PathLike, options: WatchOptions | string): AsyncIterable<FileChangeInfo<string>> | AsyncIterable<FileChangeInfo<Buffer>>;
|
||||
}
|
||||
|
||||
declare module "node:fs/promises" {
|
||||
|
||||
packages/bun-types/globals.d.ts (88 changed lines, vendored)
@@ -888,12 +888,6 @@ type ReadableStreamController<T> = ReadableStreamDefaultController<T>;
|
||||
type ReadableStreamDefaultReadResult<T> =
|
||||
| ReadableStreamDefaultReadValueResult<T>
|
||||
| ReadableStreamDefaultReadDoneResult;
|
||||
interface ReadableStreamDefaultReadManyResult<T> {
|
||||
done: boolean;
|
||||
/** Number of bytes */
|
||||
size: number;
|
||||
value: T[];
|
||||
}
|
||||
type ReadableStreamReader<T> = ReadableStreamDefaultReader<T>;
|
||||
|
||||
interface RequestInit {
|
||||
@@ -2267,8 +2261,7 @@ declare var ReadableStreamDefaultController: {
|
||||
interface ReadableStreamDefaultReader<R = any>
|
||||
extends ReadableStreamGenericReader {
|
||||
read(): Promise<ReadableStreamDefaultReadResult<R>>;
|
||||
/** Only available in Bun. If there are multiple chunks in the queue, this will return all of them at the same time. */
|
||||
readMany(): Promise<ReadableStreamDefaultReadManyResult<R>>;
|
||||
readMany(): Promise<ReadableStreamDefaultReadValueResult<R>>;
|
||||
releaseLock(): void;
|
||||
}
|
||||
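/**
* A small sketch of the Bun-only readMany() method declared above, which
* drains every chunk currently queued in a single read. The stream contents
* are illustrative.
*
* ```ts
* const stream = new ReadableStream<string>({
*   start(controller) {
*     controller.enqueue("a");
*     controller.enqueue("b");
*     controller.close();
*   },
* });
*
* const reader = stream.getReader();
* const { value, size, done } = await reader.readMany();
* // value holds every queued chunk at once, e.g. ["a", "b"]
* console.log(value, size, done);
* ```
*/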
|
||||
@@ -3204,82 +3197,3 @@ declare module "*.txt" {
|
||||
var text: string;
|
||||
export = text;
|
||||
}
|
||||
|
||||
interface EventSourceEventMap {
|
||||
error: Event;
|
||||
message: MessageEvent;
|
||||
open: Event;
|
||||
}
|
||||
|
||||
interface EventSource extends EventTarget {
|
||||
onerror: ((this: EventSource, ev: ErrorEvent) => any) | null;
|
||||
onmessage: ((this: EventSource, ev: MessageEvent) => any) | null;
|
||||
onopen: ((this: EventSource, ev: Event) => any) | null;
|
||||
/** Returns the state of this EventSource object's connection. It can have the values described below. */
|
||||
readonly readyState: number;
|
||||
/** Returns the URL providing the event stream. */
|
||||
readonly url: string;
|
||||
/** Returns true if the credentials mode for connection requests to the URL providing the event stream is set to "include", and false otherwise.
|
||||
*
|
||||
* Not supported in Bun
|
||||
*
|
||||
*/
|
||||
readonly withCredentials: boolean;
|
||||
/** Aborts any instances of the fetch algorithm started for this EventSource object, and sets the readyState attribute to CLOSED. */
|
||||
close(): void;
|
||||
readonly CLOSED: number;
|
||||
readonly CONNECTING: number;
|
||||
readonly OPEN: number;
|
||||
addEventListener<K extends keyof EventSourceEventMap>(
|
||||
type: K,
|
||||
listener: (this: EventSource, ev: EventSourceEventMap[K]) => any,
|
||||
options?: boolean | AddEventListenerOptions,
|
||||
): void;
|
||||
addEventListener(
|
||||
type: string,
|
||||
listener: (this: EventSource, event: MessageEvent) => any,
|
||||
options?: boolean | AddEventListenerOptions,
|
||||
): void;
|
||||
addEventListener(
|
||||
type: string,
|
||||
listener: EventListenerOrEventListenerObject,
|
||||
options?: boolean | AddEventListenerOptions,
|
||||
): void;
|
||||
removeEventListener<K extends keyof EventSourceEventMap>(
|
||||
type: K,
|
||||
listener: (this: EventSource, ev: EventSourceEventMap[K]) => any,
|
||||
options?: boolean | EventListenerOptions,
|
||||
): void;
|
||||
removeEventListener(
|
||||
type: string,
|
||||
listener: (this: EventSource, event: MessageEvent) => any,
|
||||
options?: boolean | EventListenerOptions,
|
||||
): void;
|
||||
removeEventListener(
|
||||
type: string,
|
||||
listener: EventListenerOrEventListenerObject,
|
||||
options?: boolean | EventListenerOptions,
|
||||
): void;
|
||||
|
||||
/**
|
||||
* Keep the event loop alive while connection is open or reconnecting
|
||||
*
|
||||
* Not available in browsers
|
||||
*/
|
||||
ref(): void;
|
||||
|
||||
/**
|
||||
* Do not keep the event loop alive while connection is open or reconnecting
|
||||
*
|
||||
* Not available in browsers
|
||||
*/
|
||||
unref(): void;
|
||||
}
|
||||
|
||||
declare var EventSource: {
|
||||
prototype: EventSource;
|
||||
new (url: string | URL, eventSourceInitDict?: EventSourceInit): EventSource;
|
||||
readonly CLOSED: number;
|
||||
readonly CONNECTING: number;
|
||||
readonly OPEN: number;
|
||||
};
|
||||
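/**
* A short sketch of the Bun-specific ref()/unref() methods declared above.
* The URL is a placeholder; the rest follows the declaration.
*
* ```ts
* const source = new EventSource("http://localhost:3000/stream");
*
* source.onmessage = event => {
*   console.log("message:", event.data);
* };
*
* // Do not let this connection alone keep the process alive.
* source.unref();
*
* // Later: keep the event loop alive again, or close the connection.
* source.ref();
* source.close();
* ```
*/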
|
||||
packages/bun-types/http.d.ts (2 changed lines, vendored)
@@ -987,7 +987,7 @@ declare module "http" {
|
||||
* in the response to be dropped and the socket to be destroyed.
|
||||
* @deprecated Since v14.1.0, v13.14.0 - Use `destroy` instead.
|
||||
*/
|
||||
abort(): void;
|
||||
// abort(): void;
|
||||
/**
|
||||
* Once a socket is assigned to this request and is connected `socket.setTimeout()` will be called.
|
||||
* @param timeout Milliseconds before a request times out.
|
||||
|
||||
@@ -75,8 +75,6 @@ const tsConfig = {
|
||||
skipLibCheck: true,
|
||||
jsx: "react-jsx",
|
||||
allowImportingTsExtensions: true,
|
||||
emitDeclarationOnly: true,
|
||||
composite: true,
|
||||
allowSyntheticDefaultImports: true,
|
||||
forceConsistentCasingInFileNames: true,
|
||||
allowJs: true,
|
||||
|
||||
packages/bun-types/sqlite.d.ts (9 changed lines, vendored)
@@ -579,9 +579,7 @@ declare module "bun:sqlite" {
|
||||
/**
|
||||
* Execute the prepared statement and return the results as an array of arrays.
|
||||
*
|
||||
* In Bun v0.6.7 and earlier, this method returned `null` if there were no
|
||||
* results instead of `[]`. This was changed in v0.6.8 to align
|
||||
* more with what people expect.
|
||||
* This is a little faster than {@link all}.
|
||||
*
|
||||
* @param params optional values to bind to the statement. If omitted, the statement is run with the last bound values or no parameters if there are none.
|
||||
*
|
||||
@@ -597,15 +595,12 @@ declare module "bun:sqlite" {
|
||||
*
|
||||
* stmt.values("foo");
|
||||
* // => [['foo']]
|
||||
*
|
||||
* stmt.values("not-found");
|
||||
* // => []
|
||||
* ```
|
||||
*
|
||||
* The following types can be used when binding parameters:
|
||||
*
|
||||
* | JavaScript type | SQLite type |
|
||||
* | ---------------|-------------|
|
||||
* | -------------- | ----------- |
|
||||
* | `string` | `TEXT` |
|
||||
* | `number` | `INTEGER` or `DECIMAL` |
|
||||
* | `boolean` | `INTEGER` (1 or 0) |
|
||||
|
||||
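/**
* A compact sketch of Statement.values() as documented above, including the
* v0.6.8 change from `null` to `[]` for empty results. The table and rows are
* invented for the example.
*
* ```ts
* import { Database } from "bun:sqlite";
*
* const db = new Database(":memory:");
* db.run("CREATE TABLE cats (name TEXT)");
* db.run("INSERT INTO cats VALUES ('foo')");
*
* const stmt = db.prepare("SELECT name FROM cats WHERE name = ?");
* console.log(stmt.values("foo")); // [["foo"]]
* console.log(stmt.values("not-found")); // []
* ```
*/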
packages/bun-types/stream.d.ts (151 changed lines, vendored)
@@ -46,7 +46,22 @@ declare module "stream" {
|
||||
encoding?: BufferEncoding | undefined;
|
||||
read?(this: Readable, size: number): void;
|
||||
}
|
||||
class Readable<R = any> extends Stream {
|
||||
class Readable<R = any> extends Stream implements ReadableStream {
|
||||
// TODO: improve type later
|
||||
values: any;
|
||||
|
||||
readonly locked: boolean;
|
||||
cancel(reason?: any): Promise<void>;
|
||||
getReader(): ReadableStreamDefaultReader<R>;
|
||||
pipeThrough<T>(
|
||||
transform: ReadableWritablePair<T, R>,
|
||||
options?: StreamPipeOptions,
|
||||
): ReadableStream<T>;
|
||||
pipeTo(
|
||||
destination: WritableStream<R>,
|
||||
options?: StreamPipeOptions,
|
||||
): Promise<void>;
|
||||
tee(): [ReadableStream<R>, ReadableStream<R>];
|
||||
forEach(
|
||||
callbackfn: (
|
||||
value: any,
|
||||
@@ -56,23 +71,12 @@ declare module "stream" {
|
||||
thisArg?: any,
|
||||
): void;
|
||||
/**
|
||||
* A utility method for creating a `Readable` from a web `ReadableStream`.
|
||||
* @since v17.0.0
|
||||
* @experimental
|
||||
* A utility method for creating Readable Streams out of iterators.
|
||||
*/
|
||||
static fromWeb(
|
||||
readableStream: ReadableStream,
|
||||
options?: Pick<
|
||||
ReadableOptions,
|
||||
"encoding" | "highWaterMark" | "objectMode" | "signal"
|
||||
>,
|
||||
static from(
|
||||
iterable: Iterable<any> | AsyncIterable<any>,
|
||||
options?: ReadableOptions,
|
||||
): Readable;
|
||||
/**
|
||||
* A utility method for creating a web `ReadableStream` from a `Readable`.
|
||||
* @since v17.0.0
|
||||
* @experimental
|
||||
*/
|
||||
static toWeb(streamReadable: Readable): ReadableStream;
|
||||
/**
|
||||
* Returns whether the stream has been read from or cancelled.
|
||||
*/
|
||||
@@ -508,25 +512,11 @@ declare module "stream" {
|
||||
): void;
|
||||
final?(this: Writable, callback: (error?: Error | null) => void): void;
|
||||
}
|
||||
class Writable<W = any> extends Stream {
|
||||
/**
|
||||
* A utility method for creating a `Writable` from a web `WritableStream`.
|
||||
* @since v17.0.0
|
||||
* @experimental
|
||||
*/
|
||||
static fromWeb(
|
||||
writableStream: WritableStream,
|
||||
options?: Pick<
|
||||
WritableOptions,
|
||||
"decodeStrings" | "highWaterMark" | "objectMode" | "signal"
|
||||
>,
|
||||
): Writable;
|
||||
/**
|
||||
* A utility method for creating a web `WritableStream` from a `Writable`.
|
||||
* @since v17.0.0
|
||||
* @experimental
|
||||
*/
|
||||
static toWeb(streamWritable: Writable): WritableStream;
|
||||
class Writable<W = any> extends Stream implements WritableStream {
|
||||
readonly locked: boolean;
|
||||
abort(reason?: any): Promise<void>;
|
||||
close(): Promise<void>;
|
||||
getWriter(): WritableStreamDefaultWriter<W>;
|
||||
/**
|
||||
* Is `true` if it is safe to call `writable.write()`, which means
|
||||
* the stream has not been destroyed, errored or ended.
|
||||
@@ -644,11 +634,11 @@ declare module "stream" {
|
||||
* @return `false` if the stream wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`.
|
||||
*/
|
||||
write(
|
||||
chunk: W,
|
||||
chunk: any,
|
||||
callback?: (error: Error | null | undefined) => void,
|
||||
): boolean;
|
||||
write(
|
||||
chunk: W,
|
||||
chunk: any,
|
||||
encoding: BufferEncoding,
|
||||
callback?: (error: Error | null | undefined) => void,
|
||||
): boolean;
|
||||
@@ -861,21 +851,28 @@ declare module "stream" {
|
||||
* * `zlib streams`
|
||||
* * `crypto streams`
|
||||
*/
|
||||
type Duplex<R = any> = Readable<R> &
|
||||
Writable<R> & {
|
||||
/**
|
||||
* If `false` then the stream will automatically end the writable side when the
|
||||
* readable side ends. Set initially by the `allowHalfOpen` constructor option,
|
||||
* which defaults to `false`.
|
||||
*
|
||||
* This can be changed manually to change the half-open behavior of an existing `Duplex` stream instance, but must be changed before the `'end'` event is
|
||||
* emitted.
|
||||
* @since v0.9.4
|
||||
*/
|
||||
allowHalfOpen: boolean;
|
||||
};
|
||||
interface DuplexConstructor {
|
||||
new <T = any>(opts?: DuplexOptions): Duplex<T>;
|
||||
class Duplex extends Readable implements Writable {
|
||||
readonly writable: boolean;
|
||||
readonly writableEnded: boolean;
|
||||
readonly writableFinished: boolean;
|
||||
readonly writableHighWaterMark: number;
|
||||
readonly writableLength: number;
|
||||
readonly writableObjectMode: boolean;
|
||||
readonly writableCorked: number;
|
||||
/**
|
||||
* If `false` then the stream will automatically end the writable side when the
|
||||
* readable side ends. Set initially by the `allowHalfOpen` constructor option,
|
||||
* which defaults to `false`.
|
||||
*
|
||||
* This can be changed manually to change the half-open behavior of an existing `Duplex` stream instance, but must be changed before the `'end'` event is
|
||||
* emitted.
|
||||
* @since v0.9.4
|
||||
*/
|
||||
allowHalfOpen: boolean;
|
||||
constructor(opts?: DuplexOptions);
|
||||
abort(reason?: any): Promise<void>;
|
||||
close(): Promise<void>;
|
||||
getWriter(): WritableStreamDefaultWriter<any>;
|
||||
/**
|
||||
* A utility method for creating duplex streams.
|
||||
*
|
||||
@@ -897,7 +894,7 @@ declare module "stream" {
|
||||
*
|
||||
* @since v16.8.0
|
||||
*/
|
||||
from(
|
||||
static from(
|
||||
src:
|
||||
| Stream
|
||||
| Blob
|
||||
@@ -909,19 +906,39 @@ declare module "stream" {
|
||||
| Promise<any>
|
||||
| Object,
|
||||
): Duplex;
|
||||
fromWeb<T = any>(
|
||||
pair: {
|
||||
readable: ReadableStream<T>;
|
||||
writable: WritableStream<T>;
|
||||
},
|
||||
options: DuplexOptions,
|
||||
): Duplex<T>;
|
||||
toWeb<T>(stream: Duplex<T>): {
|
||||
readable: ReadableStream<T>;
|
||||
writable: WritableStream<T>;
|
||||
};
|
||||
_write(
|
||||
chunk: any,
|
||||
encoding: BufferEncoding,
|
||||
callback: (error?: Error | null) => void,
|
||||
): void;
|
||||
_writev?(
|
||||
chunks: Array<{
|
||||
chunk: any;
|
||||
encoding: BufferEncoding;
|
||||
}>,
|
||||
callback: (error?: Error | null) => void,
|
||||
): void;
|
||||
_destroy(
|
||||
error: Error | null,
|
||||
callback: (error: Error | null) => void,
|
||||
): void;
|
||||
_final(callback: (error?: Error | null) => void): void;
|
||||
write(
|
||||
chunk: any,
|
||||
encoding?: BufferEncoding,
|
||||
cb?: (error: Error | null | undefined) => void,
|
||||
): boolean;
|
||||
write(
|
||||
chunk: any,
|
||||
cb?: (error: Error | null | undefined) => void,
|
||||
): boolean;
|
||||
setDefaultEncoding(encoding: BufferEncoding): this;
|
||||
end(cb?: () => void): this;
|
||||
end(chunk: any, cb?: () => void): this;
|
||||
end(chunk: any, encoding?: BufferEncoding, cb?: () => void): this;
|
||||
cork(): void;
|
||||
uncork(): void;
|
||||
}
|
||||
var Duplex: DuplexConstructor;
|
||||
type TransformCallback = (error?: Error | null, data?: any) => void;
|
||||
interface TransformOptions extends DuplexOptions {
|
||||
construct?(
|
||||
@@ -968,7 +985,7 @@ declare module "stream" {
|
||||
* * `crypto streams`
|
||||
* @since v0.9.4
|
||||
*/
|
||||
class Transform<T = any> extends Duplex<T> {
|
||||
class Transform extends Duplex {
|
||||
constructor(opts?: TransformOptions);
|
||||
_transform(
|
||||
chunk: any,
|
||||
@@ -981,7 +998,7 @@ declare module "stream" {
|
||||
* The `stream.PassThrough` class is a trivial implementation of a `Transform` stream that simply passes the input bytes across to the output. Its purpose is
|
||||
* primarily for examples and testing, but there are some use cases where `stream.PassThrough` is useful as a building block for novel sorts of streams.
|
||||
*/
|
||||
class PassThrough<T = any> extends Transform<T> {}
|
||||
class PassThrough extends Transform {}
|
||||
/**
|
||||
* Attaches an AbortSignal to a readable or writeable stream. This lets code
|
||||
* control stream destruction using an `AbortController`.
|
||||
|
||||
@@ -1,31 +0,0 @@
|
||||
import { expectType } from "tsd";
|
||||
import { mock, jest } from "bun:test";
|
||||
|
||||
const mock1 = mock((arg: string) => {
|
||||
return arg.length;
|
||||
});
|
||||
|
||||
const arg1 = mock1("1");
|
||||
expectType<number>(arg1);
|
||||
mock;
|
||||
|
||||
type arg2 = jest.Spied<() => string>;
|
||||
declare var arg2: arg2;
|
||||
arg2.mock.calls[0];
|
||||
mock;
|
||||
|
||||
// @ts-expect-error
|
||||
jest.fn<() => Promise<string>>().mockReturnValue("asdf");
|
||||
// @ts-expect-error
|
||||
jest.fn<() => string>().mockReturnValue(24);
|
||||
jest.fn<() => string>().mockReturnValue("24");
|
||||
|
||||
jest.fn<() => Promise<string>>().mockResolvedValue("asdf");
|
||||
// @ts-expect-error
|
||||
jest.fn<() => string>().mockResolvedValue(24);
|
||||
// @ts-expect-error
|
||||
jest.fn<() => string>().mockResolvedValue("24");
|
||||
|
||||
jest.fn().mockClear();
|
||||
jest.fn().mockReset();
|
||||
jest.fn().mockRejectedValueOnce(new Error());
|
||||
@@ -39,8 +39,7 @@ type User = {
|
||||
|
||||
Bun.serve<User>({
|
||||
fetch(req, server) {
|
||||
const url = new URL(req.url);
|
||||
if (url.pathname === "/chat") {
|
||||
if (req.url === "/chat") {
|
||||
if (
|
||||
server.upgrade(req, {
|
||||
data: {
|
||||
|
||||
@@ -1,287 +0,0 @@
|
||||
/// TODO: delete this once we've upgraded Zig and https://github.com/ziglang/zig/pull/15985 is merged.
|
||||
const std = @import("std");
|
||||
const assert = std.debug.assert;
|
||||
const mem = std.mem;
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
/// This allocator takes an existing allocator, wraps it, and provides an interface
|
||||
/// where you can allocate without freeing, and then free it all together.
|
||||
pub const ArenaAllocator = struct {
|
||||
child_allocator: Allocator,
|
||||
state: State,
|
||||
|
||||
/// Inner state of ArenaAllocator. Can be stored rather than the entire ArenaAllocator
|
||||
/// as a memory-saving optimization.
|
||||
pub const State = struct {
|
||||
buffer_list: std.SinglyLinkedList(usize) = .{},
|
||||
end_index: usize = 0,
|
||||
|
||||
pub fn promote(self: State, child_allocator: Allocator) ArenaAllocator {
|
||||
return .{
|
||||
.child_allocator = child_allocator,
|
||||
.state = self,
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
pub fn allocator(self: *ArenaAllocator) Allocator {
|
||||
return .{
|
||||
.ptr = self,
|
||||
.vtable = &.{
|
||||
.alloc = alloc,
|
||||
.resize = resize,
|
||||
.free = free,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
const BufNode = std.SinglyLinkedList(usize).Node;
|
||||
|
||||
pub fn init(child_allocator: Allocator) ArenaAllocator {
|
||||
return (State{}).promote(child_allocator);
|
||||
}
|
||||
|
||||
pub fn deinit(self: ArenaAllocator) void {
|
||||
// NOTE: When changing this, make sure `reset()` is adjusted accordingly!
|
||||
|
||||
var it = self.state.buffer_list.first;
|
||||
while (it) |node| {
|
||||
// this has to occur before the free because the free frees node
|
||||
const next_it = node.next;
|
||||
const align_bits = std.math.log2_int(usize, @alignOf(BufNode));
|
||||
const alloc_buf = @ptrCast([*]u8, node)[0..node.data];
|
||||
self.child_allocator.rawFree(alloc_buf, align_bits, @returnAddress());
|
||||
it = next_it;
|
||||
}
|
||||
}
|
||||
|
||||
pub const ResetMode = union(enum) {
|
||||
/// Releases all allocated memory in the arena.
|
||||
free_all,
|
||||
/// This will pre-heat the arena for future allocations by allocating a
|
||||
/// large enough buffer for all previously done allocations.
|
||||
/// Preheating will speed up the allocation process by invoking the backing allocator
|
||||
/// less often than before. If `reset()` is used in a loop, this means that after the
|
||||
/// biggest operation, no memory allocations are performed anymore.
|
||||
retain_capacity,
|
||||
/// This is the same as `retain_capacity`, but the memory will be shrunk to
|
||||
/// this value if it exceeds the limit.
|
||||
retain_with_limit: usize,
|
||||
};
|
||||
/// Queries the current memory use of this arena.
|
||||
/// This will **not** include the storage required for internal keeping.
|
||||
pub fn queryCapacity(self: ArenaAllocator) usize {
|
||||
var size: usize = 0;
|
||||
var it = self.state.buffer_list.first;
|
||||
while (it) |node| : (it = node.next) {
|
||||
// Compute the actually allocated size excluding the
|
||||
// linked list node.
|
||||
size += node.data - @sizeOf(BufNode);
|
||||
}
|
||||
return size;
|
||||
}
|
||||
/// Resets the arena allocator and frees all allocated memory.
|
||||
///
|
||||
/// `mode` defines how the currently allocated memory is handled.
|
||||
/// See the variant documentation for `ResetMode` for the effects of each mode.
|
||||
///
|
||||
/// The function will return whether the reset operation was successful or not.
|
||||
/// If the reallocation failed `false` is returned. The arena will still be fully
|
||||
/// functional in that case, all memory is released. Future allocations just might
|
||||
/// be slower.
|
||||
///
|
||||
/// NOTE: If `mode` is `free_all`, the function will always return `true`.
|
||||
pub fn reset(self: *ArenaAllocator, mode: ResetMode) bool {
|
||||
// Some words on the implementation:
|
||||
// The reset function can be implemented with two basic approaches:
|
||||
// - Counting how many bytes were allocated since the last reset, and storing that
|
||||
// information in State. This will make reset fast and alloc only a teeny tiny bit
|
||||
// slower.
|
||||
// - Counting how many bytes were allocated by iterating the chunk linked list. This
|
||||
// will make reset slower, but alloc() keeps the same speed when reset() as if reset()
|
||||
// would not exist.
|
||||
//
|
||||
// The second variant was chosen for implementation, as with more and more calls to reset(),
|
||||
// the function will get faster and faster. At one point, the complexity of the function
|
||||
// will drop to amortized O(1), as we're only ever having a single chunk that will not be
|
||||
// reallocated, and we're not even touching the backing allocator anymore.
|
||||
//
|
||||
// Thus, only the first handful of calls to reset() will actually need to iterate the linked
|
||||
// list, all future calls are just taking the first node, and only resetting the `end_index`
|
||||
// value.
|
||||
const requested_capacity = switch (mode) {
|
||||
.retain_capacity => self.queryCapacity(),
|
||||
.retain_with_limit => |limit| @min(limit, self.queryCapacity()),
|
||||
.free_all => 0,
|
||||
};
|
||||
if (requested_capacity == 0) {
|
||||
// just reset when we don't have anything to reallocate
|
||||
self.deinit();
|
||||
self.state = State{};
|
||||
return true;
|
||||
}
|
||||
const total_size = requested_capacity + @sizeOf(BufNode);
|
||||
const align_bits = std.math.log2_int(usize, @alignOf(BufNode));
|
||||
// Free all nodes except for the last one
|
||||
var it = self.state.buffer_list.first;
|
||||
        const maybe_first_node = while (it) |node| {
            // this has to occur before the free because the free frees node
            const next_it = node.next;
            if (next_it == null)
                break node;
            const alloc_buf = @ptrCast([*]u8, node)[0..node.data];
            self.child_allocator.rawFree(alloc_buf, align_bits, @returnAddress());
            it = next_it;
        } else null;
        std.debug.assert(maybe_first_node == null or maybe_first_node.?.next == null);
        // reset the state before we try resizing the buffers, so we definitely have reset the arena to 0.
        self.state.end_index = 0;
        if (maybe_first_node) |first_node| {
            self.state.buffer_list.first = first_node;
            // perfect, no need to invoke the child_allocator
            if (first_node.data == total_size)
                return true;
            const first_alloc_buf = @ptrCast([*]u8, first_node)[0..first_node.data];
            if (self.child_allocator.rawResize(first_alloc_buf, align_bits, total_size, @returnAddress())) {
                // successful resize
                first_node.data = total_size;
            } else {
                // manual realloc
                const new_ptr = self.child_allocator.rawAlloc(total_size, align_bits, @returnAddress()) orelse {
                    // we failed to preheat the arena properly, signal this to the user.
                    return false;
                };
                self.child_allocator.rawFree(first_alloc_buf, align_bits, @returnAddress());
                const node = @ptrCast(*BufNode, @alignCast(@alignOf(BufNode), new_ptr));
                node.* = .{ .data = total_size };
                self.state.buffer_list.first = node;
            }
        }
        return true;
    }

    fn createNode(self: *ArenaAllocator, prev_len: usize, minimum_size: usize) ?*BufNode {
        const actual_min_size = minimum_size + (@sizeOf(BufNode) + 16);
        const big_enough_len = prev_len + actual_min_size;
        const len = big_enough_len + big_enough_len / 2;
        const log2_align = comptime std.math.log2_int(usize, @alignOf(BufNode));
        const ptr = self.child_allocator.rawAlloc(len, log2_align, @returnAddress()) orelse
            return null;
        const buf_node = @ptrCast(*BufNode, @alignCast(@alignOf(BufNode), ptr));
        buf_node.* = .{ .data = len };
        self.state.buffer_list.prepend(buf_node);
        self.state.end_index = 0;
        return buf_node;
    }

    fn alloc(ctx: *anyopaque, n: usize, log2_ptr_align: u8, ra: usize) ?[*]u8 {
        const self = @ptrCast(*ArenaAllocator, @alignCast(@alignOf(ArenaAllocator), ctx));
        _ = ra;

        const ptr_align = @as(usize, 1) << @intCast(Allocator.Log2Align, log2_ptr_align);
        var cur_node = if (self.state.buffer_list.first) |first_node|
            first_node
        else
            (self.createNode(0, n + ptr_align) orelse return null);
        while (true) {
            const cur_alloc_buf = @ptrCast([*]u8, cur_node)[0..cur_node.data];
            const cur_buf = cur_alloc_buf[@sizeOf(BufNode)..];
            const addr = @intFromPtr(cur_buf.ptr) + self.state.end_index;
            const adjusted_addr = mem.alignForward(usize, addr, ptr_align);
            const adjusted_index = self.state.end_index + (adjusted_addr - addr);
            const new_end_index = adjusted_index + n;

            if (new_end_index <= cur_buf.len) {
                const result = cur_buf[adjusted_index..new_end_index];
                self.state.end_index = new_end_index;
                return result.ptr;
            }

            const bigger_buf_size = @sizeOf(BufNode) + new_end_index;
            const log2_align = comptime std.math.log2_int(usize, @alignOf(BufNode));
            if (self.child_allocator.rawResize(cur_alloc_buf, log2_align, bigger_buf_size, @returnAddress())) {
                cur_node.data = bigger_buf_size;
            } else {
                // Allocate a new node if that's not possible
                cur_node = self.createNode(cur_buf.len, n + ptr_align) orelse return null;
            }
        }
    }

    fn resize(ctx: *anyopaque, buf: []u8, log2_buf_align: u8, new_len: usize, ret_addr: usize) bool {
        const self = @ptrCast(*ArenaAllocator, @alignCast(@alignOf(ArenaAllocator), ctx));
        _ = log2_buf_align;
        _ = ret_addr;

        const cur_node = self.state.buffer_list.first orelse return false;
        const cur_buf = @ptrCast([*]u8, cur_node)[@sizeOf(BufNode)..cur_node.data];
        if (@intFromPtr(cur_buf.ptr) + self.state.end_index != @intFromPtr(buf.ptr) + buf.len) {
            // It's not the most recent allocation, so it cannot be expanded,
            // but it's fine if they want to make it smaller.
            return new_len <= buf.len;
        }

        if (buf.len >= new_len) {
            self.state.end_index -= buf.len - new_len;
            return true;
        } else if (cur_buf.len - self.state.end_index >= new_len - buf.len) {
            self.state.end_index += new_len - buf.len;
            return true;
        } else {
            return false;
        }
    }

    fn free(ctx: *anyopaque, buf: []u8, log2_buf_align: u8, ret_addr: usize) void {
        _ = log2_buf_align;
        _ = ret_addr;

        const self = @ptrCast(*ArenaAllocator, @alignCast(@alignOf(ArenaAllocator), ctx));

        const cur_node = self.state.buffer_list.first orelse return;
        const cur_buf = @ptrCast([*]u8, cur_node)[@sizeOf(BufNode)..cur_node.data];

        if (@intFromPtr(cur_buf.ptr) + self.state.end_index == @intFromPtr(buf.ptr) + buf.len) {
            self.state.end_index -= buf.len;
        }
    }
};

test "ArenaAllocator (reset with preheating)" {
    var arena_allocator = ArenaAllocator.init(std.testing.allocator);
    defer arena_allocator.deinit();
    // provides some variance in the allocated data
    var rng_src = std.rand.DefaultPrng.init(19930913);
    const random = rng_src.random();
    var rounds: usize = 25;
    while (rounds > 0) {
        rounds -= 1;
        _ = arena_allocator.reset(.retain_capacity);
        var alloced_bytes: usize = 0;
        var total_size: usize = random.intRangeAtMost(usize, 256, 16384);
        while (alloced_bytes < total_size) {
            const size = random.intRangeAtMost(usize, 16, 256);
            const alignment = 32;
            const slice = try arena_allocator.allocator().alignedAlloc(u8, alignment, size);
            try std.testing.expect(std.mem.isAligned(@intFromPtr(slice.ptr), alignment));
            try std.testing.expectEqual(size, slice.len);
            alloced_bytes += slice.len;
        }
    }
}

test "ArenaAllocator (reset while retaining a buffer)" {
    var arena_allocator = ArenaAllocator.init(std.testing.allocator);
    defer arena_allocator.deinit();
    const a = arena_allocator.allocator();

    // Create two internal buffers
    _ = try a.alloc(u8, 1);
    _ = try a.alloc(u8, 1000);

    // Check that we have at least two buffers
    try std.testing.expect(arena_allocator.state.buffer_list.first.?.next != null);

    // This retains the first allocated buffer
    try std.testing.expect(arena_allocator.reset(.{ .retain_with_limit = 1 }));
}
|
||||
@@ -55,7 +55,7 @@ pub const version: @import("./install/semver.zig").Version = .{

pub fn setThreadName(name: StringTypes.stringZ) void {
    if (Environment.isLinux) {
        _ = std.os.prctl(.SET_NAME, .{@intFromPtr(name.ptr)}) catch 0;
        _ = std.os.prctl(.SET_NAME, .{@ptrToInt(name.ptr)}) catch 0;
    } else if (Environment.isMac) {
        _ = std.c.pthread_setname_np(name);
    }

@@ -2,18 +2,19 @@ const std = @import("std");

const FeatureFlags = @import("./feature_flags.zig");
const Environment = @import("./env.zig");
const Wyhash = std.hash.Wyhash;
const FixedBufferAllocator = std.heap.FixedBufferAllocator;
const constStrToU8 = @import("root").bun.constStrToU8;
const bun = @import("root").bun;
pub fn isSliceInBuffer(slice: anytype, buffer: anytype) bool {
    return (@intFromPtr(&buffer) <= @intFromPtr(slice.ptr) and (@intFromPtr(slice.ptr) + slice.len) <= (@intFromPtr(buffer) + buffer.len));
    return (@ptrToInt(&buffer) <= @ptrToInt(slice.ptr) and (@ptrToInt(slice.ptr) + slice.len) <= (@ptrToInt(buffer) + buffer.len));
}

pub fn sliceRange(slice: []const u8, buffer: []const u8) ?[2]u32 {
    return if (@intFromPtr(buffer.ptr) <= @intFromPtr(slice.ptr) and
        (@intFromPtr(slice.ptr) + slice.len) <= (@intFromPtr(buffer.ptr) + buffer.len))
    return if (@ptrToInt(buffer.ptr) <= @ptrToInt(slice.ptr) and
        (@ptrToInt(slice.ptr) + slice.len) <= (@ptrToInt(buffer.ptr) + buffer.len))
        [2]u32{
            @truncate(u32, @intFromPtr(slice.ptr) - @intFromPtr(buffer.ptr)),
            @truncate(u32, @ptrToInt(slice.ptr) - @ptrToInt(buffer.ptr)),
            @truncate(u32, slice.len),
        }
    else

@@ -52,6 +53,7 @@ pub const Result = struct {
        return r.index >= count;
    }
};
const Seed = 999;

pub const NotFound = IndexType{
    .index = std.math.maxInt(u31),

@@ -486,7 +488,7 @@ pub fn BSSMap(comptime ValueType: type, comptime count: anytype, comptime store_

    pub fn getOrPut(self: *Self, denormalized_key: []const u8) !Result {
        const key = if (comptime remove_trailing_slashes) std.mem.trimRight(u8, denormalized_key, "/") else denormalized_key;
        const _key = bun.hash(key);
        const _key = Wyhash.hash(Seed, key);

        self.mutex.lock();
        defer self.mutex.unlock();

@@ -514,7 +516,7 @@ pub fn BSSMap(comptime ValueType: type, comptime count: anytype, comptime store_

    pub fn get(self: *Self, denormalized_key: []const u8) ?*ValueType {
        const key = if (comptime remove_trailing_slashes) std.mem.trimRight(u8, denormalized_key, "/") else denormalized_key;
        const _key = bun.hash(key);
        const _key = Wyhash.hash(Seed, key);
        self.mutex.lock();
        defer self.mutex.unlock();
        const index = self.index.get(_key) orelse return null;

@@ -575,7 +577,7 @@ pub fn BSSMap(comptime ValueType: type, comptime count: anytype, comptime store_

        const key = if (comptime remove_trailing_slashes) std.mem.trimRight(u8, denormalized_key, "/") else denormalized_key;

        const _key = bun.hash(key);
        const _key = Wyhash.hash(Seed, key);
        _ = self.index.remove(_key);
        // const index = self.index.get(_key) orelse return;
        // switch (index) {

@@ -201,7 +201,7 @@ pub fn Writer(comptime WritableStream: type) type {
    }

    pub inline fn writeEnum(this: *Self, val: anytype) !void {
        try this.writeInt(@intFromEnum(val));
        try this.writeInt(@enumToInt(val));
    }

    pub fn writeValue(this: *Self, comptime SliceType: type, slice: SliceType) !void {

@@ -261,7 +261,7 @@ pub const GenerateHeader = struct {
    pub const GeneratePlatform = struct {
        var osversion_name: [32]u8 = undefined;
        pub fn forMac() Analytics.Platform {
            @memset(&osversion_name, 0);
            @memset(&osversion_name, 0, osversion_name.len);

            var platform = Analytics.Platform{ .os = Analytics.OperatingSystem.macos, .version = &[_]u8{}, .arch = platform_arch };
            var len = osversion_name.len - 1;

@@ -340,7 +340,7 @@ pub const GenerateHeader = struct {
        offset = std.mem.indexOfScalar(u8, out, '"') orelse return Analytics.Uint64{};
        out = out[0..offset];

        const hash = bun.hash(std.mem.trim(u8, out, "\n\r "));
        const hash = std.hash.Wyhash.hash(0, std.mem.trim(u8, out, "\n\r "));
        var hash_bytes = std.mem.asBytes(&hash);
        return Analytics.Uint64{
            .first = std.mem.readIntNative(u32, hash_bytes[0..4]),

@@ -357,7 +357,7 @@ pub const GenerateHeader = struct {
        defer file.close();
        var read_count = try file.read(&linux_machine_id);

        const hash = bun.hash(std.mem.trim(u8, linux_machine_id[0..read_count], "\n\r "));
        const hash = std.hash.Wyhash.hash(0, std.mem.trim(u8, linux_machine_id[0..read_count], "\n\r "));
        var hash_bytes = std.mem.asBytes(&hash);
        return Analytics.Uint64{
            .first = std.mem.readIntNative(u32, hash_bytes[0..4]),

@@ -540,8 +540,8 @@ pub const EventList = struct {
    }

        @atomicStore(bool, &is_stuck, retry_remaining == 0, .Release);
        stuck_count += @intCast(u8, @intFromBool(retry_remaining == 0));
        stuck_count *= @intCast(u8, @intFromBool(retry_remaining == 0));
        stuck_count += @intCast(u8, @boolToInt(retry_remaining == 0));
        stuck_count *= @intCast(u8, @boolToInt(retry_remaining == 0));
        disabled = disabled or stuck_count > 4;

        this.in_buffer.reset();
|
||||
34
src/api/demo/.gitignore
vendored
Normal file
@@ -0,0 +1,34 @@
|
||||
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.

# dependencies
/node_modules
/.pnp
.pnp.js

# testing
/coverage

# next.js
/.next/
/out/

# production
/build

# misc
.DS_Store
*.pem

# debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*

# local env files
.env.local
.env.development.local
.env.test.local
.env.production.local

# vercel
.vercel
|
||||
34
src/api/demo/README.md
Normal file
@@ -0,0 +1,34 @@
|
||||
This is a [Next.js](https://nextjs.org/) project bootstrapped with [`create-next-app`](https://github.com/vercel/next.js/tree/canary/packages/create-next-app).

## Getting Started

First, run the development server:

```bash
npm run dev
# or
yarn dev
```

Open [http://localhost:3000](http://localhost:3000) with your browser to see the result.

You can start editing the page by modifying `pages/index.js`. The page auto-updates as you edit the file.

[API routes](https://nextjs.org/docs/api-routes/introduction) can be accessed on [http://localhost:3000/api/hello](http://localhost:3000/api/hello). This endpoint can be edited in `pages/api/hello.js`.

The `pages/api` directory is mapped to `/api/*`. Files in this directory are treated as [API routes](https://nextjs.org/docs/api-routes/introduction) instead of React pages.

## Learn More

To learn more about Next.js, take a look at the following resources:

- [Next.js Documentation](https://nextjs.org/docs) - learn about Next.js features and API.
- [Learn Next.js](https://nextjs.org/learn) - an interactive Next.js tutorial.

You can check out [the Next.js GitHub repository](https://github.com/vercel/next.js/) - your feedback and contributions are welcome!

## Deploy on Vercel

The easiest way to deploy your Next.js app is to use the [Vercel Platform](https://vercel.com/new?utm_medium=default-template&filter=next.js&utm_source=create-next-app&utm_campaign=create-next-app-readme) from the creators of Next.js.

Check out our [Next.js deployment documentation](https://nextjs.org/docs/deployment) for more details.
|
||||
0
src/api/demo/api.js
Normal file
BIN
src/api/demo/bun.lockb
Executable file
Binary file not shown.
280
src/api/demo/lib/api.ts
Normal file
@@ -0,0 +1,280 @@
|
||||
import * as Schema from "../../schema";
|
||||
import { ByteBuffer } from "peechy";
|
||||
import path from "path";
|
||||
import { Loader } from "../schema";
|
||||
// import { transform as sucraseTransform } from "sucrase";
|
||||
|
||||
export interface WebAssemblyModule {
|
||||
init(): number;
|
||||
transform(a: number): number;
|
||||
bun_malloc(a: number): number;
|
||||
bun_free(a: number): number;
|
||||
scan(a: number): number;
|
||||
}
|
||||
|
||||
const wasm_imports_sym: symbol | string =
|
||||
process.env.NODE_ENV === "development" ? "wasm_imports" : Symbol("wasm_imports");
|
||||
|
||||
const ptr_converter = new ArrayBuffer(16);
|
||||
const ptr_float = new BigUint64Array(ptr_converter);
|
||||
const slice = new Uint32Array(ptr_converter);
|
||||
|
||||
const Wasi = {
|
||||
clock_time_get(clk_id, tp) {
|
||||
return Date.now();
|
||||
},
|
||||
environ_sizes_get() {
|
||||
debugger;
|
||||
return 0;
|
||||
},
|
||||
environ_get(__environ, environ_buf) {
|
||||
debugger;
|
||||
return 0;
|
||||
},
|
||||
|
||||
fd_close(fd) {
|
||||
debugger;
|
||||
return 0;
|
||||
},
|
||||
proc_exit() {},
|
||||
|
||||
fd_seek(fd, offset_bigint, whence, newOffset) {
|
||||
debugger;
|
||||
},
|
||||
fd_write(fd, iov, iovcnt, pnum) {
|
||||
debugger;
|
||||
},
|
||||
};
|
||||
|
||||
var scratch: Uint8Array;
|
||||
var scratch2: Uint8Array;
|
||||
|
||||
export class Bun {
|
||||
static has_initialized = false;
|
||||
static wasm_source: WebAssembly.WebAssemblyInstantiatedSource = null;
|
||||
static get wasm_exports(): WebAssemblyModule {
|
||||
return Bun.wasm_source.instance.exports as any;
|
||||
}
|
||||
static get memory(): WebAssembly.Memory {
|
||||
return Bun.wasm_source.instance.exports.memory as any;
|
||||
}
|
||||
|
||||
static memory_array: Uint8Array;
|
||||
|
||||
static _decoder: TextDecoder;
|
||||
|
||||
static _wasmPtrToSlice(offset: number | bigint) {
|
||||
ptr_float[0] = typeof offset === "number" ? BigInt(offset) : offset;
|
||||
return new Uint8Array(Bun.memory.buffer, slice[0], slice[1]);
|
||||
}
|
||||
|
||||
static _wasmPtrLenToString(slice: number) {
|
||||
if (!Bun._decoder) {
|
||||
Bun._decoder = new TextDecoder("utf8");
|
||||
}
|
||||
|
||||
const region = this._wasmPtrToSlice(slice);
|
||||
return Bun._decoder.decode(region);
|
||||
}
|
||||
|
||||
// We don't want people to be calling these manually
|
||||
static [wasm_imports_sym] = {
|
||||
console_log(slice: number) {
|
||||
console.log(Bun._wasmPtrLenToString(slice));
|
||||
},
|
||||
console_error(slice: number) {
|
||||
console.error(Bun._wasmPtrLenToString(slice));
|
||||
},
|
||||
console_warn(slice: number) {
|
||||
console.warn(Bun._wasmPtrLenToString(slice));
|
||||
},
|
||||
console_info(slice: number) {
|
||||
console.info(Bun._wasmPtrLenToString(slice));
|
||||
},
|
||||
|
||||
__indirect_function_table: new WebAssembly.Table({
|
||||
initial: 0,
|
||||
element: "anyfunc",
|
||||
}),
|
||||
__stack_pointer: new WebAssembly.Global({
|
||||
mutable: true,
|
||||
value: "i32",
|
||||
}),
|
||||
__multi3(one: number, two: number) {
|
||||
return Math.imul(one | 0, two | 0);
|
||||
},
|
||||
fmod(one: number, two: number) {
|
||||
return one % two;
|
||||
},
|
||||
memset(ptr: number, value: number, len: number) {
|
||||
Bun.memory_array.fill(value, ptr, ptr + len);
|
||||
},
|
||||
memcpy(ptr: number, value: number, len: number) {
|
||||
Bun.memory_array.copyWithin(ptr, value, value + len);
|
||||
},
|
||||
// These functions convert a to an unsigned long long, rounding toward zero. Negative values all become zero.
|
||||
__fixunsdfti(a: number) {
|
||||
return Math.floor(a);
|
||||
},
|
||||
// These functions return the remainder of the unsigned division of a and b.
|
||||
__umodti3(a: number, b: number) {
|
||||
return (a | 0) % (b | 0);
|
||||
},
|
||||
// These functions return the quotient of the unsigned division of a and b.
|
||||
__udivti3(a: number, b: number) {
|
||||
return (a | 0) / (b | 0);
|
||||
},
|
||||
// These functions return the result of shifting a left by b bits.
|
||||
__ashlti3(a: number, b: number) {
|
||||
return (a | 0) >> (b | 0);
|
||||
},
|
||||
/* Returns: convert a to a double, rounding toward even. */
|
||||
__floatuntidf(a: number) {
|
||||
const mod = a % 2;
|
||||
if (mod === 0) {
|
||||
return Math.ceil(a);
|
||||
} else if (mod === 1) {
|
||||
return Math.floor(a);
|
||||
}
|
||||
},
|
||||
emscripten_notify_memory_growth() {},
|
||||
};
|
||||
|
||||
static async init(url) {
|
||||
// globalThis.sucraseTransform = sucraseTransform;
|
||||
scratch = new Uint8Array(8096);
|
||||
|
||||
if (Bun.has_initialized) {
|
||||
return;
|
||||
}
|
||||
|
||||
Bun.wasm_source = await globalThis.WebAssembly.instantiateStreaming(fetch(url), {
|
||||
env: Bun[wasm_imports_sym],
|
||||
wasi_snapshot_preview1: Wasi,
|
||||
});
|
||||
|
||||
const res = Bun.wasm_exports.init();
|
||||
if (res < 0) {
|
||||
throw `[Bun] Failed to initialize WASM module: code ${res}`;
|
||||
} else {
|
||||
console.log("WASM loaded.");
|
||||
}
|
||||
|
||||
Bun.has_initialized = true;
|
||||
}
|
||||
|
||||
static transformSync(content: Uint8Array | string, file_name: string) {
|
||||
if (!Bun.has_initialized) {
|
||||
throw "Please run await Bun.init(wasm_url) before using this.";
|
||||
}
|
||||
|
||||
// if (process.env.NODE_ENV === "development") {
|
||||
// console.time("[Bun] Transform " + file_name);
|
||||
// }
|
||||
|
||||
const bb = new ByteBuffer(scratch);
|
||||
bb.length = 0;
|
||||
bb.index = 0;
|
||||
var contents_buffer;
|
||||
if (typeof content === "string") {
|
||||
if (!scratch2) {
|
||||
scratch2 = new Uint8Array(content.length * 2);
|
||||
}
|
||||
|
||||
let i = 0;
|
||||
for (; i < content.length; i++) {
|
||||
if (i > scratch2.length) {
|
||||
var scratch3 = new Uint8Array(scratch2.length * 2);
|
||||
scratch3.set(scratch2);
|
||||
scratch2 = scratch3;
|
||||
}
|
||||
scratch2[i] = content.charCodeAt(i);
|
||||
}
|
||||
contents_buffer = scratch2.subarray(0, i);
|
||||
} else {
|
||||
contents_buffer = content;
|
||||
}
|
||||
|
||||
Schema.encodeTransform(
|
||||
{
|
||||
contents: contents_buffer,
|
||||
path: file_name,
|
||||
loader: {
|
||||
".jsx": Loader.jsx,
|
||||
".tsx": Loader.tsx,
|
||||
".ts": Loader.ts,
|
||||
".js": Loader.js,
|
||||
".json": Loader.json,
|
||||
}[path.extname(file_name)],
|
||||
},
|
||||
bb,
|
||||
);
|
||||
const data = bb.toUint8Array();
|
||||
|
||||
const input_ptr = Bun.wasm_exports.bun_malloc(data.length);
|
||||
var buffer = this._wasmPtrToSlice(input_ptr);
|
||||
buffer.set(data);
|
||||
|
||||
const resp_ptr = Bun.wasm_exports.transform(input_ptr);
|
||||
var _bb = new ByteBuffer(this._wasmPtrToSlice(resp_ptr));
|
||||
const response = Schema.decodeTransformResponse(_bb);
|
||||
Bun.wasm_exports.bun_free(input_ptr);
|
||||
scratch = bb.data;
|
||||
return response;
|
||||
}
|
||||
|
||||
static scan(content: Uint8Array | string, file_name: string, loader?: Loader) {
|
||||
if (!Bun.has_initialized) {
|
||||
throw "Please run await Bun.init(wasm_url) before using this.";
|
||||
}
|
||||
|
||||
// if (process.env.NODE_ENV === "development") {
|
||||
// console.time("[Bun] Transform " + file_name);
|
||||
// }
|
||||
scratch.fill(0);
|
||||
const bb = new ByteBuffer(scratch);
|
||||
bb.length = 0;
|
||||
bb.index = 0;
|
||||
var contents_buffer;
|
||||
if (typeof content === "string") {
|
||||
if (!scratch2) {
|
||||
scratch2 = new Uint8Array(content.length * 2);
|
||||
}
|
||||
const encode_into = new TextEncoder().encodeInto(content, scratch2);
|
||||
contents_buffer = scratch2.subarray(0, encode_into.written);
|
||||
} else {
|
||||
contents_buffer = content;
|
||||
}
|
||||
|
||||
Schema.encodeScan(
|
||||
{
|
||||
contents: contents_buffer,
|
||||
path: file_name,
|
||||
loader:
|
||||
loader ||
|
||||
{
|
||||
".jsx": Loader.jsx,
|
||||
".tsx": Loader.tsx,
|
||||
".ts": Loader.ts,
|
||||
".js": Loader.js,
|
||||
".json": Loader.json,
|
||||
}[path.extname(file_name)],
|
||||
},
|
||||
bb,
|
||||
);
|
||||
const data = bb.toUint8Array();
|
||||
|
||||
const input_ptr = Bun.wasm_exports.bun_malloc(data.length);
|
||||
var buffer = this._wasmPtrToSlice(input_ptr);
|
||||
buffer.set(data);
|
||||
|
||||
const resp_ptr = Bun.wasm_exports.scan(input_ptr);
|
||||
var _bb = new ByteBuffer(this._wasmPtrToSlice(resp_ptr));
|
||||
const response = Schema.decodeScanResult(_bb);
|
||||
Bun.wasm_exports.bun_free(input_ptr);
|
||||
scratch = bb.data;
|
||||
return response;
|
||||
}
|
||||
}
|
||||
|
||||
globalThis.Bun = Bun;
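
A minimal usage sketch for the wrapper above, assuming the WASM binary is served at `/bun-wasm.wasm` (the same URL `lib/run.ts` passes to `Bun.init`); `demo` is just an illustrative function name:

```ts
import { Bun } from "./api";

async function demo() {
  // init() must complete before transformSync/scan can be called.
  await Bun.init("/bun-wasm.wasm");

  // transformSync round-trips a Transform message through the WASM module
  // and returns the decoded TransformResponse.
  const result = Bun.transformSync("export const a = <div />;", "input.tsx");

  // Each output file's data is a Uint8Array backed by WASM memory.
  console.log(new TextDecoder().decode(result.files[0].data));
}

demo().catch(console.error);
```
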
|
||||
70
src/api/demo/lib/run.ts
Normal file
@@ -0,0 +1,70 @@
|
||||
import { transform as _transform, initialize } from "esbuild-wasm";
|
||||
import initSwc, { transformSync as transformSyncSWC } from "@swc/wasm-web";
|
||||
import { Bun } from "./api";
|
||||
|
||||
export async function start() {
|
||||
await initialize({
|
||||
worker: false,
|
||||
wasmURL: "/node_modules/esbuild-wasm/esbuild.wasm",
|
||||
});
|
||||
await Bun.init("/bun-wasm.wasm");
|
||||
await initSwc("/node_modules/@swc/wasm-web/wasm_bg.wasm");
|
||||
}
|
||||
|
||||
const swcOptions = {
|
||||
sourceMaps: false,
|
||||
inlineSourcesContent: false,
|
||||
jsc: {
|
||||
target: "es2022",
|
||||
parser: {
|
||||
jsx: true,
|
||||
syntax: "typescript",
|
||||
tsx: false,
|
||||
decorators: false,
|
||||
dynamicImport: false,
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
export async function transform(contents, file) {
|
||||
var result: any = {
|
||||
timings: {
|
||||
esbuild: 0,
|
||||
bun: 0,
|
||||
swc: 0,
|
||||
},
|
||||
};
|
||||
result.timings.esbuild = performance.now();
|
||||
result.esbuild = await _transform(contents, {
|
||||
sourcefile: file,
|
||||
loader: file.substring(file.lastIndexOf(".") + 1),
|
||||
});
|
||||
result.timings.esbuild = performance.now() - result.timings.esbuild;
|
||||
|
||||
result.timings.bun = performance.now();
|
||||
result.bun = Bun.transformSync(contents, file);
|
||||
result.timings.bun = performance.now() - result.timings.bun;
|
||||
|
||||
if (file.substring(file.lastIndexOf(".") + 1) === "tsx") {
|
||||
swcOptions.jsc.parser.tsx = true;
|
||||
swcOptions.jsc.parser.syntax = "typescript";
|
||||
} else if (file.substring(file.lastIndexOf(".") + 1) === "jsx") {
|
||||
swcOptions.jsc.parser.tsx = false;
|
||||
swcOptions.jsc.parser.jsx = true;
|
||||
swcOptions.jsc.parser.syntax = "typescript";
|
||||
} else {
|
||||
swcOptions.jsc.parser.tsx = false;
|
||||
swcOptions.jsc.parser.jsx = false;
|
||||
swcOptions.jsc.parser.syntax = "javascript";
|
||||
}
|
||||
|
||||
result.timings.swc = performance.now();
|
||||
result.swc = transformSyncSWC(contents, swcOptions as any);
|
||||
result.timings.swc = performance.now() - result.timings.swc;
|
||||
|
||||
console.log("esbuild:", result.timings.esbuild, "ms");
|
||||
console.log("Bun:", result.timings.bun, "ms");
|
||||
console.log("SWC:", result.timings.swc, "ms");
|
||||
|
||||
return result;
|
||||
}
|
||||
44
src/api/demo/lib/scan.ts
Normal file
@@ -0,0 +1,44 @@
|
||||
import { init, parse } from "es-module-lexer";
|
||||
|
||||
import { Bun } from "./api";
|
||||
|
||||
export async function start() {
|
||||
await init;
|
||||
await Bun.init("/bun-wasm.wasm");
|
||||
}
|
||||
|
||||
const swcOptions = {
|
||||
sourceMaps: false,
|
||||
inlineSourcesContent: false,
|
||||
jsc: {
|
||||
target: "es2022",
|
||||
parser: {
|
||||
jsx: true,
|
||||
syntax: "typescript",
|
||||
tsx: false,
|
||||
decorators: false,
|
||||
dynamicImport: false,
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
export async function transform(contents, file) {
|
||||
var result: any = {
|
||||
timings: {
|
||||
lexer: 0,
|
||||
bun: 0,
|
||||
},
|
||||
};
|
||||
result.timings.lexer = performance.now();
|
||||
result.lexer = await parse(contents, file);
|
||||
result.timings.lexer = performance.now() - result.timings.lexer;
|
||||
|
||||
result.timings.bun = performance.now();
|
||||
result.bun = Bun.scan(contents, file);
|
||||
result.timings.bun = performance.now() - result.timings.bun;
|
||||
|
||||
console.log("lexer:", result.timings.lexer, "ms");
|
||||
console.log("Bun:", result.timings.bun, "ms");
|
||||
|
||||
return result;
|
||||
}
|
||||
5
src/api/demo/next-env.d.ts
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
/// <reference types="next" />
/// <reference types="next/image-types/global" />

// NOTE: This file should not be edited
// see https://nextjs.org/docs/basic-features/typescript for more information.
|
||||
29
src/api/demo/package.json
Normal file
@@ -0,0 +1,29 @@
|
||||
{
  "name": "demo",
  "version": "0.1.0",
  "private": true,
  "scripts": {
    "dev": "next dev",
    "build": "next build",
    "start": "next start"
  },
  "dependencies": {
    "@swc/wasm-web": "^1.2.146",
    "bun-framework-next": "^12.1.0",
    "es-module-loader": "^2.3.0",
    "esbuild": "^0.14.23",
    "esbuild-wasm": "^0.14.23",
    "next": "12",
    "peechy": "0.4.32",
    "react": "17.0.2",
    "react-dom": "17.0.2",
    "sucrase": "^3.18.1"
  },
  "devDependencies": {
    "@types/react": "^17.0.8",
    "bun-types": "^0.2.2",
    "typescript": "^4.3.2",
    "webpack": "^5.38.1",
    "webpack-cli": "^4.7.0"
  }
}
|
||||
7
src/api/demo/pages/_app.js
Normal file
@@ -0,0 +1,7 @@
|
||||
import "../styles/globals.css";
|
||||
|
||||
function MyApp({ Component, pageProps }) {
|
||||
return <Component {...pageProps} />;
|
||||
}
|
||||
|
||||
export default MyApp;
|
||||
5
src/api/demo/pages/api/hello.js
Normal file
@@ -0,0 +1,5 @@
|
||||
// Next.js API route support: https://nextjs.org/docs/api-routes/introduction

export default (req, res) => {
  res.status(200).json({ name: "John Doe" });
};
|
||||
68
src/api/demo/pages/index.tsx
Normal file
@@ -0,0 +1,68 @@
|
||||
import Head from "next/head";
|
||||
import Image from "next/image";
|
||||
import styles from "../styles/Home.module.css";
|
||||
import { readFile } from "fs/promises";
|
||||
|
||||
import React from "react";
|
||||
|
||||
if (typeof window !== "undefined") {
|
||||
globalThis.Run = await import("../lib/run");
|
||||
await import("../lib/api");
|
||||
}
|
||||
|
||||
export async function getStaticProps(ctx) {
|
||||
return {
|
||||
props: {
|
||||
// not tested
|
||||
code: readFile("/Users/jarred/Build/es-module-lexer/test/samples/magic-string.js", { encoding: "utf-8" }),
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
var textDecoder = new TextDecoder();
|
||||
export default function Home({ code }) {
|
||||
const fileNameRef = React.useRef<HTMLInputElement>(null);
|
||||
const [esbuildResult, setEsbuildResult] = React.useState("");
|
||||
const [bunResult, setBunResult] = React.useState("");
|
||||
const [swcResult, setSWCResult] = React.useState("");
|
||||
React.useEffect(() => {
|
||||
globalThis.Run.start();
|
||||
}, []);
|
||||
|
||||
const runBuild = React.useCallback(
|
||||
event => {
|
||||
globalThis.Run.transform(event.target.value, fileNameRef?.current?.value).then(result => {
|
||||
setEsbuildResult(result.esbuild.code);
|
||||
setBunResult(textDecoder.decode(result.bun.files[0].data));
|
||||
setSWCResult(result.swc.code);
|
||||
}, console.error);
|
||||
},
|
||||
[fileNameRef, setEsbuildResult, setBunResult, setSWCResult],
|
||||
);
|
||||
return (
|
||||
<div className={styles.container}>
|
||||
<Head>
|
||||
<title>Next.js</title>
|
||||
<meta name="description" content="Generated by create next app" />
|
||||
<link rel="icon" href="/favicon.ico" />
|
||||
</Head>
|
||||
|
||||
<main className={styles.main}>
|
||||
<div>
|
||||
<input
|
||||
autoComplete="filename"
|
||||
type="text"
|
||||
placeholder="filename"
|
||||
defaultValue="input.tsx"
|
||||
ref={fileNameRef}
|
||||
/>
|
||||
<textarea onChange={runBuild} defaultValue={code}></textarea>
|
||||
|
||||
<textarea readOnly value={esbuildResult}></textarea>
|
||||
<textarea readOnly value={bunResult}></textarea>
|
||||
<textarea readOnly value={swcResult}></textarea>
|
||||
</div>
|
||||
</main>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
67
src/api/demo/pages/scan.tsx
Normal file
@@ -0,0 +1,67 @@
|
||||
import Head from "next/head";
|
||||
import { readFile } from "fs/promises";
|
||||
import styles from "../styles/Home.module.css";
|
||||
|
||||
import React from "react";
|
||||
|
||||
if (typeof window !== "undefined") {
|
||||
globalThis.Scan = await import("../lib/scan");
|
||||
await import("../lib/api");
|
||||
}
|
||||
|
||||
export async function getStaticProps(ctx) {
|
||||
return {
|
||||
props: {
|
||||
// not tested
|
||||
code: readFile("/Users/jarred/Build/es-module-lexer/test/samples/magic-string.js", { encoding: "utf-8" }),
|
||||
defaultFile: "magic-string.js",
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
var textDecoder = new TextDecoder();
|
||||
export default function Home({ code, defaultFile }) {
|
||||
const fileNameRef = React.useRef<HTMLInputElement>(null);
|
||||
const [lexer, setLexer] = React.useState("");
|
||||
const [bunResult, setBunResult] = React.useState("");
|
||||
const [file, setFile] = React.useState(defaultFile);
|
||||
React.useEffect(() => {
|
||||
globalThis.Scan.start();
|
||||
}, []);
|
||||
|
||||
const runBuild = React.useCallback(
|
||||
event => {
|
||||
globalThis.Scan.transform(event.target.value, fileNameRef?.current?.value).then(result => {
|
||||
setLexer(JSON.stringify(result.lexer, null, 2));
|
||||
setBunResult(JSON.stringify(result.bun, null, 2));
|
||||
}, console.error);
|
||||
},
|
||||
[fileNameRef, setBunResult, setLexer],
|
||||
);
|
||||
return (
|
||||
<div className={styles.container}>
|
||||
<Head>
|
||||
<title>Next.js</title>
|
||||
<meta name="description" content="Generated by create next app" />
|
||||
<link rel="icon" href="/favicon.ico" />
|
||||
</Head>
|
||||
|
||||
<main className={styles.main}>
|
||||
<div>
|
||||
<input
|
||||
autoComplete="filename"
|
||||
type="text"
|
||||
placeholder="filename"
|
||||
value={file}
|
||||
onChange={event => setFile(event.target.value)}
|
||||
ref={fileNameRef}
|
||||
/>
|
||||
<textarea onChange={runBuild} defaultValue={code}></textarea>
|
||||
|
||||
<textarea readOnly value={bunResult}></textarea>
|
||||
<textarea readOnly value={lexer}></textarea>
|
||||
</div>
|
||||
</main>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
1
src/api/demo/pages/two.tsx
Normal file
@@ -0,0 +1 @@
|
||||
export {};
|
||||
2038
src/api/demo/pnpm-lock.yaml
generated
Normal file
File diff suppressed because it is too large
BIN
src/api/demo/public/favicon.ico
Normal file
Binary file not shown.
|
4
src/api/demo/public/vercel.svg
Normal file
@@ -0,0 +1,4 @@
|
||||
<svg width="283" height="64" viewBox="0 0 283 64" fill="none"
|
||||
xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M141.04 16c-11.04 0-19 7.2-19 18s8.96 18 20 18c6.67 0 12.55-2.64 16.19-7.09l-7.65-4.42c-2.02 2.21-5.09 3.5-8.54 3.5-4.79 0-8.86-2.5-10.37-6.5h28.02c.22-1.12.35-2.28.35-3.5 0-10.79-7.96-17.99-19-17.99zm-9.46 14.5c1.25-3.99 4.67-6.5 9.45-6.5 4.79 0 8.21 2.51 9.45 6.5h-18.9zM248.72 16c-11.04 0-19 7.2-19 18s8.96 18 20 18c6.67 0 12.55-2.64 16.19-7.09l-7.65-4.42c-2.02 2.21-5.09 3.5-8.54 3.5-4.79 0-8.86-2.5-10.37-6.5h28.02c.22-1.12.35-2.28.35-3.5 0-10.79-7.96-17.99-19-17.99zm-9.45 14.5c1.25-3.99 4.67-6.5 9.45-6.5 4.79 0 8.21 2.51 9.45 6.5h-18.9zM200.24 34c0 6 3.92 10 10 10 4.12 0 7.21-1.87 8.8-4.92l7.68 4.43c-3.18 5.3-9.14 8.49-16.48 8.49-11.05 0-19-7.2-19-18s7.96-18 19-18c7.34 0 13.29 3.19 16.48 8.49l-7.68 4.43c-1.59-3.05-4.68-4.92-8.8-4.92-6.07 0-10 4-10 10zm82.48-29v46h-9V5h9zM36.95 0L73.9 64H0L36.95 0zm92.38 5l-27.71 48L73.91 5H84.3l17.32 30 17.32-30h10.39zm58.91 12v9.69c-1-.29-2.06-.49-3.2-.49-5.81 0-10 4-10 10V51h-9V17h9v9.2c0-5.08 5.91-9.2 13.2-9.2z" fill="#000"/>
|
||||
</svg>
|
||||
|
799
src/api/demo/schema.d.ts
vendored
Normal file
@@ -0,0 +1,799 @@
|
||||
import type { ByteBuffer } from "peechy";
|
||||
|
||||
type byte = number;
|
||||
type float = number;
|
||||
type int = number;
|
||||
type alphanumeric = string;
|
||||
type uint = number;
|
||||
type int8 = number;
|
||||
type lowp = number;
|
||||
type int16 = number;
|
||||
type int32 = number;
|
||||
type float32 = number;
|
||||
type uint16 = number;
|
||||
type uint32 = number;
|
||||
export enum Loader {
|
||||
jsx = 1,
|
||||
js = 2,
|
||||
ts = 3,
|
||||
tsx = 4,
|
||||
css = 5,
|
||||
file = 6,
|
||||
json = 7,
|
||||
toml = 8,
|
||||
wasm = 9,
|
||||
}
|
||||
export const LoaderKeys = {
|
||||
1: "jsx",
|
||||
jsx: "jsx",
|
||||
2: "js",
|
||||
js: "js",
|
||||
3: "ts",
|
||||
ts: "ts",
|
||||
4: "tsx",
|
||||
tsx: "tsx",
|
||||
5: "css",
|
||||
css: "css",
|
||||
6: "file",
|
||||
file: "file",
|
||||
7: "json",
|
||||
json: "json",
|
||||
8: "toml",
|
||||
toml: "toml",
|
||||
9: "wasm",
|
||||
wasm: "wasm",
|
||||
};
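
As an illustration of how these loader types get used, a small sketch mirroring the extension lookup in `lib/api.ts`; `byExt` and `loaderForFile` are hypothetical helper names, not part of the schema:

```ts
import path from "path";
import { Loader } from "./schema";

// Map file extensions to Loader values, as the demo does before encoding
// a Transform/Scan message.
const byExt: Record<string, Loader> = {
  ".jsx": Loader.jsx,
  ".tsx": Loader.tsx,
  ".ts": Loader.ts,
  ".js": Loader.js,
  ".json": Loader.json,
};

const loaderForFile = (file: string): Loader | undefined => byExt[path.extname(file)];

console.log(loaderForFile("input.tsx") === Loader.tsx); // true
```
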
|
||||
export enum FrameworkEntryPointType {
|
||||
client = 1,
|
||||
server = 2,
|
||||
fallback = 3,
|
||||
}
|
||||
export const FrameworkEntryPointTypeKeys = {
|
||||
1: "client",
|
||||
client: "client",
|
||||
2: "server",
|
||||
server: "server",
|
||||
3: "fallback",
|
||||
fallback: "fallback",
|
||||
};
|
||||
export enum StackFrameScope {
|
||||
Eval = 1,
|
||||
Module = 2,
|
||||
Function = 3,
|
||||
Global = 4,
|
||||
Wasm = 5,
|
||||
Constructor = 6,
|
||||
}
|
||||
export const StackFrameScopeKeys = {
|
||||
1: "Eval",
|
||||
Eval: "Eval",
|
||||
2: "Module",
|
||||
Module: "Module",
|
||||
3: "Function",
|
||||
Function: "Function",
|
||||
4: "Global",
|
||||
Global: "Global",
|
||||
5: "Wasm",
|
||||
Wasm: "Wasm",
|
||||
6: "Constructor",
|
||||
Constructor: "Constructor",
|
||||
};
|
||||
export enum FallbackStep {
|
||||
ssr_disabled = 1,
|
||||
create_vm = 2,
|
||||
configure_router = 3,
|
||||
configure_defines = 4,
|
||||
resolve_entry_point = 5,
|
||||
load_entry_point = 6,
|
||||
eval_entry_point = 7,
|
||||
fetch_event_handler = 8,
|
||||
}
|
||||
export const FallbackStepKeys = {
|
||||
1: "ssr_disabled",
|
||||
ssr_disabled: "ssr_disabled",
|
||||
2: "create_vm",
|
||||
create_vm: "create_vm",
|
||||
3: "configure_router",
|
||||
configure_router: "configure_router",
|
||||
4: "configure_defines",
|
||||
configure_defines: "configure_defines",
|
||||
5: "resolve_entry_point",
|
||||
resolve_entry_point: "resolve_entry_point",
|
||||
6: "load_entry_point",
|
||||
load_entry_point: "load_entry_point",
|
||||
7: "eval_entry_point",
|
||||
eval_entry_point: "eval_entry_point",
|
||||
8: "fetch_event_handler",
|
||||
fetch_event_handler: "fetch_event_handler",
|
||||
};
|
||||
export enum ResolveMode {
|
||||
disable = 1,
|
||||
lazy = 2,
|
||||
dev = 3,
|
||||
bundle = 4,
|
||||
}
|
||||
export const ResolveModeKeys = {
|
||||
1: "disable",
|
||||
disable: "disable",
|
||||
2: "lazy",
|
||||
lazy: "lazy",
|
||||
3: "dev",
|
||||
dev: "dev",
|
||||
4: "bundle",
|
||||
bundle: "bundle",
|
||||
};
|
||||
export enum Platform {
|
||||
browser = 1,
|
||||
node = 2,
|
||||
bun = 3,
|
||||
bun_macro = 4,
|
||||
}
|
||||
export const PlatformKeys = {
|
||||
1: "browser",
|
||||
browser: "browser",
|
||||
2: "node",
|
||||
node: "node",
|
||||
3: "bun",
|
||||
bun: "bun",
|
||||
4: "bun_macro",
|
||||
bun_macro: "bun_macro",
|
||||
};
|
||||
export enum CSSInJSBehavior {
|
||||
facade = 1,
|
||||
facade_onimportcss = 2,
|
||||
auto_onimportcss = 3,
|
||||
}
|
||||
export const CSSInJSBehaviorKeys = {
|
||||
1: "facade",
|
||||
facade: "facade",
|
||||
2: "facade_onimportcss",
|
||||
facade_onimportcss: "facade_onimportcss",
|
||||
3: "auto_onimportcss",
|
||||
auto_onimportcss: "auto_onimportcss",
|
||||
};
|
||||
export enum JSXRuntime {
|
||||
automatic = 1,
|
||||
classic = 2,
|
||||
}
|
||||
export const JSXRuntimeKeys = {
|
||||
1: "automatic",
|
||||
automatic: "automatic",
|
||||
2: "classic",
|
||||
classic: "classic",
|
||||
};
|
||||
export enum ScanDependencyMode {
|
||||
app = 1,
|
||||
all = 2,
|
||||
}
|
||||
export const ScanDependencyModeKeys = {
|
||||
1: "app",
|
||||
app: "app",
|
||||
2: "all",
|
||||
all: "all",
|
||||
};
|
||||
export enum ModuleImportType {
|
||||
import = 1,
|
||||
require = 2,
|
||||
}
|
||||
export const ModuleImportTypeKeys = {
|
||||
1: "import",
|
||||
import: "import",
|
||||
2: "require",
|
||||
require: "require",
|
||||
};
|
||||
export enum DotEnvBehavior {
|
||||
disable = 1,
|
||||
prefix = 2,
|
||||
load_all = 3,
|
||||
}
|
||||
export const DotEnvBehaviorKeys = {
|
||||
1: "disable",
|
||||
disable: "disable",
|
||||
2: "prefix",
|
||||
prefix: "prefix",
|
||||
3: "load_all",
|
||||
load_all: "load_all",
|
||||
};
|
||||
export enum ImportKind {
|
||||
entry_point = 1,
|
||||
stmt = 2,
|
||||
require = 3,
|
||||
dynamic = 4,
|
||||
require_resolve = 5,
|
||||
at = 6,
|
||||
url = 7,
|
||||
internal = 8,
|
||||
}
|
||||
export const ImportKindKeys = {
|
||||
1: "entry_point",
|
||||
entry_point: "entry_point",
|
||||
2: "stmt",
|
||||
stmt: "stmt",
|
||||
3: "require",
|
||||
require: "require",
|
||||
4: "dynamic",
|
||||
dynamic: "dynamic",
|
||||
5: "require_resolve",
|
||||
require_resolve: "require_resolve",
|
||||
6: "at",
|
||||
at: "at",
|
||||
7: "url",
|
||||
url: "url",
|
||||
8: "internal",
|
||||
internal: "internal",
|
||||
};
|
||||
export enum TransformResponseStatus {
|
||||
success = 1,
|
||||
fail = 2,
|
||||
}
|
||||
export const TransformResponseStatusKeys = {
|
||||
1: "success",
|
||||
success: "success",
|
||||
2: "fail",
|
||||
fail: "fail",
|
||||
};
|
||||
export enum MessageLevel {
|
||||
err = 1,
|
||||
warn = 2,
|
||||
note = 3,
|
||||
info = 4,
|
||||
debug = 5,
|
||||
}
|
||||
export const MessageLevelKeys = {
|
||||
1: "err",
|
||||
err: "err",
|
||||
2: "warn",
|
||||
warn: "warn",
|
||||
3: "note",
|
||||
note: "note",
|
||||
4: "info",
|
||||
info: "info",
|
||||
5: "debug",
|
||||
debug: "debug",
|
||||
};
|
||||
export enum Reloader {
|
||||
disable = 1,
|
||||
live = 2,
|
||||
fast_refresh = 3,
|
||||
}
|
||||
export const ReloaderKeys = {
|
||||
1: "disable",
|
||||
disable: "disable",
|
||||
2: "live",
|
||||
live: "live",
|
||||
3: "fast_refresh",
|
||||
fast_refresh: "fast_refresh",
|
||||
};
|
||||
export enum WebsocketMessageKind {
|
||||
welcome = 1,
|
||||
file_change_notification = 2,
|
||||
build_success = 3,
|
||||
build_fail = 4,
|
||||
manifest_success = 5,
|
||||
manifest_fail = 6,
|
||||
resolve_file = 7,
|
||||
file_change_notification_with_hint = 8,
|
||||
}
|
||||
export const WebsocketMessageKindKeys = {
|
||||
1: "welcome",
|
||||
welcome: "welcome",
|
||||
2: "file_change_notification",
|
||||
file_change_notification: "file_change_notification",
|
||||
3: "build_success",
|
||||
build_success: "build_success",
|
||||
4: "build_fail",
|
||||
build_fail: "build_fail",
|
||||
5: "manifest_success",
|
||||
manifest_success: "manifest_success",
|
||||
6: "manifest_fail",
|
||||
manifest_fail: "manifest_fail",
|
||||
7: "resolve_file",
|
||||
resolve_file: "resolve_file",
|
||||
8: "file_change_notification_with_hint",
|
||||
file_change_notification_with_hint: "file_change_notification_with_hint",
|
||||
};
|
||||
export enum WebsocketCommandKind {
|
||||
build = 1,
|
||||
manifest = 2,
|
||||
build_with_file_path = 3,
|
||||
}
|
||||
export const WebsocketCommandKindKeys = {
|
||||
1: "build",
|
||||
build: "build",
|
||||
2: "manifest",
|
||||
manifest: "manifest",
|
||||
3: "build_with_file_path",
|
||||
build_with_file_path: "build_with_file_path",
|
||||
};
|
||||
export interface StackFrame {
|
||||
function_name: string;
|
||||
file: string;
|
||||
position: StackFramePosition;
|
||||
scope: StackFrameScope;
|
||||
}
|
||||
|
||||
export interface StackFramePosition {
|
||||
source_offset: int32;
|
||||
line: int32;
|
||||
line_start: int32;
|
||||
line_stop: int32;
|
||||
column_start: int32;
|
||||
column_stop: int32;
|
||||
expression_start: int32;
|
||||
expression_stop: int32;
|
||||
}
|
||||
|
||||
export interface SourceLine {
|
||||
line: int32;
|
||||
text: string;
|
||||
}
|
||||
|
||||
export interface StackTrace {
|
||||
source_lines: SourceLine[];
|
||||
frames: StackFrame[];
|
||||
}
|
||||
|
||||
export interface JSException {
|
||||
name?: string;
|
||||
message?: string;
|
||||
runtime_type?: uint16;
|
||||
code?: uint8;
|
||||
stack?: StackTrace;
|
||||
}
|
||||
|
||||
export interface Problems {
|
||||
code: uint16;
|
||||
name: string;
|
||||
exceptions: JSException[];
|
||||
build: Log;
|
||||
}
|
||||
|
||||
export interface Router {
|
||||
routes: StringMap;
|
||||
route: int32;
|
||||
params: StringMap;
|
||||
}
|
||||
|
||||
export interface FallbackMessageContainer {
|
||||
message?: string;
|
||||
router?: Router;
|
||||
reason?: FallbackStep;
|
||||
problems?: Problems;
|
||||
cwd?: string;
|
||||
}
|
||||
|
||||
export interface JSX {
|
||||
factory: string;
|
||||
runtime: JSXRuntime;
|
||||
fragment: string;
|
||||
development: boolean;
|
||||
import_source: string;
|
||||
react_fast_refresh: boolean;
|
||||
}
|
||||
|
||||
export interface StringPointer {
|
||||
offset: uint32;
|
||||
length: uint32;
|
||||
}
|
||||
|
||||
export interface JavascriptBundledModule {
|
||||
path: StringPointer;
|
||||
code: StringPointer;
|
||||
package_id: uint32;
|
||||
id: uint32;
|
||||
path_extname_length: byte;
|
||||
}
|
||||
|
||||
export interface JavascriptBundledPackage {
|
||||
name: StringPointer;
|
||||
version: StringPointer;
|
||||
hash: uint32;
|
||||
modules_offset: uint32;
|
||||
modules_length: uint32;
|
||||
}
|
||||
|
||||
export interface JavascriptBundle {
|
||||
modules: JavascriptBundledModule[];
|
||||
packages: JavascriptBundledPackage[];
|
||||
etag: Uint8Array;
|
||||
generated_at: uint32;
|
||||
app_package_json_dependencies_hash: Uint8Array;
|
||||
import_from_name: Uint8Array;
|
||||
manifest_string: Uint8Array;
|
||||
}
|
||||
|
||||
export interface JavascriptBundleContainer {
|
||||
bundle_format_version?: uint32;
|
||||
routes?: LoadedRouteConfig;
|
||||
framework?: LoadedFramework;
|
||||
bundle?: JavascriptBundle;
|
||||
code_length?: uint32;
|
||||
}
|
||||
|
||||
export interface ModuleImportRecord {
|
||||
kind: ModuleImportType;
|
||||
path: string;
|
||||
dynamic: boolean;
|
||||
}
|
||||
|
||||
export interface Module {
|
||||
path: string;
|
||||
imports: ModuleImportRecord[];
|
||||
}
|
||||
|
||||
export interface StringMap {
|
||||
keys: string[];
|
||||
values: string[];
|
||||
}
|
||||
|
||||
export interface LoaderMap {
|
||||
extensions: string[];
|
||||
loaders: Loader[];
|
||||
}
|
||||
|
||||
export interface EnvConfig {
|
||||
prefix?: string;
|
||||
defaults?: StringMap;
|
||||
}
|
||||
|
||||
export interface LoadedEnvConfig {
|
||||
dotenv: DotEnvBehavior;
|
||||
defaults: StringMap;
|
||||
prefix: string;
|
||||
}
|
||||
|
||||
export interface FrameworkConfig {
|
||||
package?: string;
|
||||
client?: FrameworkEntryPointMessage;
|
||||
server?: FrameworkEntryPointMessage;
|
||||
fallback?: FrameworkEntryPointMessage;
|
||||
development?: boolean;
|
||||
client_css_in_js?: CSSInJSBehavior;
|
||||
display_name?: string;
|
||||
overrideModules?: StringMap;
|
||||
}
|
||||
|
||||
export interface FrameworkEntryPoint {
|
||||
kind: FrameworkEntryPointType;
|
||||
path: string;
|
||||
env: LoadedEnvConfig;
|
||||
}
|
||||
|
||||
export interface FrameworkEntryPointMap {
|
||||
client?: FrameworkEntryPoint;
|
||||
server?: FrameworkEntryPoint;
|
||||
fallback?: FrameworkEntryPoint;
|
||||
}
|
||||
|
||||
export interface FrameworkEntryPointMessage {
|
||||
path?: string;
|
||||
env?: EnvConfig;
|
||||
}
|
||||
|
||||
export interface LoadedFramework {
|
||||
package: string;
|
||||
display_name: string;
|
||||
development: boolean;
|
||||
entry_points: FrameworkEntryPointMap;
|
||||
client_css_in_js: CSSInJSBehavior;
|
||||
overrideModules: StringMap;
|
||||
}
|
||||
|
||||
export interface LoadedRouteConfig {
|
||||
dir: string;
|
||||
extensions: string[];
|
||||
static_dir: string;
|
||||
asset_prefix: string;
|
||||
}
|
||||
|
||||
export interface RouteConfig {
|
||||
dir?: string[];
|
||||
extensions?: string[];
|
||||
static_dir?: string;
|
||||
asset_prefix?: string;
|
||||
}
|
||||
|
||||
export interface TransformOptions {
|
||||
jsx?: JSX;
|
||||
tsconfig_override?: string;
|
||||
resolve?: ResolveMode;
|
||||
origin?: string;
|
||||
absolute_working_dir?: string;
|
||||
define?: StringMap;
|
||||
preserve_symlinks?: boolean;
|
||||
entry_points?: string[];
|
||||
write?: boolean;
|
||||
inject?: string[];
|
||||
output_dir?: string;
|
||||
external?: string[];
|
||||
loaders?: LoaderMap;
|
||||
main_fields?: string[];
|
||||
platform?: Platform;
|
||||
serve?: boolean;
|
||||
extension_order?: string[];
|
||||
generate_node_module_bundle?: boolean;
|
||||
node_modules_bundle_path?: string;
|
||||
node_modules_bundle_path_server?: string;
|
||||
framework?: FrameworkConfig;
|
||||
router?: RouteConfig;
|
||||
no_summary?: boolean;
|
||||
disable_hmr?: boolean;
|
||||
port?: uint16;
|
||||
logLevel?: MessageLevel;
|
||||
}
|
||||
|
||||
export interface FileHandle {
|
||||
path: string;
|
||||
size: uint;
|
||||
fd: uint;
|
||||
}
|
||||
|
||||
export interface Transform {
|
||||
handle?: FileHandle;
|
||||
path?: string;
|
||||
contents?: Uint8Array;
|
||||
loader?: Loader;
|
||||
options?: TransformOptions;
|
||||
}
|
||||
|
||||
export interface Scan {
|
||||
path?: string;
|
||||
contents?: Uint8Array;
|
||||
loader?: Loader;
|
||||
}
|
||||
|
||||
export interface ScanResult {
|
||||
exports: string[];
|
||||
imports: ScannedImport[];
|
||||
}
|
||||
|
||||
export interface ScannedImport {
|
||||
path: string;
|
||||
kind: ImportKind;
|
||||
}
|
||||
|
||||
export interface OutputFile {
|
||||
data: Uint8Array;
|
||||
path: string;
|
||||
}
|
||||
|
||||
export interface TransformResponse {
|
||||
status: TransformResponseStatus;
|
||||
files: OutputFile[];
|
||||
errors: Message[];
|
||||
}
|
||||
|
||||
export interface Location {
|
||||
file: string;
|
||||
namespace: string;
|
||||
line: int32;
|
||||
column: int32;
|
||||
line_text: string;
|
||||
suggestion: string;
|
||||
offset: uint;
|
||||
}
|
||||
|
||||
export interface MessageData {
|
||||
text?: string;
|
||||
location?: Location;
|
||||
}
|
||||
|
||||
export interface MessageMeta {
|
||||
resolve?: string;
|
||||
build?: boolean;
|
||||
}
|
||||
|
||||
export interface Message {
|
||||
level: MessageLevel;
|
||||
data: MessageData;
|
||||
notes: MessageData[];
|
||||
on: MessageMeta;
|
||||
}
|
||||
|
||||
export interface Log {
|
||||
warnings: uint32;
|
||||
errors: uint32;
|
||||
msgs: Message[];
|
||||
}
|
||||
|
||||
export interface WebsocketMessage {
|
||||
timestamp: uint32;
|
||||
kind: WebsocketMessageKind;
|
||||
}
|
||||
|
||||
export interface WebsocketMessageWelcome {
|
||||
epoch: uint32;
|
||||
javascriptReloader: Reloader;
|
||||
cwd: string;
|
||||
}
|
||||
|
||||
export interface WebsocketMessageFileChangeNotification {
|
||||
id: uint32;
|
||||
loader: Loader;
|
||||
}
|
||||
|
||||
export interface WebsocketCommand {
|
||||
kind: WebsocketCommandKind;
|
||||
timestamp: uint32;
|
||||
}
|
||||
|
||||
export interface WebsocketCommandBuild {
|
||||
id: uint32;
|
||||
}
|
||||
|
||||
export interface WebsocketCommandManifest {
|
||||
id: uint32;
|
||||
}
|
||||
|
||||
export interface WebsocketMessageBuildSuccess {
|
||||
id: uint32;
|
||||
from_timestamp: uint32;
|
||||
loader: Loader;
|
||||
module_path: string;
|
||||
blob_length: uint32;
|
||||
}
|
||||
|
||||
export interface WebsocketMessageBuildFailure {
|
||||
id: uint32;
|
||||
from_timestamp: uint32;
|
||||
loader: Loader;
|
||||
module_path: string;
|
||||
log: Log;
|
||||
}
|
||||
|
||||
export interface WebsocketCommandBuildWithFilePath {
|
||||
id: uint32;
|
||||
file_path: string;
|
||||
}
|
||||
|
||||
export interface WebsocketMessageResolveID {
|
||||
id: uint32;
|
||||
}
|
||||
|
||||
export interface NPMRegistry {
|
||||
url: string;
|
||||
username: string;
|
||||
password: string;
|
||||
token: string;
|
||||
}
|
||||
|
||||
export interface NPMRegistryMap {
|
||||
scopes: string[];
|
||||
registries: NPMRegistry[];
|
||||
}
|
||||
|
||||
export interface BunInstall {
|
||||
default_registry?: NPMRegistry;
|
||||
scoped?: NPMRegistryMap;
|
||||
lockfile_path?: string;
|
||||
save_lockfile_path?: string;
|
||||
cache_directory?: string;
|
||||
dry_run?: boolean;
|
||||
force?: boolean;
|
||||
save_dev?: boolean;
|
||||
save_optional?: boolean;
|
||||
save_peer?: boolean;
|
||||
save_lockfile?: boolean;
|
||||
production?: boolean;
|
||||
save_yarn_lockfile?: boolean;
|
||||
native_bin_links?: string[];
|
||||
disable_cache?: boolean;
|
||||
disable_manifest_cache?: boolean;
|
||||
global_dir?: string;
|
||||
global_bin_dir?: string;
|
||||
}
|
||||
|
||||
export declare function encodeStackFrame(message: StackFrame, bb: ByteBuffer): void;
|
||||
export declare function decodeStackFrame(buffer: ByteBuffer): StackFrame;
|
||||
export declare function encodeStackFramePosition(message: StackFramePosition, bb: ByteBuffer): void;
|
||||
export declare function decodeStackFramePosition(buffer: ByteBuffer): StackFramePosition;
|
||||
export declare function encodeSourceLine(message: SourceLine, bb: ByteBuffer): void;
|
||||
export declare function decodeSourceLine(buffer: ByteBuffer): SourceLine;
|
||||
export declare function encodeStackTrace(message: StackTrace, bb: ByteBuffer): void;
|
||||
export declare function decodeStackTrace(buffer: ByteBuffer): StackTrace;
|
||||
export declare function encodeJSException(message: JSException, bb: ByteBuffer): void;
|
||||
export declare function decodeJSException(buffer: ByteBuffer): JSException;
|
||||
export declare function encodeProblems(message: Problems, bb: ByteBuffer): void;
|
||||
export declare function decodeProblems(buffer: ByteBuffer): Problems;
|
||||
export declare function encodeRouter(message: Router, bb: ByteBuffer): void;
|
||||
export declare function decodeRouter(buffer: ByteBuffer): Router;
|
||||
export declare function encodeFallbackMessageContainer(message: FallbackMessageContainer, bb: ByteBuffer): void;
|
||||
export declare function decodeFallbackMessageContainer(buffer: ByteBuffer): FallbackMessageContainer;
|
||||
export declare function encodeJSX(message: JSX, bb: ByteBuffer): void;
|
||||
export declare function decodeJSX(buffer: ByteBuffer): JSX;
|
||||
export declare function encodeStringPointer(message: StringPointer, bb: ByteBuffer): void;
|
||||
export declare function decodeStringPointer(buffer: ByteBuffer): StringPointer;
|
||||
export declare function encodeJavascriptBundledModule(message: JavascriptBundledModule, bb: ByteBuffer): void;
|
||||
export declare function decodeJavascriptBundledModule(buffer: ByteBuffer): JavascriptBundledModule;
|
||||
export declare function encodeJavascriptBundledPackage(message: JavascriptBundledPackage, bb: ByteBuffer): void;
|
||||
export declare function decodeJavascriptBundledPackage(buffer: ByteBuffer): JavascriptBundledPackage;
|
||||
export declare function encodeJavascriptBundle(message: JavascriptBundle, bb: ByteBuffer): void;
|
||||
export declare function decodeJavascriptBundle(buffer: ByteBuffer): JavascriptBundle;
|
||||
export declare function encodeJavascriptBundleContainer(message: JavascriptBundleContainer, bb: ByteBuffer): void;
|
||||
export declare function decodeJavascriptBundleContainer(buffer: ByteBuffer): JavascriptBundleContainer;
|
||||
export declare function encodeModuleImportRecord(message: ModuleImportRecord, bb: ByteBuffer): void;
|
||||
export declare function decodeModuleImportRecord(buffer: ByteBuffer): ModuleImportRecord;
|
||||
export declare function encodeModule(message: Module, bb: ByteBuffer): void;
|
||||
export declare function decodeModule(buffer: ByteBuffer): Module;
|
||||
export declare function encodeStringMap(message: StringMap, bb: ByteBuffer): void;
|
||||
export declare function decodeStringMap(buffer: ByteBuffer): StringMap;
|
||||
export declare function encodeLoaderMap(message: LoaderMap, bb: ByteBuffer): void;
|
||||
export declare function decodeLoaderMap(buffer: ByteBuffer): LoaderMap;
|
||||
export declare function encodeEnvConfig(message: EnvConfig, bb: ByteBuffer): void;
|
||||
export declare function decodeEnvConfig(buffer: ByteBuffer): EnvConfig;
|
||||
export declare function encodeLoadedEnvConfig(message: LoadedEnvConfig, bb: ByteBuffer): void;
|
||||
export declare function decodeLoadedEnvConfig(buffer: ByteBuffer): LoadedEnvConfig;
|
||||
export declare function encodeFrameworkConfig(message: FrameworkConfig, bb: ByteBuffer): void;
export declare function decodeFrameworkConfig(buffer: ByteBuffer): FrameworkConfig;
export declare function encodeFrameworkEntryPoint(message: FrameworkEntryPoint, bb: ByteBuffer): void;
export declare function decodeFrameworkEntryPoint(buffer: ByteBuffer): FrameworkEntryPoint;
export declare function encodeFrameworkEntryPointMap(message: FrameworkEntryPointMap, bb: ByteBuffer): void;
export declare function decodeFrameworkEntryPointMap(buffer: ByteBuffer): FrameworkEntryPointMap;
export declare function encodeFrameworkEntryPointMessage(message: FrameworkEntryPointMessage, bb: ByteBuffer): void;
export declare function decodeFrameworkEntryPointMessage(buffer: ByteBuffer): FrameworkEntryPointMessage;
export declare function encodeLoadedFramework(message: LoadedFramework, bb: ByteBuffer): void;
export declare function decodeLoadedFramework(buffer: ByteBuffer): LoadedFramework;
export declare function encodeLoadedRouteConfig(message: LoadedRouteConfig, bb: ByteBuffer): void;
export declare function decodeLoadedRouteConfig(buffer: ByteBuffer): LoadedRouteConfig;
export declare function encodeRouteConfig(message: RouteConfig, bb: ByteBuffer): void;
export declare function decodeRouteConfig(buffer: ByteBuffer): RouteConfig;
export declare function encodeTransformOptions(message: TransformOptions, bb: ByteBuffer): void;
export declare function decodeTransformOptions(buffer: ByteBuffer): TransformOptions;
export declare function encodeFileHandle(message: FileHandle, bb: ByteBuffer): void;
export declare function decodeFileHandle(buffer: ByteBuffer): FileHandle;
export declare function encodeTransform(message: Transform, bb: ByteBuffer): void;
export declare function decodeTransform(buffer: ByteBuffer): Transform;
export declare function encodeScan(message: Scan, bb: ByteBuffer): void;
export declare function decodeScan(buffer: ByteBuffer): Scan;
export declare function encodeScanResult(message: ScanResult, bb: ByteBuffer): void;
export declare function decodeScanResult(buffer: ByteBuffer): ScanResult;
export declare function encodeScannedImport(message: ScannedImport, bb: ByteBuffer): void;
export declare function decodeScannedImport(buffer: ByteBuffer): ScannedImport;
export declare function encodeOutputFile(message: OutputFile, bb: ByteBuffer): void;
export declare function decodeOutputFile(buffer: ByteBuffer): OutputFile;
export declare function encodeTransformResponse(message: TransformResponse, bb: ByteBuffer): void;
export declare function decodeTransformResponse(buffer: ByteBuffer): TransformResponse;
export declare function encodeLocation(message: Location, bb: ByteBuffer): void;
export declare function decodeLocation(buffer: ByteBuffer): Location;
export declare function encodeMessageData(message: MessageData, bb: ByteBuffer): void;
export declare function decodeMessageData(buffer: ByteBuffer): MessageData;
export declare function encodeMessageMeta(message: MessageMeta, bb: ByteBuffer): void;
export declare function decodeMessageMeta(buffer: ByteBuffer): MessageMeta;
export declare function encodeMessage(message: Message, bb: ByteBuffer): void;
export declare function decodeMessage(buffer: ByteBuffer): Message;
export declare function encodeLog(message: Log, bb: ByteBuffer): void;
export declare function decodeLog(buffer: ByteBuffer): Log;
export declare function encodeWebsocketMessage(message: WebsocketMessage, bb: ByteBuffer): void;
export declare function decodeWebsocketMessage(buffer: ByteBuffer): WebsocketMessage;
export declare function encodeWebsocketMessageWelcome(message: WebsocketMessageWelcome, bb: ByteBuffer): void;
export declare function decodeWebsocketMessageWelcome(buffer: ByteBuffer): WebsocketMessageWelcome;
export declare function encodeWebsocketMessageFileChangeNotification(
  message: WebsocketMessageFileChangeNotification,
  bb: ByteBuffer,
): void;
export declare function decodeWebsocketMessageFileChangeNotification(
  buffer: ByteBuffer,
): WebsocketMessageFileChangeNotification;
export declare function encodeWebsocketCommand(message: WebsocketCommand, bb: ByteBuffer): void;
export declare function decodeWebsocketCommand(buffer: ByteBuffer): WebsocketCommand;
export declare function encodeWebsocketCommandBuild(message: WebsocketCommandBuild, bb: ByteBuffer): void;
export declare function decodeWebsocketCommandBuild(buffer: ByteBuffer): WebsocketCommandBuild;
export declare function encodeWebsocketCommandManifest(message: WebsocketCommandManifest, bb: ByteBuffer): void;
export declare function decodeWebsocketCommandManifest(buffer: ByteBuffer): WebsocketCommandManifest;
export declare function encodeWebsocketMessageBuildSuccess(message: WebsocketMessageBuildSuccess, bb: ByteBuffer): void;
export declare function decodeWebsocketMessageBuildSuccess(buffer: ByteBuffer): WebsocketMessageBuildSuccess;
export declare function encodeWebsocketMessageBuildFailure(message: WebsocketMessageBuildFailure, bb: ByteBuffer): void;
export declare function decodeWebsocketMessageBuildFailure(buffer: ByteBuffer): WebsocketMessageBuildFailure;
export declare function encodeWebsocketCommandBuildWithFilePath(
  message: WebsocketCommandBuildWithFilePath,
  bb: ByteBuffer,
): void;
export declare function decodeWebsocketCommandBuildWithFilePath(buffer: ByteBuffer): WebsocketCommandBuildWithFilePath;
export declare function encodeWebsocketMessageResolveID(message: WebsocketMessageResolveID, bb: ByteBuffer): void;
export declare function decodeWebsocketMessageResolveID(buffer: ByteBuffer): WebsocketMessageResolveID;
export declare function encodeNPMRegistry(message: NPMRegistry, bb: ByteBuffer): void;
export declare function decodeNPMRegistry(buffer: ByteBuffer): NPMRegistry;
export declare function encodeNPMRegistryMap(message: NPMRegistryMap, bb: ByteBuffer): void;
export declare function decodeNPMRegistryMap(buffer: ByteBuffer): NPMRegistryMap;
export declare function encodeBunInstall(message: BunInstall, bb: ByteBuffer): void;
export declare function decodeBunInstall(buffer: ByteBuffer): BunInstall;
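
For orientation, a minimal sketch of how one of these generated encode/decode pairs might be driven end to end. The "peechy" ByteBuffer import, the "./schema" module path, and the toUint8Array() call are assumptions for illustration, not APIs confirmed by this diff:

import { ByteBuffer } from "peechy";
import { encodeWebsocketCommandBuild, decodeWebsocketCommandBuild } from "./schema";

// Write a WebsocketCommandBuild (a single uint32 id) into a growable buffer.
const bb = new ByteBuffer();
encodeWebsocketCommandBuild({ id: 42 }, bb);

// Re-read the same bytes back into a typed object.
const decoded = decodeWebsocketCommandBuild(new ByteBuffer(bb.toUint8Array()));
console.log(decoded.id); // 42
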
3257 src/api/demo/schema.js Normal file
File diff suppressed because it is too large
553 src/api/demo/schema.peechy Normal file
@@ -0,0 +1,553 @@
package Api;

smol Loader {
  jsx = 1;
  js = 2;
  ts = 3;
  tsx = 4;
  css = 5;
  file = 6;
  json = 7;
  toml = 8;
  wasm = 9;
}

smol FrameworkEntryPointType {
  client = 1;
  server = 2;
  fallback = 3;
}

smol StackFrameScope {
  Eval = 1;
  Module = 2;
  Function = 3;
  Global = 4;
  Wasm = 5;
  Constructor = 6;
}

struct StackFrame {
  string function_name;
  string file;
  StackFramePosition position;
  StackFrameScope scope;
}

struct StackFramePosition {
  int32 source_offset;
  int32 line;
  int32 line_start;
  int32 line_stop;
  int32 column_start;
  int32 column_stop;
  int32 expression_start;
  int32 expression_stop;
}

struct SourceLine {
  int32 line;
  string text;
}

struct StackTrace {
  SourceLine[] source_lines;
  StackFrame[] frames;
}
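
The StackFrame/StackTrace pair is the shape of the data behind bun's error output. As a hedged illustration of how the pieces fit together (the "./schema" import path and the snake_case TypeScript field names are assumptions about the generated types), a decoded StackTrace could be rendered like this:

import type { StackFrame, StackTrace } from "./schema";

// Render one frame as "functionName (file:line:column)".
function formatFrame(frame: StackFrame): string {
  const { line, column_start } = frame.position;
  return `${frame.function_name || "<anonymous>"} (${frame.file}:${line}:${column_start})`;
}

// Join the decoded frames into a conventional stack-trace string.
function formatStackTrace(trace: StackTrace): string {
  return trace.frames.map(frame => `    at ${formatFrame(frame)}`).join("\n");
}
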
message JSException {
  string name = 1;
  string message = 2;

  uint16 runtime_type = 3;
  uint8 code = 4;

  StackTrace stack = 5;
}

smol FallbackStep {
  ssr_disabled = 1;
  create_vm = 2;
  configure_router = 3;
  configure_defines = 4;
  resolve_entry_point = 5;
  load_entry_point = 6;
  eval_entry_point = 7;
  fetch_event_handler = 8;
}

struct Problems {
  uint16 code;
  string name;

  JSException[] exceptions;
  Log build;
}

struct Router {
  StringMap routes;
  int32 route;
  StringMap params;
}

message FallbackMessageContainer {
  string message = 1;
  Router router = 2;
  FallbackStep reason = 3;
  Problems problems = 4;
  string cwd = 5;
}

smol ResolveMode {
  disable = 1;
  lazy = 2;
  dev = 3;
  bundle = 4;
}

smol Target {
  browser = 1;
  node = 2;
  bun = 3;
  bun_macro = 4;
}

smol CSSInJSBehavior {
  facade = 1;
  facade_onimportcss = 2;
  auto_onimportcss = 3;
}

smol JSXRuntime {
  automatic = 1;
  classic = 2;
}

struct JSX {
  string factory;
  JSXRuntime runtime;
  string fragment;
  bool development;

  // Probably react
  string import_source;

  bool react_fast_refresh;
}

struct StringPointer {
  uint32 offset;
  uint32 length;
}

struct JavascriptBundledModule {
  // package-relative path including file extension
  StringPointer path;

  // Source code
  StringPointer code;

  // index into JavascriptBundle.packages
  uint32 package_id;

  // The ESM export is this id ("$" + number.toString(16))
  uint32 id;

  // This lets us efficiently compare strings ignoring the extension
  byte path_extname_length;
}

struct JavascriptBundledPackage {
  StringPointer name;
  StringPointer version;
  uint32 hash;

  uint32 modules_offset;
  uint32 modules_length;
}

struct JavascriptBundle {
  // These are sorted alphabetically so you can do binary search
  JavascriptBundledModule[] modules;
  JavascriptBundledPackage[] packages;

  // This is ASCII-encoded so you can send it directly over HTTP
  byte[] etag;

  uint32 generated_at;

  // generated by hashing all ${name}@${version} in sorted order
  byte[] app_package_json_dependencies_hash;

  byte[] import_from_name;

  // This is what StringPointer refers to
  byte[] manifest_string;
}
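
Since every string in the bundle is a StringPointer into manifest_string, and modules are sorted by path so binary search works, reading metadata out of a decoded JavascriptBundle could look roughly like the sketch below. The Uint8Array representation of byte[] fields and the "./schema" import are assumptions about the generated TypeScript, not something this diff confirms:

import type { JavascriptBundle, JavascriptBundledModule, StringPointer } from "./schema";

// Resolve a StringPointer against the bundle's manifest_string bytes.
function readString(bundle: JavascriptBundle, ptr: StringPointer): string {
  return new TextDecoder().decode(
    bundle.manifest_string.subarray(ptr.offset, ptr.offset + ptr.length),
  );
}

// modules are sorted by path, so a plain binary search finds one in O(log n).
function findModule(bundle: JavascriptBundle, path: string): JavascriptBundledModule | null {
  let lo = 0;
  let hi = bundle.modules.length - 1;
  while (lo <= hi) {
    const mid = (lo + hi) >> 1;
    const candidate = readString(bundle, bundle.modules[mid].path);
    if (candidate === path) return bundle.modules[mid];
    if (candidate < path) lo = mid + 1;
    else hi = mid - 1;
  }
  return null;
}

// The ESM export name derived from a module id, per the comment above.
const exportName = (mod: JavascriptBundledModule) => "$" + mod.id.toString(16);
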
message JavascriptBundleContainer {
  uint32 bundle_format_version = 1;

  // These go first so if we change JavaScriptBundle we can still read these
  LoadedRouteConfig routes = 3;
  LoadedFramework framework = 2;

  JavascriptBundle bundle = 4;

  // Don't technically need to store this, but it may be helpful as a sanity check
  uint32 code_length = 5;
}

smol ScanDependencyMode {
  app = 1;
  all = 2;
}

smol ModuleImportType {
  import = 1;
  require = 2;
}

struct ModuleImportRecord {
  ModuleImportType kind;
  string path;

  bool dynamic;
}

struct Module {
  string path;
  ModuleImportRecord[] imports;
}

struct StringMap {
  string[] keys;
  string[] values;
}
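
StringMap stores parallel keys/values arrays rather than a map type, which keeps the wire format simple. A hedged sketch of converting it to and from a plain object (the "./schema" import is hypothetical):

import type { StringMap } from "./schema";

// Parallel arrays -> plain object; later duplicates win, matching index order.
function toRecord(map: StringMap): Record<string, string> {
  const out: Record<string, string> = {};
  for (let i = 0; i < map.keys.length; i++) out[map.keys[i]] = map.values[i];
  return out;
}

// Plain object -> parallel arrays, ready to embed in a larger message.
function fromRecord(record: Record<string, string>): StringMap {
  return { keys: Object.keys(record), values: Object.values(record) };
}
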
struct LoaderMap {
  string[] extensions;
  Loader[] loaders;
}

enum DotEnvBehavior {
  disable = 1;
  prefix = 2;
  load_all = 3;
}

message EnvConfig {
  string prefix = 1;
  StringMap defaults = 2;
}

struct LoadedEnvConfig {
  DotEnvBehavior dotenv;

  StringMap defaults;
  string prefix;
}

message FrameworkConfig {
  string package = 1;
  FrameworkEntryPointMessage client = 2;
  FrameworkEntryPointMessage server = 3;
  FrameworkEntryPointMessage fallback = 4;
  bool development = 5;

  CSSInJSBehavior client_css_in_js = 6;
  string display_name = 7;

  StringMap overrideModules = 8;
}

struct FrameworkEntryPoint {
  FrameworkEntryPointType kind;
  string path;
  LoadedEnvConfig env;
}

message FrameworkEntryPointMap {
  FrameworkEntryPoint client = 1;
  FrameworkEntryPoint server = 2;
  FrameworkEntryPoint fallback = 3;
}

message FrameworkEntryPointMessage {
  string path = 1;
  EnvConfig env = 2;
}

struct LoadedFramework {
  string package;
  string display_name;
  bool development;
  FrameworkEntryPointMap entry_points;
  CSSInJSBehavior client_css_in_js;
  StringMap overrideModules;
}

struct LoadedRouteConfig {
  string dir;
  string[] extensions;
  string static_dir;
  string asset_prefix;
}

message RouteConfig {
  string[] dir = 1;
  string[] extensions = 2;
  string static_dir = 3;
  string asset_prefix = 4;
}

message TransformOptions {
  JSX jsx = 1;
  string tsconfig_override = 2;
  ResolveMode resolve = 3;

  string origin = 4;
  string absolute_working_dir = 5;

  StringMap define = 6;

  bool preserve_symlinks = 7;

  string[] entry_points = 8;
  bool write = 9;

  string[] inject = 10;
  string output_dir = 11;

  string[] external = 12;

  LoaderMap loaders = 13;

  string[] main_fields = 14;
  Target target = 15;

  bool serve = 16;

  string[] extension_order = 17;

  bool generate_node_module_bundle = 18;

  string node_modules_bundle_path = 19;
  string node_modules_bundle_path_server = 20;

  FrameworkConfig framework = 21;
  RouteConfig router = 22;
  bool no_summary = 23;

  bool disable_hmr = 24;

  uint16 port = 25;
  MessageLevel logLevel = 26;
}
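
Because TransformOptions is declared as a message, every field carries an explicit tag, and in the kiwi-style convention this schema appears to follow, message fields are optional on the wire, so callers can encode only what they set. A hedged sketch, assuming the generated TypeScript marks message fields optional and reusing the hypothetical "peechy" and "./schema" imports:

import { ByteBuffer } from "peechy";
import { encodeTransformOptions, decodeTransformOptions } from "./schema";

// Only the fields we actually set are written; tags let older/newer readers
// skip fields they don't recognize.
const bb = new ByteBuffer();
encodeTransformOptions(
  {
    absolute_working_dir: "/app",
    entry_points: ["./src/index.tsx"],
    write: true,
  },
  bb,
);

const roundTripped = decodeTransformOptions(new ByteBuffer(bb.toUint8Array()));
console.log(roundTripped.entry_points); // ["./src/index.tsx"]
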
struct FileHandle {
  string path;
  uint size;
  uint fd;
}

message Transform {
  FileHandle handle = 1;
  string path = 2;
  byte[] contents = 3;

  Loader loader = 4;
  TransformOptions options = 5;
}

enum TransformResponseStatus {
  success = 1;
  fail = 2;
}

struct OutputFile {
  byte[] data;
  string path;
}

struct TransformResponse {
  TransformResponseStatus status;
  OutputFile[] files;
  Message[] errors;
}

enum MessageLevel {
  err = 1;
  warn = 2;
  note = 3;
  info = 4;
  debug = 5;
}

struct Location {
  string file;
  string namespace;
  int32 line;
  int32 column;
  string line_text;
  string suggestion;
  uint offset;
}

message MessageData {
  string text = 1;
  Location location = 2;
}

message MessageMeta {
  string resolve = 1;
  bool build = 2;
}

struct Message {
  MessageLevel level;
  MessageData data;
  MessageData[] notes;
  MessageMeta on;
}

struct Log {
  uint32 warnings;
  uint32 errors;
  Message[] msgs;
}

smol Reloader {
  disable = 1;
  // equivalent of CMD + R
  live = 2;
  // React Fast Refresh
  fast_refresh = 3;
}

// The WebSocket protocol
// Server: "hey, this file changed. Does anyone want it?"
// Browser: *checks array* "uhh yeah, ok. rebuild that for me"
// Server: "here u go"
// This makes the client responsible for tracking which files it needs to listen for.
// From a server perspective, this means the filesystem-watching thread can send the same WebSocket message
// to every client, which is good for performance: five open tabs cost about the same as one.
// The clients can just ignore files they don't care about.
smol WebsocketMessageKind {
  welcome = 1;
  file_change_notification = 2;
  build_success = 3;
  build_fail = 4;
  manifest_success = 5;
  manifest_fail = 6;
  resolve_file = 7;
  file_change_notification_with_hint = 8;
}

smol WebsocketCommandKind {
  build = 1;
  manifest = 2;
  build_with_file_path = 3;
}

// Each websocket frame carries two messages!
// This is the first.
struct WebsocketMessage {
  uint32 timestamp;
  WebsocketMessageKind kind;
}

// This is the second, sent when kind == welcome.
struct WebsocketMessageWelcome {
  uint32 epoch;
  Reloader javascriptReloader;
  string cwd;
}

struct WebsocketMessageFileChangeNotification {
  uint32 id;
  Loader loader;
}

struct WebsocketCommand {
  WebsocketCommandKind kind;
  uint32 timestamp;
}

// The timestamp is used for client-side deduping
struct WebsocketCommandBuild {
  uint32 id;
}

struct WebsocketCommandManifest {
  uint32 id;
}

// We copy the module_path here in case they don't already have it
struct WebsocketMessageBuildSuccess {
  uint32 id;
  uint32 from_timestamp;

  Loader loader;
  string module_path;

  // This is the length of the blob that immediately follows this message.
  uint32 blob_length;
}
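
Putting the protocol comment above into practice, a browser-side handler might decode the leading WebsocketMessage, then decode the kind-specific payload from the same buffer and answer with a build command. This is a hedged sketch: the sequential two-decode framing, the enum values being exported at runtime, the trailing-blob layout, and the "peechy" / "./schema" imports are assumptions drawn from the comments in this schema, not verified against bun's client code.

import { ByteBuffer } from "peechy";
import {
  decodeWebsocketMessage,
  decodeWebsocketMessageFileChangeNotification,
  decodeWebsocketMessageBuildSuccess,
  encodeWebsocketCommand,
  encodeWebsocketCommandBuild,
  WebsocketMessageKind,   // assumed to be exported as a runtime value
  WebsocketCommandKind,   // assumed to be exported as a runtime value
} from "./schema";

// ids of modules this tab actually imported; everything else is ignored.
const watchedIds = new Set<number>();

function onSocketMessage(socket: WebSocket, payload: Uint8Array) {
  const bb = new ByteBuffer(payload);
  const header = decodeWebsocketMessage(bb); // first message in the frame

  if (header.kind === WebsocketMessageKind.file_change_notification) {
    const change = decodeWebsocketMessageFileChangeNotification(bb); // second message
    if (!watchedIds.has(change.id)) return; // not ours, ignore

    // Ask the server to rebuild just that module.
    const reply = new ByteBuffer();
    encodeWebsocketCommand({ kind: WebsocketCommandKind.build, timestamp: header.timestamp }, reply);
    encodeWebsocketCommandBuild({ id: change.id }, reply);
    socket.send(reply.toUint8Array());
  } else if (header.kind === WebsocketMessageKind.build_success) {
    const built = decodeWebsocketMessageBuildSuccess(bb);
    // The compiled blob of built.blob_length bytes follows immediately after.
    const blob = payload.subarray(payload.length - built.blob_length);
    // ...hand `blob` to the hot-reload runtime here.
  }
}
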
struct WebsocketMessageBuildFailure {
  uint32 id;
  uint32 from_timestamp;
  Loader loader;

  string module_path;
  Log log;
}

struct WebsocketCommandBuildWithFilePath {
  uint32 id;
  string file_path;
}

struct WebsocketMessageResolveID {
  uint32 id;
}

struct NPMRegistry {
  string url;
  string username;
  string password;
  string token;
}

struct NPMRegistryMap {
  string[] scopes;
  NPMRegistry[] registries;
}
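
Like StringMap, NPMRegistryMap pairs two parallel arrays, mapping package scopes to registries. A hedged lookup sketch (the "./schema" import is hypothetical):

import type { NPMRegistry, NPMRegistryMap } from "./schema";

// scopes[i] (e.g. "@myorg") maps to registries[i]; undefined when the scope isn't configured.
function registryForScope(map: NPMRegistryMap, scope: string): NPMRegistry | undefined {
  const index = map.scopes.indexOf(scope);
  return index === -1 ? undefined : map.registries[index];
}
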
message BunInstall {
  NPMRegistry default_registry = 1;
  NPMRegistryMap scoped = 2;
  string lockfile_path = 3;
  string save_lockfile_path = 4;
  string cache_directory = 5;
  bool dry_run = 6;
  bool force = 7;
  bool save_dev = 8;
  bool save_optional = 9;
  bool save_peer = 10;
  bool save_lockfile = 11;
  bool production = 12;
  bool save_yarn_lockfile = 13;
  string[] native_bin_links = 14;

  bool disable_cache = 15;
  bool disable_manifest_cache = 16;
  string global_dir = 17;
  string global_bin_dir = 18;
}

Some files were not shown because too many files have changed in this diff.