Mirror of https://github.com/oven-sh/bun (synced 2026-02-02 23:18:47 +00:00)

Compare commits: debugger-d ... jarred/bro

356 Commits
[Commit table: 356 commits listed by abbreviated SHA, from 9c4ab91459 through 1f3da24fe0; the author, date, and message columns were not captured in this mirror.]
.github/workflows/bun-linux-aarch64.yml (vendored): 2 changes

@@ -36,7 +36,7 @@ jobs:
arch: aarch64
build_arch: arm64
runner: linux-arm64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-linux-arm64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-4/bun-webkit-linux-arm64-lto.tar.gz"
webkit_basename: "bun-webkit-linux-arm64-lto"
build_machine_arch: aarch64
.github/workflows/bun-linux-build.yml (vendored): 13 changes

@@ -46,7 +46,7 @@ jobs:
arch: x86_64
build_arch: amd64
runner: big-ubuntu
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-linux-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-4/bun-webkit-linux-amd64-lto.tar.gz"
webkit_basename: "bun-webkit-linux-amd64-lto"
build_machine_arch: x86_64
- cpu: nehalem

@@ -54,7 +54,7 @@ jobs:
arch: x86_64
build_arch: amd64
runner: big-ubuntu
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-linux-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-4/bun-webkit-linux-amd64-lto.tar.gz"
webkit_basename: "bun-webkit-linux-amd64-lto"
build_machine_arch: x86_64

@@ -187,10 +187,15 @@ jobs:
unzip bun-${{matrix.tag}}.zip
cd bun-${{matrix.tag}}
chmod +x bun
sudo mv bun /usr/local/bin/bun
bun --version
pwd >> $GITHUB_PATH
./bun --version
- id: test
name: Test (node runner)
env:
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
PRISMA_POSTGRES_DATABASE_URL: ${{ secrets.PRISMA_POSTGRES_DATABASE_URL }}
PRISMA_MONGODB_DATABASE_URL: ${{ secrets.PRISMA_MONGODB_DATABASE_URL }}
# if: ${{github.event.inputs.use_bun == 'false'}}
run: |
bun install
.github/workflows/bun-mac-aarch64.yml (vendored): 37 changes

@@ -117,7 +117,7 @@ jobs:
# obj: bun-obj-darwin-x64-baseline
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-4/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
# - cpu: haswell

@@ -126,7 +126,7 @@ jobs:
# obj: bun-obj-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-4/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
# - cpu: nehalem

@@ -135,7 +135,7 @@ jobs:
# obj: bun-obj-darwin-x64-baseline
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-4/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
# - cpu: haswell

@@ -144,7 +144,7 @@ jobs:
# obj: bun-obj-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-4/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
- cpu: native

@@ -152,7 +152,7 @@ jobs:
tag: bun-darwin-aarch64
obj: bun-obj-darwin-aarch64
artifact: bun-obj-darwin-aarch64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-arm64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-4/bun-webkit-macos-arm64-lto.tar.gz"
runner: macos-arm64
dependencies: true
compile_obj: true

@@ -173,9 +173,9 @@ jobs:
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
brew install ccache rust llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "export PATH=$(brew --prefix ccache)/bin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@15)/bin" >> $GITHUB_PATH
brew link --overwrite llvm@15
- name: ccache
uses: hendrikmuhs/ccache-action@v1.2

@@ -257,7 +257,7 @@ jobs:
# package: bun-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-4/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: haswell
# arch: x86_64
# tag: bun-darwin-x64

@@ -265,14 +265,14 @@ jobs:
# package: bun-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-4/bun-webkit-macos-amd64-lto.tar.gz"
- cpu: native
arch: aarch64
tag: bun-darwin-aarch64
obj: bun-obj-darwin-aarch64
package: bun-darwin-aarch64
artifact: bun-obj-darwin-aarch64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-arm64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-4/bun-webkit-macos-arm64-lto.tar.gz"
runner: macos-arm64
steps:
- uses: actions/checkout@v3

@@ -291,8 +291,8 @@ jobs:
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
brew install rust ccache llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@15)/bin" >> $GITHUB_PATH
brew link --overwrite llvm@15
- name: ccache
uses: hendrikmuhs/ccache-action@v1.2

@@ -397,7 +397,7 @@ jobs:
runs-on: ${{ matrix.runner }}
needs: [macOS]
if: github.event_name == 'pull_request' && github.repository_owner == 'oven-sh'
timeout-minutes: 10
timeout-minutes: 30
outputs:
failing_tests: ${{ steps.test.outputs.failing_tests }}
failing_tests_count: ${{ steps.test.outputs.failing_tests_count }}

@@ -426,10 +426,15 @@ jobs:
unzip ${{matrix.tag}}.zip
cd ${{matrix.tag}}
chmod +x bun
sudo mv bun /usr/local/bin/bun
bun --version
pwd >> $GITHUB_PATH
./bun --version
- id: test
name: Test (node runner)
env:
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
PRISMA_POSTGRES_DATABASE_URL: ${{ secrets.PRISMA_POSTGRES_DATABASE_URL }}
PRISMA_MONGODB_DATABASE_URL: ${{ secrets.PRISMA_MONGODB_DATABASE_URL }}
# if: ${{github.event.inputs.use_bun == 'false'}}
run: |
bun install
.github/workflows/bun-mac-x64-baseline.yml (vendored): 35 changes

@@ -117,7 +117,7 @@ jobs:
obj: bun-obj-darwin-x64-baseline
runner: macos-11
artifact: bun-obj-darwin-x64-baseline
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-4/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: true
compile_obj: false
# - cpu: haswell

@@ -126,7 +126,7 @@ jobs:
# obj: bun-obj-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-4/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
- cpu: nehalem

@@ -135,7 +135,7 @@ jobs:
obj: bun-obj-darwin-x64-baseline
runner: macos-11
artifact: bun-obj-darwin-x64-baseline
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-4/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: false
compile_obj: true
# - cpu: haswell

@@ -144,7 +144,7 @@ jobs:
# obj: bun-obj-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-4/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
# - cpu: native

@@ -152,7 +152,7 @@ jobs:
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-4/bun-webkit-macos-amd64-lto.tar.gz"
# runner: macos-arm64
# dependencies: true
# compile_obj: true

@@ -173,9 +173,9 @@ jobs:
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
brew install ccache rust llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "export PATH=$(brew --prefix ccache)/bin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@15)/bin" >> $GITHUB_PATH
brew link --overwrite llvm@15
- name: ccache (dependencies)
uses: hendrikmuhs/ccache-action@v1.2

@@ -258,7 +258,7 @@ jobs:
package: bun-darwin-x64
runner: macos-11
artifact: bun-obj-darwin-x64-baseline
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-4/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: haswell
# arch: x86_64
# tag: bun-darwin-x64

@@ -266,14 +266,14 @@ jobs:
# package: bun-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-4/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: native
# arch: aarch64
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# package: bun-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-4/bun-webkit-macos-amd64-lto.tar.gz"
# runner: macos-arm64
steps:
- uses: actions/checkout@v3

@@ -292,8 +292,8 @@ jobs:
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
brew install ccache rust llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@15)/bin" >> $GITHUB_PATH
brew link --overwrite llvm@15
- name: ccache (link)
uses: hendrikmuhs/ccache-action@v1.2

@@ -430,10 +430,15 @@ jobs:
unzip ${{matrix.tag}}.zip
cd ${{matrix.tag}}
chmod +x bun
sudo mv bun /usr/local/bin/bun
bun --version
pwd >> $GITHUB_PATH
./bun --version
- id: test
name: Test (node runner)
env:
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
PRISMA_POSTGRES_DATABASE_URL: ${{ secrets.PRISMA_POSTGRES_DATABASE_URL }}
PRISMA_MONGODB_DATABASE_URL: ${{ secrets.PRISMA_MONGODB_DATABASE_URL }}
# if: ${{github.event.inputs.use_bun == 'false'}}
run: |
bun install
.github/workflows/bun-mac-x64.yml (vendored): 33 changes

@@ -117,7 +117,7 @@ jobs:
# obj: bun-obj-darwin-x64-baseline
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-4/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
- cpu: haswell

@@ -126,7 +126,7 @@ jobs:
obj: bun-obj-darwin-x64
runner: macos-11
artifact: bun-obj-darwin-x64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-4/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: true
compile_obj: false
# - cpu: nehalem

@@ -135,7 +135,7 @@ jobs:
# obj: bun-obj-darwin-x64-baseline
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-4/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
- cpu: haswell

@@ -144,7 +144,7 @@ jobs:
obj: bun-obj-darwin-x64
runner: macos-11
artifact: bun-obj-darwin-x64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-4/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: false
compile_obj: true
# - cpu: native

@@ -152,7 +152,7 @@ jobs:
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-arm64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-4/bun-webkit-macos-arm64-lto.tar.gz"
# runner: macos-arm64
# dependencies: true
# compile_obj: true

@@ -173,8 +173,8 @@ jobs:
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
brew install rust ccache llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@15)/bin" >> $GITHUB_PATH
brew link --overwrite llvm@15
- name: Download WebKit
if: matrix.compile_obj

@@ -260,7 +260,7 @@ jobs:
# package: bun-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-4/bun-webkit-macos-amd64-lto.tar.gz"
- cpu: haswell
arch: x86_64
tag: bun-darwin-x64

@@ -268,14 +268,14 @@ jobs:
package: bun-darwin-x64
runner: macos-11
artifact: bun-obj-darwin-x64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-4/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: native
# arch: aarch64
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# package: bun-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20/bun-webkit-macos-arm64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/may20-4/bun-webkit-macos-arm64-lto.tar.gz"
# runner: macos-arm64
steps:
- uses: actions/checkout@v3

@@ -294,8 +294,8 @@ jobs:
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
brew install rust ccache llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@15)/bin" >> $GITHUB_PATH
brew link --overwrite llvm@15
- name: Download WebKit
env:

@@ -432,10 +432,15 @@ jobs:
unzip ${{matrix.tag}}.zip
cd ${{matrix.tag}}
chmod +x bun
sudo mv bun /usr/local/bin/bun
bun --version
pwd >> $GITHUB_PATH
./bun --version
- id: test
name: Test (node runner)
env:
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
PRISMA_POSTGRES_DATABASE_URL: ${{ secrets.PRISMA_POSTGRES_DATABASE_URL }}
PRISMA_MONGODB_DATABASE_URL: ${{ secrets.PRISMA_MONGODB_DATABASE_URL }}
# if: ${{github.event.inputs.use_bun == 'false'}}
run: |
bun install
(file path not captured)

@@ -1,4 +1,4 @@
name: bun-release-canary
name: bun-release-types-canary
concurrency: release-canary
on:
push:
.github/workflows/bun-release.yml (vendored): 4 changes

@@ -156,8 +156,8 @@ jobs:
with:
images: oven/bun
tags: |
type=match,pattern=(bun-v)?(\d.\d.\d),group=2,value=${{ env.TAG }}
type=match,pattern=(bun-v)?(\d.\d),group=2,value=${{ env.TAG }}
type=match,pattern=(bun-v)?(\d+.\d+.\d+),group=2,value=${{ env.TAG }}
type=match,pattern=(bun-v)?(\d+.\d+),group=2,value=${{ env.TAG }}
- id: login
name: Login to Docker
uses: docker/login-action@v2
.github/workflows/zig-fmt.yml (vendored): 4 changes

@@ -1,7 +1,7 @@
name: zig-fmt

env:
ZIG_VERSION: 0.11.0-dev.2571+31738de28
ZIG_VERSION: 0.11.0-dev.4006+bf827d0b5

on:
pull_request:

@@ -28,7 +28,7 @@ jobs:
run: |
curl https://ziglang.org/builds/zig-linux-x86_64-${{env.ZIG_VERSION}}.tar.xz -L -o zig.tar.xz
tar -xf zig.tar.xz
sudo mv zig-linux-x86_64-${{env.ZIG_VERSION}}/zig /usr/local/bin
echo "$(pwd)/zig-linux-x86_64-${{env.ZIG_VERSION}}" >> $GITHUB_PATH
- name: Run zig fmt
id: fmt
run: |
.gitignore (vendored): 2 changes

@@ -121,3 +121,5 @@ cold-jsc-start
cold-jsc-start.d

/test.ts

src/js/out/modules_dev
.gitmodules (vendored): 15 changes

@@ -68,4 +68,17 @@ fetchRecurseSubmodules = false
[submodule "src/deps/zstd"]
path = src/deps/zstd
url = https://github.com/facebook/zstd.git
ignore = dirty
ignore = dirty
[submodule "src/deps/base64"]
path = src/deps/base64
url = https://github.com/aklomp/base64.git
ignore = dirty
depth = 1
shallow = true
[submodule "src/deps/brotli"]
path = src/deps/brotli
url = https://github.com/google/brotli.git
ignore = dirty
depth = 1
shallow = true
fetchRecurseSubmodules = false
(file path not captured)

@@ -6,7 +6,7 @@ module.exports = {
quoteProps: "preserve",
overrides: [
{
files: "README.md",
files: ["*.md"],
options: {
printWidth: 80,
},
.vscode/launch.json (generated, vendored): 19 changes

@@ -14,7 +14,8 @@
"name": "bun test [file]",
"program": "bun-debug",
"args": ["test", "${file}"],
"cwd": "${fileDirname}",
// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",

@@ -29,7 +30,8 @@
"name": "bun test [file] (fast)",
"program": "bun-debug",
"args": ["test", "${file}"],
"cwd": "${fileDirname}",
// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1"

@@ -44,7 +46,8 @@
"name": "bun test [file] (verbose)",
"program": "bun-debug",
"args": ["test", "${file}"],
"cwd": "${fileDirname}",
// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1"
},

@@ -57,7 +60,8 @@
"name": "bun test [file] --watch",
"program": "bun-debug",
"args": ["test", "--watch", "${file}"],
"cwd": "${fileDirname}",
// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1"

@@ -71,7 +75,8 @@
"name": "bun test [file] --only",
"program": "bun-debug",
"args": ["test", "--only", "${file}"],
"cwd": "${fileDirname}",
// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1"

@@ -100,6 +105,7 @@
"name": "bun test [*] (fast)",
"program": "bun-debug",
"args": ["test"],
// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",

@@ -114,6 +120,7 @@
"name": "bun test [*] --only",
"program": "bun-debug",
"args": ["test", "--only"],
// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",

@@ -318,7 +325,7 @@
"name": "bun install",
"program": "bun-debug",
"args": ["install"],
"cwd": "${workspaceFolder}",
"cwd": "${fileDirname}",
"console": "internalConsole",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1"
.vscode/settings.json (vendored): 2 changes

@@ -7,6 +7,8 @@
"search.followSymlinks": false,
"search.useIgnoreFiles": true,
"zig.buildOnSave": false,
// We do this until we upgrade to latest Zig so that zls doesn't break our code.
"zig.formattingProvider": "extension",
"zig.buildArgs": ["obj", "-Dfor-editor"],
"zig.buildOption": "build",
"zig.buildFilePath": "${workspaceFolder}/build.zig",
Dockerfile: 49 changes

@@ -10,9 +10,9 @@ ARG ARCH=x86_64
ARG BUILD_MACHINE_ARCH=x86_64
ARG TRIPLET=${ARCH}-linux-gnu
ARG BUILDARCH=amd64
ARG WEBKIT_TAG=may20
ARG WEBKIT_TAG=may20-4
ARG ZIG_TAG=jul1
ARG ZIG_VERSION="0.11.0-dev.2571+31738de28"
ARG ZIG_VERSION="0.11.0-dev.4006+bf827d0b5"
ARG WEBKIT_BASENAME="bun-webkit-linux-$BUILDARCH"

ARG ZIG_FOLDERNAME=zig-linux-${BUILD_MACHINE_ARCH}-${ZIG_VERSION}

@@ -295,6 +295,49 @@ WORKDIR $BUN_DIR
RUN cd $BUN_DIR && \
make uws && rm -rf src/deps/uws Makefile

FROM bun-base as base64

ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}

COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/base64 ${BUN_DIR}/src/deps/base64

WORKDIR $BUN_DIR

RUN cd $BUN_DIR && \
make base64 && rm -rf src/deps/base64 Makefile

FROM bun-base as brotli

ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}

COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/brotli ${BUN_DIR}/src/deps/brotli

WORKDIR $BUN_DIR

RUN cd $BUN_DIR && \
make brotli && rm -rf src/deps/brotli Makefile

FROM bun-base as picohttp

ARG DEBIAN_FRONTEND

@@ -556,6 +599,8 @@ ENV JSC_BASE_DIR=${WEBKIT_DIR}
ENV LIB_ICU_PATH=${WEBKIT_DIR}/lib

COPY --from=zlib ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=base64 ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=brotli ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=libarchive ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=boringssl ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=lolhtml ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
Makefile: 92 changes

@@ -453,7 +453,8 @@ MINIMUM_ARCHIVE_FILES = -L$(BUN_DEPS_OUT_DIR) \
-ldecrepit \
-lssl \
-lcrypto \
-llolhtml
-llolhtml \
-lbase64

ARCHIVE_FILES_WITHOUT_LIBCRYPTO = $(MINIMUM_ARCHIVE_FILES) \
-larchive \

@@ -461,6 +462,9 @@ ARCHIVE_FILES_WITHOUT_LIBCRYPTO = $(MINIMUM_ARCHIVE_FILES) \
-lusockets \
-lcares \
-lzstd \
-lbrotlicommon \
-lbrotlienc \
-lbrotlidec \
$(BUN_DEPS_OUT_DIR)/libuwsockets.o

ARCHIVE_FILES = $(ARCHIVE_FILES_WITHOUT_LIBCRYPTO)

@@ -561,6 +565,9 @@ builtins:
esm:
NODE_ENV=production bun src/js/build-esm.ts

esm-debug:
BUN_DEBUG_QUIET_LOGS=1 NODE_ENV=production bun-debug src/js/build-esm.ts

.PHONY: generate-builtins
generate-builtins: builtins

@@ -680,8 +687,19 @@ require:
@which pkg-config > /dev/null || (echo -e "ERROR: pkg-config is required. Install with:\n\n $(POSIX_PKG_MANAGER) install pkg-config"; exit 1)
@echo "You have the dependencies installed! Woo"

init-submodules:
git submodule update --init --recursive --progress --depth=1 --checkout
# the following allows you to run `make submodule` to update or init submodules. but we will exclude webkit
# unless you explicity clone it yourself (a huge download)
SUBMODULE_NAMES=$(shell cat .gitmodules | grep 'path = ' | awk '{print $$3}')
ifeq ("$(wildcard src/bun.js/WebKit/.git)", "")
SUBMODULE_NAMES := $(filter-out src/bun.js/WebKit, $(SUBMODULE_NAMES))
endif

.PHONY: init-submodules
init-submodules: submodule # (backwards-compatibility alias)

.PHONY: submodule
submodule: ## to init or update all submodules
git submodule update --init --recursive --progress --depth=1 --checkout $(SUBMODULE_NAMES)

.PHONY: build-obj
build-obj:

@@ -848,6 +866,18 @@ fetch: $(IO_FILES)
$(CXX) $(PACKAGE_DIR)/fetch.o -g $(OPTIMIZATION_LEVEL) -o ./misctools/fetch $(IO_FILES) $(DEFAULT_LINKER_FLAGS) -lc $(MINIMUM_ARCHIVE_FILES)
rm -rf $(PACKAGE_DIR)/fetch.o

.PHONY: stream-tester
stream-tester:
$(ZIG) build -Doptimize=ReleaseFast stream-tester-obj
$(CXX) $(PACKAGE_DIR)/stream-tester.o -g $(OPTIMIZATION_LEVEL) -o ./misctools/stream-tester $(DEFAULT_LINKER_FLAGS) -lc $(ARCHIVE_FILES) $(ICU_FLAGS)
rm -rf $(PACKAGE_DIR)/stream-tester.o

.PHONY: stream-tester-debug
stream-tester-debug:
$(ZIG) build stream-tester-obj -Doptimize=Debug
$(CXX) $(DEBUG_PACKAGE_DIR)/stream-tester.o -g3 -o ./misctools/stream-tester $(DEFAULT_LINKER_FLAGS) -lc $(ARCHIVE_FILES) $(ICU_FLAGS)

.PHONY: sha
sha:
$(ZIG) build -Doptimize=ReleaseFast sha-bench-obj

@@ -1085,7 +1115,7 @@ dev-obj-linux:
$(ZIG) build obj -Dtarget=x86_64-linux-gnu -Dcpu="$(CPU_TARGET)"

.PHONY: dev
dev: mkdir-dev esm dev-obj bun-link-lld-debug
dev: mkdir-dev esm dev-obj link ## compile zig changes + link bun

mkdir-dev:
mkdir -p $(DEBUG_PACKAGE_DIR)

@@ -1351,20 +1381,39 @@ mimalloc:
&& ninja;
cp $(BUN_DEPS_DIR)/mimalloc/$(MIMALLOC_INPUT_PATH) $(BUN_DEPS_OUT_DIR)/$(MIMALLOC_FILE)

.PHONY: brotli
brotli:
rm -rf $(BUN_DEPS_DIR)/brotli/CMakeCache* $(BUN_DEPS_DIR)/brotli/CMakeFiles && (cd src/deps/brotli && make clean || echo "")
cd $(BUN_DEPS_DIR)/brotli; \
cmake $(CMAKE_FLAGS) \
-DCMAKE_BUILD_TYPE=Release \
-DBROTLI_DISABLE_TESTS=ON \
-DBROTLI_BUNDLED_MODE=ON \
-DBUILD_SHARED_LIBS=OFF \
-DCMAKE_C_FLAGS="$(CFLAGS)" \
-DCMAKE_CXX_FLAGS="$(CFLAGS)" \
-GNinja \
. \
&& ninja && cp libbrotli*.a $(BUN_DEPS_OUT_DIR);

mimalloc-wasm:
cd $(BUN_DEPS_DIR)/mimalloc; emcmake cmake -DMI_BUILD_SHARED=OFF -DMI_BUILD_STATIC=ON -DMI_BUILD_TESTS=OFF -DMI_BUILD_OBJECT=ON ${MIMALLOC_OVERRIDE_FLAG} -DMI_USE_CXX=ON .; emmake make;
cp $(BUN_DEPS_DIR)/mimalloc/$(MIMALLOC_INPUT_PATH) $(BUN_DEPS_OUT_DIR)/$(MIMALLOC_FILE).wasm

bun-link-lld-debug:
# alias for link, incase anyone still types that
.PHONY: bun-link-lld-debug
bun-link-lld-debug: link

.PHONY: link
link: ## link a debug build of bun
$(CXX) $(BUN_LLD_FLAGS_DEBUG) $(DEBUG_FLAGS) $(SYMBOLS) \
-g \
$(DEBUG_BIN)/bun-debug.o \
-W \
-o $(DEBUG_BIN)/bun-debug
@rm -f $(DEBUG_BIN)/bun-debug.o.o 2> /dev/null # workaround for https://github.com/ziglang/zig/issues/14080
@rm -f $(DEBUG_BIN)/bun-debug.o.o 2> /dev/null # workaround for https://github.com/ziglang/zig/issues/14080

bun-link-lld-debug-no-jsc:
link-no-jsc:
$(CXX) $(BUN_LLD_FLAGS_WITHOUT_JSC) $(SYMBOLS) \
-g \
$(DEBUG_BIN)/bun-debug.o \

@@ -1685,7 +1734,7 @@ sizegen:
# Linux uses bundled SQLite3
ifeq ($(OS_NAME),linux)
sqlite:
$(CC) $(EMIT_LLVM_FOR_RELEASE) $(CFLAGS) $(INCLUDE_DIRS) -DSQLITE_ENABLE_COLUMN_METADATA= -DSQLITE_MAX_VARIABLE_NUMBER=250000 -DSQLITE_ENABLE_RTREE=1 -DSQLITE_ENABLE_FTS3=1 -DSQLITE_ENABLE_FTS3_PARENTHESIS=1 -DSQLITE_ENABLE_JSON1=1 $(SRC_DIR)/sqlite/sqlite3.c -c -o $(SQLITE_OBJECT)
$(CC) $(EMIT_LLVM_FOR_RELEASE) $(CFLAGS) $(INCLUDE_DIRS) -DSQLITE_ENABLE_COLUMN_METADATA= -DSQLITE_MAX_VARIABLE_NUMBER=250000 -DSQLITE_ENABLE_RTREE=1 -DSQLITE_ENABLE_FTS3=1 -DSQLITE_ENABLE_FTS3_PARENTHESIS=1 -DSQLITE_ENABLE_FTS5=1 -DSQLITE_ENABLE_JSON1=1 $(SRC_DIR)/sqlite/sqlite3.c -c -o $(SQLITE_OBJECT)
endif

picohttp:

@@ -1781,7 +1830,7 @@ endif
endif

.PHONY: build-unit
build-unit: ## to build your unit tests
build-unit: # to build your unit tests
@rm -rf zig-out/bin/$(testname)
@mkdir -p zig-out/bin
zig test $(realpath $(testpath)) \

@@ -1799,7 +1848,7 @@ build-unit: ## to build your unit tests
cp zig-out/bin/$(testname) $(testbinpath)

.PHONY: run-all-unit-tests
run-all-unit-tests: ## to run your unit tests
run-all-unit-tests: # to run your unit tests
@rm -rf zig-out/bin/__main_test
@mkdir -p zig-out/bin
zig test src/main.zig \

@@ -1819,15 +1868,11 @@ run-all-unit-tests: ## to run your unit tests
run-unit:
@zig-out/bin/$(testname) $(ZIG)

.PHONY: help
help: ## to print this help
@awk 'BEGIN {FS = ":.*?## "} /^[a-zA-Z0-9_-]+:.*?## / {gsub("\\\\n",sprintf("\n%22c",""), $$2);printf "\033[36m%-20s\033[0m \t\t%s\n", $$1, $$2}' $(MAKEFILE_LIST)

.PHONY: test
test: build-unit run-unit

.PHONY: integration-test-dev
integration-test-dev: ## to run integration tests
integration-test-dev: # to run integration tests
USE_EXISTING_PROCESS=true TEST_SERVER_URL=http://localhost:3000 node test/scripts/browser.js

copy-install:

@@ -1841,6 +1886,10 @@ copy-to-bun-release-dir-bin:

PACKAGE_MAP = --pkg-begin async_io $(BUN_DIR)/src/io/io_darwin.zig --pkg-begin bun $(BUN_DIR)/src/bun_redirect.zig --pkg-end --pkg-end --pkg-begin javascript_core $(BUN_DIR)/src/jsc.zig --pkg-begin bun $(BUN_DIR)/src/bun_redirect.zig --pkg-end --pkg-end --pkg-begin bun $(BUN_DIR)/src/bun_redirect.zig --pkg-end

.PHONY: base64
base64:
cd $(BUN_DEPS_DIR)/base64 && make clean && cmake $(CMAKE_FLAGS) . && make
cp $(BUN_DEPS_DIR)/base64/libbase64.a $(BUN_DEPS_OUT_DIR)/libbase64.a

.PHONY: cold-jsc-start
cold-jsc-start:

@@ -1859,22 +1908,22 @@ cold-jsc-start:
misctools/cold-jsc-start.cpp -o cold-jsc-start

.PHONY: vendor-without-npm
vendor-without-npm: node-fallbacks runtime_js fallback_decoder bun_error mimalloc picohttp zlib boringssl libarchive lolhtml sqlite usockets uws tinycc c-ares zstd
vendor-without-npm: node-fallbacks runtime_js fallback_decoder bun_error mimalloc picohttp zlib boringssl libarchive lolhtml sqlite usockets uws tinycc c-ares zstd base64 brotli

.PHONY: vendor-without-check
vendor-without-check: npm-install vendor-without-npm

.PHONY: vendor
vendor: require init-submodules vendor-without-check
vendor: require submodule vendor-without-check

.PHONY: vendor-dev
vendor-dev: require init-submodules npm-install-dev vendor-without-npm
vendor-dev: require submodule npm-install-dev vendor-without-npm

.PHONY: bun
bun: vendor identifier-cache build-obj bun-link-lld-release bun-codesign-release-local

.PHONY: regenerate-bindings
regenerate-bindings:
regenerate-bindings: ## compile src/js/builtins + all c++ code, does not link
@make clean-bindings builtins
@make bindings -j$(CPU_COUNT)

@@ -1886,3 +1935,8 @@ setup: vendor-dev identifier-cache clean-bindings
@echo "Development environment setup complete"
@echo "Run \`make dev\` to build \`bun-debug\`"
@echo ""

.PHONY: help
help: ## to print this help
@echo "For detailed build instructions, see https://bun.sh/docs/project/development"
@awk 'BEGIN {FS = ":.*?## "} /^[a-zA-Z0-9_-]+:.*?## / {gsub("\\\\n",sprintf("\n%22c",""), $$2);printf "\033[36m%-20s\033[0m \t\t%s\n", $$1, $$2}' $(MAKEFILE_LIST)
(file path not captured)

@@ -43,7 +43,7 @@ pub fn main() anyerror!void {
var position = try std.fmt.parseInt(u32, position_str, 10);
const filepath = try std.fs.path.resolve(allocator, &.{basepath});
var file = try std.fs.openFileAbsolute(filepath, .{ .write = true });
var ms = @truncate(u64, (try std.fmt.parseInt(u128, args[args.len - 1], 10)) * std.time.ns_per_ms);
var ms = @as(u64, @truncate((try std.fmt.parseInt(u128, args[args.len - 1], 10)) * std.time.ns_per_ms));
std.debug.assert(ms > 0);
// std.debug.assert(std.math.isFinite(position));
var prng = std.rand.DefaultPrng.init(0);

@@ -125,30 +125,30 @@ pub fn main() anyerror!void {
);
};

counters[counter].timestamp = @truncate(u64, @intCast(u128, std.time.nanoTimestamp()) / (std.time.ns_per_ms / 10));
counters[counter].timestamp = @as(u64, @truncate(@as(u128, @intCast(std.time.nanoTimestamp())) / (std.time.ns_per_ms / 10)));
counters[counter].rotate = rotate % 360;
counters[counter].percent = std.math.mod(f64, std.math.round(((progress_bar + 1.0) / destination_count) * 1000) / 1000, 100) catch 0;
counters[counter].color_values[0] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[0][0] + 1) % 256)) * 0.8));
counters[counter].color_values[1] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[0][1] + 1) % 256)) * 0.8));
counters[counter].color_values[2] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[0][2] + 1) % 256)) * 0.8));
counters[counter].color_values[0] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[0][0] + 1) % 256))) * 0.8)));
counters[counter].color_values[1] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[0][1] + 1) % 256))) * 0.8)));
counters[counter].color_values[2] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[0][2] + 1) % 256))) * 0.8)));
counters[counter].color_values[3] = (colors[0][0] + 1) % 256;
counters[counter].color_values[4] = (colors[0][1] + 1) % 256;
counters[counter].color_values[5] = (colors[0][2] + 1) % 256;
counters[counter].color_values[6] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[1][0] + 1) % 256)) * 0.8));
counters[counter].color_values[7] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[1][1] + 1) % 256)) * 0.8));
counters[counter].color_values[8] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[1][2] + 1) % 256)) * 0.8));
counters[counter].color_values[6] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[1][0] + 1) % 256))) * 0.8)));
counters[counter].color_values[7] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[1][1] + 1) % 256))) * 0.8)));
counters[counter].color_values[8] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[1][2] + 1) % 256))) * 0.8)));
counters[counter].color_values[9] = (colors[1][0] + 1) % 256;
counters[counter].color_values[10] = (colors[1][1] + 1) % 256;
counters[counter].color_values[11] = (colors[1][2] + 1) % 256;
counters[counter].color_values[12] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[2][0] + 1) % 256)) * 0.8));
counters[counter].color_values[13] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[2][1] + 1) % 256)) * 0.8));
counters[counter].color_values[14] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[2][2] + 1) % 256)) * 0.8));
counters[counter].color_values[12] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[2][0] + 1) % 256))) * 0.8)));
counters[counter].color_values[13] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[2][1] + 1) % 256))) * 0.8)));
counters[counter].color_values[14] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[2][2] + 1) % 256))) * 0.8)));
counters[counter].color_values[15] = (colors[2][0] + 1) % 256;
counters[counter].color_values[16] = (colors[2][1] + 1) % 256;
counters[counter].color_values[17] = (colors[2][2] + 1) % 256;
counters[counter].color_values[18] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[3][0] + 1) % 256)) * 0.8));
counters[counter].color_values[19] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[3][1] + 1) % 256)) * 0.8));
counters[counter].color_values[20] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[3][2] + 1) % 256)) * 0.8));
counters[counter].color_values[18] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[3][0] + 1) % 256))) * 0.8)));
counters[counter].color_values[19] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[3][1] + 1) % 256))) * 0.8)));
counters[counter].color_values[20] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[3][2] + 1) % 256))) * 0.8)));
counters[counter].color_values[21] = (colors[3][0] + 1) % 256;
counters[counter].color_values[22] = (colors[3][1] + 1) % 256;
counters[counter].color_values[23] = (colors[3][2] + 1) % 256;

@@ -203,7 +203,7 @@ pub fn main() anyerror!void {
_ = try recorder.wait();

all_timestamps[0] = wrote.len;
for (counters) |count, i| {
for (counters, 0..) |count, i| {
all_timestamps[i + 1] = count.timestamp;
}
(file path not captured)

@@ -43,7 +43,7 @@ pub fn main() anyerror!void {
var position = try std.fmt.parseInt(u32, position_str, 10);
const filepath = try std.fs.path.resolve(allocator, &.{basepath});
var file = try std.fs.openFileAbsolute(filepath, .{ .write = true });
var ms = @truncate(u64, (try std.fmt.parseInt(u128, args[args.len - 1], 10)) * std.time.ns_per_ms);
var ms = @as(u64, @truncate((try std.fmt.parseInt(u128, args[args.len - 1], 10)) * std.time.ns_per_ms));
std.debug.assert(ms > 0);
// std.debug.assert(std.math.isFinite(position));
var prng = std.rand.DefaultPrng.init(0);

@@ -112,30 +112,30 @@ pub fn main() anyerror!void {
\\
++ SIMULATE_LONG_FILE;

counters[counter].timestamp = @truncate(u64, @intCast(u128, std.time.nanoTimestamp()) / (std.time.ns_per_ms / 10));
counters[counter].timestamp = @as(u64, @truncate(@as(u128, @intCast(std.time.nanoTimestamp())) / (std.time.ns_per_ms / 10)));
counters[counter].rotate = rotate % 360;
counters[counter].percent = std.math.mod(f64, std.math.round(((progress_bar + 1.0) / destination_count) * 1000) / 1000, 100) catch 0;
counters[counter].color_values[0] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[0][0] + 1) % 256)) * 0.8));
counters[counter].color_values[1] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[0][1] + 1) % 256)) * 0.8));
counters[counter].color_values[2] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[0][2] + 1) % 256)) * 0.8));
counters[counter].color_values[0] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[0][0] + 1) % 256))) * 0.8)));
counters[counter].color_values[1] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[0][1] + 1) % 256))) * 0.8)));
counters[counter].color_values[2] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[0][2] + 1) % 256))) * 0.8)));
counters[counter].color_values[3] = (colors[0][0] + 1) % 256;
counters[counter].color_values[4] = (colors[0][1] + 1) % 256;
counters[counter].color_values[5] = (colors[0][2] + 1) % 256;
counters[counter].color_values[6] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[1][0] + 1) % 256)) * 0.8));
counters[counter].color_values[7] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[1][1] + 1) % 256)) * 0.8));
counters[counter].color_values[8] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[1][2] + 1) % 256)) * 0.8));
counters[counter].color_values[6] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[1][0] + 1) % 256))) * 0.8)));
counters[counter].color_values[7] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[1][1] + 1) % 256))) * 0.8)));
counters[counter].color_values[8] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[1][2] + 1) % 256))) * 0.8)));
counters[counter].color_values[9] = (colors[1][0] + 1) % 256;
counters[counter].color_values[10] = (colors[1][1] + 1) % 256;
counters[counter].color_values[11] = (colors[1][2] + 1) % 256;
counters[counter].color_values[12] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[2][0] + 1) % 256)) * 0.8));
counters[counter].color_values[13] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[2][1] + 1) % 256)) * 0.8));
counters[counter].color_values[14] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[2][2] + 1) % 256)) * 0.8));
counters[counter].color_values[12] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[2][0] + 1) % 256))) * 0.8)));
counters[counter].color_values[13] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[2][1] + 1) % 256))) * 0.8)));
counters[counter].color_values[14] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[2][2] + 1) % 256))) * 0.8)));
counters[counter].color_values[15] = (colors[2][0] + 1) % 256;
counters[counter].color_values[16] = (colors[2][1] + 1) % 256;
counters[counter].color_values[17] = (colors[2][2] + 1) % 256;
counters[counter].color_values[18] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[3][0] + 1) % 256)) * 0.8));
counters[counter].color_values[19] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[3][1] + 1) % 256)) * 0.8));
counters[counter].color_values[20] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[3][2] + 1) % 256)) * 0.8));
counters[counter].color_values[18] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[3][0] + 1) % 256))) * 0.8)));
counters[counter].color_values[19] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[3][1] + 1) % 256))) * 0.8)));
counters[counter].color_values[20] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[3][2] + 1) % 256))) * 0.8)));
counters[counter].color_values[21] = (colors[3][0] + 1) % 256;
counters[counter].color_values[22] = (colors[3][1] + 1) % 256;
counters[counter].color_values[23] = (colors[3][2] + 1) % 256;

@@ -190,7 +190,7 @@ pub fn main() anyerror!void {
_ = try recorder.wait();

all_timestamps[0] = wrote.len;
for (counters) |count, i| {
for (counters, 0..) |count, i| {
all_timestamps[i + 1] = count.timestamp;
}
bench/snippets/base64-buffer-to-string.mjs (new file): 14 lines

@@ -0,0 +1,14 @@
import { bench, run } from "./runner.mjs";
import { Buffer } from "node:buffer";

const bigBuffer = Buffer.from("hello world".repeat(10000));
const converted = bigBuffer.toString("base64");
bench("Buffer.toString('base64')", () => {
return bigBuffer.toString("base64");
});

// bench("Buffer.from(str, 'base64')", () => {
// return Buffer.from(converted, "base64");
// });

await run();
(file path not captured)

@@ -1,12 +1,6 @@
// so it can run in environments without node module resolution
import { bench, run } from "../node_modules/mitata/src/cli.mjs";

var crypto = globalThis.crypto;

if (!crypto) {
crypto = await import("node:crypto");
}

import crypto from "node:crypto";
var foo = new Uint8Array(65536);
bench("crypto.getRandomValues(65536)", () => {
crypto.getRandomValues(foo);

@@ -22,4 +16,8 @@ bench("crypto.randomUUID()", () => {
return crypto.randomUUID()[2];
});

bench("crypto.randomInt()", () => {
return crypto.randomInt(0, 100);
});

await run();
bench/snippets/error-capturestack.mjs (new file, 12 lines)
@@ -0,0 +1,12 @@
import { bench, run } from "./runner.mjs";

var err = new Error();
bench("Error.captureStackTrace(err)", () => {
  Error.captureStackTrace(err);
});

bench("Error.prototype.stack", () => {
  new Error().stack;
});

await run();
bench/snippets/module-exports-putter.cjs (new file, 65 lines)
@@ -0,0 +1,65 @@
// This is a stress test of some internals of how Bun handles the module.exports assignment.
// If it crashes or throws, this test fails.
import("./runner.mjs").then(({ bench, run }) => {
  bench("Object.defineProperty(module, 'exports', { get() { return 42; } })", () => {
    Object.defineProperty(module, "exports", {
      get() {
        return 42;
      },
      set() {
        throw new Error("bad");
      },
      configurable: true,
    });
    if (module.exports !== 42) throw new Error("bad");
    if (!Object.getOwnPropertyDescriptor(module, "exports").get) throw new Error("bad");
  });

  bench("Object.defineProperty(module.exports = {})", () => {
    Object.defineProperty(module, "exports", {
      value: { abc: 123 },
    });

    if (!module.exports.abc) throw new Error("bad");
    if (Object.getOwnPropertyDescriptor(module, "exports").value !== module.exports) throw new Error("bad");
  });

  bench("module.exports = {}", () => {
    module.exports = { abc: 123 };

    if (!module.exports.abc) throw new Error("bad");
    if (Object.getOwnPropertyDescriptor(module, "exports").value !== module.exports) throw new Error("bad");
  });

  run().then(() => {
    module.exports = {
      a: 1,
    };

    console.log(
      module?.exports,
      require.cache[module.id].exports,
      module?.exports === require.cache[module.id],
      __dirname,
      Object.keys(require(module.id)),
      require(module.id),
    );

    module.exports = function lol() {
      return 42;
    };

    console.log(module.exports, module.exports());

    queueMicrotask(() => {
      console.log(
        module?.exports,
        require.cache[module.id].exports,
        module?.exports === require.cache[module.id]?.exports,
        __dirname,
        Object.keys(require(module.id)),
        require(module.id),
      );
    });
  });
});
bench/snippets/process-info.mjs (new file, 33 lines)
@@ -0,0 +1,33 @@
import { bench, run } from "./runner.mjs";
import { performance } from "perf_hooks";

bench("process.memoryUsage()", () => {
  process.memoryUsage();
});

bench("process.memoryUsage.rss()", () => {
  process.memoryUsage.rss();
});

bench("process.cpuUsage()", () => {
  process.cpuUsage();
});

const init = process.cpuUsage();
bench("process.cpuUsage(delta)", () => {
  process.cpuUsage(init);
});

bench("performance.now()", () => {
  performance.now();
});

bench("process.hrtime()", () => {
  process.hrtime();
});

bench("process.hrtime.bigint()", () => {
  process.hrtime.bigint();
});

await run();
bench/snippets/readfile-not-found.mjs (new file, 17 lines)
@@ -0,0 +1,17 @@
import { bench, run } from "./runner.mjs";
import { readFileSync, existsSync } from "node:fs";
import { readFile } from "node:fs/promises";

bench(`readFileSync(/tmp/404-not-found)`, () => {
  try {
    readFileSync("/tmp/404-not-found");
  } catch (e) {}
});

bench(`readFile(/tmp/404-not-found)`, async () => {
  try {
    await readFile("/tmp/404-not-found");
  } catch (e) {}
});

await run();
@@ -1,4 +1,10 @@
import { realpathSync } from "node:fs";
const count = parseInt(process.env.ITERATIONS || "1", 10) || 1;
const arg = process.argv[process.argv.length - 1];
for (let i = 0; i < count; i++) realpathSync(arg);
import { bench, run } from "./runner.mjs";

bench("realpathSync x " + count, () => {
  for (let i = 0; i < count; i++) realpathSync(arg, "utf-8");
});

await run();
bench/snippets/response-arrayBuffer.mjs (new file, 136 lines)
@@ -0,0 +1,136 @@
// This snippet mostly exists to reproduce a memory leak
//
import { bench, run } from "mitata";

const obj = {
  "id": 1296269,
  "node_id": "MDEwOlJlcG9zaXRvcnkxMjk2MjY5",
  "name": "Hello-World",
  "full_name": "octocat/Hello-World",
  "owner": {
    "login": "octocat",
    "id": 1,
    "node_id": "MDQ6VXNlcjE=",
    "avatar_url": "https://github.com/images/error/octocat_happy.gif",
    "gravatar_id": "",
    "url": "https://api.github.com/users/octocat",
    "html_url": "https://github.com/octocat",
    "followers_url": "https://api.github.com/users/octocat/followers",
    "following_url": "https://api.github.com/users/octocat/following{/other_user}",
    "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
    "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
    "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
    "organizations_url": "https://api.github.com/users/octocat/orgs",
    "repos_url": "https://api.github.com/users/octocat/repos",
    "events_url": "https://api.github.com/users/octocat/events{/privacy}",
    "received_events_url": "https://api.github.com/users/octocat/received_events",
    "type": "User",
    "site_admin": false,
  },
  "private": false,
  "html_url": "https://github.com/octocat/Hello-World",
  "description": "This your first repo!",
  "fork": false,
  "url": "https://api.github.com/repos/octocat/Hello-World",
  "archive_url": "https://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref}",
  "assignees_url": "https://api.github.com/repos/octocat/Hello-World/assignees{/user}",
  "blobs_url": "https://api.github.com/repos/octocat/Hello-World/git/blobs{/sha}",
  "branches_url": "https://api.github.com/repos/octocat/Hello-World/branches{/branch}",
  "collaborators_url": "https://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator}",
  "comments_url": "https://api.github.com/repos/octocat/Hello-World/comments{/number}",
  "commits_url": "https://api.github.com/repos/octocat/Hello-World/commits{/sha}",
  "compare_url": "https://api.github.com/repos/octocat/Hello-World/compare/{base}...{head}",
  "contents_url": "https://api.github.com/repos/octocat/Hello-World/contents/{+path}",
  "contributors_url": "https://api.github.com/repos/octocat/Hello-World/contributors",
  "deployments_url": "https://api.github.com/repos/octocat/Hello-World/deployments",
  "downloads_url": "https://api.github.com/repos/octocat/Hello-World/downloads",
  "events_url": "https://api.github.com/repos/octocat/Hello-World/events",
  "forks_url": "https://api.github.com/repos/octocat/Hello-World/forks",
  "git_commits_url": "https://api.github.com/repos/octocat/Hello-World/git/commits{/sha}",
  "git_refs_url": "https://api.github.com/repos/octocat/Hello-World/git/refs{/sha}",
  "git_tags_url": "https://api.github.com/repos/octocat/Hello-World/git/tags{/sha}",
  "git_url": "git:github.com/octocat/Hello-World.git",
  "issue_comment_url": "https://api.github.com/repos/octocat/Hello-World/issues/comments{/number}",
  "issue_events_url": "https://api.github.com/repos/octocat/Hello-World/issues/events{/number}",
  "issues_url": "https://api.github.com/repos/octocat/Hello-World/issues{/number}",
  "keys_url": "https://api.github.com/repos/octocat/Hello-World/keys{/key_id}",
  "labels_url": "https://api.github.com/repos/octocat/Hello-World/labels{/name}",
  "languages_url": "https://api.github.com/repos/octocat/Hello-World/languages",
  "merges_url": "https://api.github.com/repos/octocat/Hello-World/merges",
  "milestones_url": "https://api.github.com/repos/octocat/Hello-World/milestones{/number}",
  "notifications_url": "https://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating}",
  "pulls_url": "https://api.github.com/repos/octocat/Hello-World/pulls{/number}",
  "releases_url": "https://api.github.com/repos/octocat/Hello-World/releases{/id}",
  "ssh_url": "git@github.com:octocat/Hello-World.git",
  "stargazers_url": "https://api.github.com/repos/octocat/Hello-World/stargazers",
  "statuses_url": "https://api.github.com/repos/octocat/Hello-World/statuses/{sha}",
  "subscribers_url": "https://api.github.com/repos/octocat/Hello-World/subscribers",
  "subscription_url": "https://api.github.com/repos/octocat/Hello-World/subscription",
  "tags_url": "https://api.github.com/repos/octocat/Hello-World/tags",
  "teams_url": "https://api.github.com/repos/octocat/Hello-World/teams",
  "trees_url": "https://api.github.com/repos/octocat/Hello-World/git/trees{/sha}",
  "clone_url": "https://github.com/octocat/Hello-World.git",
  "mirror_url": "git:git.example.com/octocat/Hello-World",
  "hooks_url": "https://api.github.com/repos/octocat/Hello-World/hooks",
  "svn_url": "https://svn.github.com/octocat/Hello-World",
  "homepage": "https://github.com",
  "language": null,
  "forks_count": 9,
  "stargazers_count": 80,
  "watchers_count": 80,
  "size": 108,
  "default_branch": "master",
  "open_issues_count": 0,
  "is_template": false,
  "topics": ["octocat", "atom", "electron", "api"],
  "has_issues": true,
  "has_projects": true,
  "has_wiki": true,
  "has_pages": false,
  "has_downloads": true,
  "has_discussions": false,
  "archived": false,
  "disabled": false,
  "visibility": "public",
  "pushed_at": "2011-01-26T19:06:43Z",
  "created_at": "2011-01-26T19:01:12Z",
  "updated_at": "2011-01-26T19:14:43Z",
  "permissions": {
    "admin": false,
    "push": false,
    "pull": true,
  },
  "security_and_analysis": {
    "advanced_security": {
      "status": "enabled",
    },
    "secret_scanning": {
      "status": "enabled",
    },
    "secret_scanning_push_protection": {
      "status": "disabled",
    },
  },
};

// Force the string to be 8bit
const str = String.fromCharCode(
  ...JSON.stringify(obj)
    .split("")
    .map(a => a.charCodeAt(0)),
);
var i = 0;

bench("new Response().arrayBuffer() (new string each call, latin1)", async () => {
  return await new Response(str + i++).arrayBuffer();
});

bench("new Response().arrayBuffer() (new string each call, utf16)", async () => {
  return await new Response(str + i++ + "😊").arrayBuffer();
});

bench("new Response().arrayBuffer() (existing string, latin1)", async () => {
  return await new Response(str).arrayBuffer();
});

await run();
bench/snippets/response-json.mjs (new file, 123 lines)
@@ -0,0 +1,123 @@
// This snippet mostly exists to reproduce a memory leak
import { bench, run } from "mitata";

const obj = {
  // [identical to the GitHub "octocat/Hello-World" fixture shown in
  //  bench/snippets/response-arrayBuffer.mjs above; omitted here to avoid repetition]
};

bench("Response.json(obj)", async () => {
  return Response.json(obj);
});

bench("Response.json(obj).json()", async () => {
  return await Response.json(obj).json();
});

await run();
bench/snippets/serialize.mjs (new file, 128 lines)
@@ -0,0 +1,128 @@
import { serialize, deserialize } from "node:v8";
import { bench, run } from "./runner.mjs";
const obj = {
  // [identical to the GitHub "octocat/Hello-World" fixture shown in
  //  bench/snippets/response-arrayBuffer.mjs above; omitted here to avoid repetition]
};

bench("serialize", () => {
  serialize(obj);
});
const serialized = serialize(obj);
bench("deserialize", () => {
  deserialize(serialized);
});

if (typeof Bun !== "undefined") {
  if (!Bun.deepEquals(obj, deserialize(serialized))) {
    throw new Error("not equal");
  }
}

await run();
bench/snippets/structuredClone.mjs (new file, 39 lines)
@@ -0,0 +1,39 @@
var testArray = [
  {
    description: "Random description.",
    testNumber: 123456789,
    testBoolean: true,
    testObject: {
      testString: "test string",
      testNumber: 12345,
    },
    testArray: [
      {
        myName: "test name",
        myNumber: 123245,
      },
    ],
  },
  {
    description: "Random description.",
    testNumber: 123456789,
    testBoolean: true,
    testObject: {
      testString: "test string",
      testNumber: 12345,
    },
    testArray: [
      {
        myName: "test name",
        myNumber: 123245,
      },
    ],
  },
];

import { bench, run } from "./runner.mjs";

bench("structuredClone(array)", () => structuredClone(testArray));
bench("structuredClone(123)", () => structuredClone(123));
bench("structuredClone({a: 123})", () => structuredClone({ a: 123 }));
await run();
bench/snippets/util-deprecate.mjs (new file, 60 lines)
@@ -0,0 +1,60 @@
import { bench, run } from "./runner.mjs";
function deprecateUsingClosure(fn, msg, code) {
  if (process.noDeprecation === true) {
    return fn;
  }

  var realFn = fn;
  var wrapper = () => {
    return fnToWrap.apply(this, arguments);
  };

  var deprecater = () => {
    if (process.throwDeprecation) {
      var err = new Error(msg);
      if (code) err.code = code;
      throw err;
    } else if (process.traceDeprecation) {
      console.trace(msg);
    } else {
      console.error(msg);
    }

    fnToWrap = realFn;
    return realFn.apply(this, arguments);
  };
  var fnToWrap = deprecater;

  return wrapper;
}

function deprecateOriginal(fn, msg) {
  var warned = false;
  function deprecated() {
    if (!warned) {
      if (process.throwDeprecation) {
        throw new Error(msg);
      } else if (process.traceDeprecation) {
        console.trace(msg);
      } else {
        console.error(msg);
      }
      warned = true;
    }
    return fn.apply(this, arguments);
  }
  return deprecated;
}

const deprecatedy = deprecateUsingClosure(() => {}, "This is deprecated", "DEP0001");
const deprecatedy2 = deprecateOriginal(() => {}, "This is deprecated");

bench("deprecateUsingClosure", () => {
  deprecatedy(Math.random() + 1);
});

bench("deprecateOriginal", () => {
  deprecatedy2(Math.random() + 1);
});

await run();
@@ -32,6 +32,7 @@ const server = Bun.serve({
},

perMessageDeflate: false,
publishToSelf: true,
},

fetch(req, server) {

@@ -42,4 +42,4 @@ function sendReadyMessage() {

console.log(`Waiting for ${CLIENTS_TO_WAIT_FOR} clients to connect..`);

Deno.serve(reqHandler, { port });
Deno.serve({ port }, reqHandler);
build.zig (22 lines changed)
@@ -1,4 +1,5 @@
const std = @import("std");
const Wyhash = @import("./src/wyhash.zig").Wyhash;

fn moduleSource(comptime out: []const u8) FileSource {
    if (comptime std.fs.path.dirname(@src().file)) |base| {
@@ -76,13 +77,13 @@ const BunBuildOptions = struct {

    pub fn updateRuntime(this: *BunBuildOptions) anyerror!void {
        var runtime_out_file = try std.fs.cwd().openFile("src/runtime.out.js", .{ .mode = .read_only });
        const runtime_hash = std.hash.Wyhash.hash(
        const runtime_hash = Wyhash.hash(
            0,
            try runtime_out_file.readToEndAlloc(std.heap.page_allocator, try runtime_out_file.getEndPos()),
        );
        this.runtime_js_version = runtime_hash;
        var fallback_out_file = try std.fs.cwd().openFile("src/fallback.out.js", .{ .mode = .read_only });
        const fallback_hash = std.hash.Wyhash.hash(
        const fallback_hash = Wyhash.hash(
            0,
            try fallback_out_file.readToEndAlloc(std.heap.page_allocator, try fallback_out_file.getEndPos()),
        );
@@ -192,12 +193,12 @@ pub fn build(b: *Build) !void {
    else
        "root.zig";

    const min_version: std.builtin.Version = if (target.getOsTag() != .freestanding)
    const min_version: std.SemanticVersion = if (target.getOsTag() != .freestanding)
        target.getOsVersionMin().semver
    else
        .{ .major = 0, .minor = 0, .patch = 0 };

    const max_version: std.builtin.Version = if (target.getOsTag() != .freestanding)
    const max_version: std.SemanticVersion = if (target.getOsTag() != .freestanding)
        target.getOsVersionMax().semver
    else
        .{ .major = 0, .minor = 0, .patch = 0 };
@@ -380,6 +381,19 @@ pub fn build(b: *Build) !void {
        headers_obj.addOptions("build_options", default_build_options.step(b));
    }

    {
        const headers_step = b.step("stream-tester-obj", "Build stream-tester (object files)");
        var headers_obj = b.addObject(.{
            .name = "stream-tester",
            .root_source_file = FileSource.relative("src/stream_tester.zig"),
            .target = target,
            .optimize = optimize,
        });
        defer headers_step.dependOn(&headers_obj.step);
        try configureObjectStep(b, headers_obj, @TypeOf(target), target, obj.main_pkg_path.?);
        headers_obj.addOptions("build_options", default_build_options.step(b));
    }

    {
        const headers_step = b.step("string-bench", "Build string bench");
        var headers_obj = b.addExecutable(.{
|
||||
PACKAGE_OPTIONS[REMOVE_OPTIONS_LONG]="";
|
||||
PACKAGE_OPTIONS[REMOVE_OPTIONS_SHORT]="";
|
||||
|
||||
PACKAGE_OPTIONS[SHARED_OPTIONS_LONG]="--config --yarn --production --no-save --dry-run --lockfile --force --cache-dir --no-cache --silent --verbose --global --cwd --backend --link-native-bins --help";
|
||||
PACKAGE_OPTIONS[SHARED_OPTIONS_LONG]="--config --yarn --production --frozen-lockfile --no-save --dry-run --lockfile --force --cache-dir --no-cache --silent --verbose --global --cwd --backend --link-native-bins --help";
|
||||
PACKAGE_OPTIONS[SHARED_OPTIONS_SHORT]="-c -y -p -f -g";
|
||||
|
||||
PM_OPTIONS[LONG_OPTIONS]="--config --yarn --production --no-save --dry-run --lockfile --force --cache-dir --no-cache --silent --verbose --no-progress --no-summary --no-verify --ignore-scripts --global --cwd --backend --link-native-bins --help"
|
||||
PM_OPTIONS[LONG_OPTIONS]="--config --yarn --production --frozen-lockfile --no-save --dry-run --lockfile --force --cache-dir --no-cache --silent --verbose --no-progress --no-summary --no-verify --ignore-scripts --global --cwd --backend --link-native-bins --help"
|
||||
PM_OPTIONS[SHORT_OPTIONS]="-c -y -p -f -g"
|
||||
|
||||
local cur_word="${COMP_WORDS[${COMP_CWORD}]}";
|
||||
|
||||
@@ -47,6 +47,7 @@ _bun() {
    '-g[Add a package globally]' \
    '--global[Add a package globally]' \
    '--production[Don'"'"'t install devDependencies]' \
    '--frozen-lockfile[Disallow changes to lockfile]' \
    '--optional[Add dependency to optionalDependencies]' \
    '--development[Add dependency to devDependencies]' \
    '-d[Add dependency to devDependencies]' \
@@ -88,6 +89,7 @@ _bun() {
    '--yarn[Write a yarn.lock file (yarn v1)]' \
    '--global[Add a package globally]' \
    '--production[Don'"'"'t install devDependencies]' \
    '--frozen-lockfile[Disallow changes to lockfile]' \
    '--optional[Add dependency to optionalDependencies]' \
    '--development[Add dependency to devDependencies]' \
    '-d[Add dependency to devDependencies]' \
@@ -123,6 +125,7 @@ _bun() {
    '--yarn[Write a yarn.lock file (yarn v1)]' \
    '--global[Add a package globally]' \
    '--production[Don'"'"'t install devDependencies]' \
    '--frozen-lockfile[Disallow changes to lockfile]' \
    '--optional[Add dependency to optionalDependencies]' \
    '--development[Add dependency to devDependencies]' \
    '-d[Add dependency to devDependencies]' \
@@ -278,6 +281,7 @@ _bun() {
    '--yarn[Write a yarn.lock file (yarn v1)]'
    '-p[Do not install devDependencies]'
    '--production[Do not install devDependencies]'
    '--frozen-lockfile[Disallow changes to lockfile]' \
    '--no-save[Do not save a lockfile]'
    '--dry-run[Do not install anything]'
    '--lockfile[Store & load a lockfile at a specific filepath]'
@@ -532,6 +536,7 @@ _bun() {
    '--yarn[Write a yarn.lock file (yarn v1)]' \
    '--production[Don'"'"'t install devDependencies]' \
    '-p[Don'"'"'t install devDependencies]' \
    '--frozen-lockfile[Disallow changes to lockfile]' \
    '--no-save[]' \
    '--dry-run[Don'"'"'t install anything]' \
    '--force[Always request the latest versions from the registry & reinstall all dependenices]' \
@@ -565,6 +570,7 @@ _bun() {
    '--yarn[Write a yarn.lock file (yarn v1)]' \
    '--production[Don'"'"'t install devDependencies]' \
    '-p[Don'"'"'t install devDependencies]' \
    '--frozen-lockfile[Disallow changes to lockfile]' \
    '--no-save[]' \
    '--dry-run[Don'"'"'t install anything]' \
    '-g[Remove a package globally]' \
@@ -115,6 +115,7 @@ subcommands:
    - yarn -- "Write a yarn.lock file (yarn v1)"
    - production -- "Don't install devDependencies"
    - p -- "Don't install devDependencies"
    - frozen-lockfile -- "Disallow changes to lockfile"
    - no-save --
    - dry-run -- "Don't install anything"
    - force -- "Always request the latest versions from the registry & reinstall all dependenices"
@@ -152,6 +153,7 @@ subcommands:
    - development -- "Add dependency to devDependencies"
    - d -- "Add dependency to devDependencies"
    - p -- "Don't install devDependencies"
    - frozen-lockfile -- "Disallow changes to lockfile"
    - no-save --
    - dry-run -- "Don't install anything"
    - force -- "Always request the latest versions from the registry & reinstall all dependenices"
@@ -192,6 +194,7 @@ subcommands:
    - yarn -- "Write a yarn.lock file (yarn v1)"
    - production -- "Don't install devDependencies"
    - p -- "Don't install devDependencies"
    - frozen-lockfile -- "Disallow changes to lockfile"
    - no-save --
    - dry-run -- "Don't install anything"
    - force -- "Always request the latest versions from the registry & reinstall all dependenices"
@@ -202,6 +202,53 @@ const response = await fetch("https://bun.sh");
await Bun.write("index.html", response);
```

## Incremental writing with `FileSink`

Bun provides a native incremental file writing API called `FileSink`. To retrieve a `FileSink` instance from a `BunFile`:

```ts
const file = Bun.file("output.txt");
const writer = file.writer();
```

To incrementally write to the file, call `.write()`.

```ts
const file = Bun.file("output.txt");
const writer = file.writer();

writer.write("it was the best of times\n");
writer.write("it was the worst of times\n");
```

These chunks will be buffered internally. To flush the buffer to disk, use `.flush()`. This returns the number of flushed bytes.

```ts
writer.flush(); // write buffer to disk
```

The buffer will also auto-flush when the `FileSink`'s _high water mark_ is reached; that is, when its internal buffer is full. This value can be configured.

```ts
const file = Bun.file("output.txt");
const writer = file.writer({ highWaterMark: 1024 * 1024 }); // 1MB
```

To flush the buffer and close the file:

```ts
writer.end();
```

Note that, by default, the `bun` process will stay alive until this `FileSink` is explicitly closed with `.end()`. To opt out of this behavior, you can "unref" the instance.

```ts
writer.unref();

// to "re-ref" it later
writer.ref();
```

## Benchmarks

The following is a 3-line implementation of the Linux `cat` command.
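The snippet itself sits outside this hunk, so it is not shown here. Purely as a hedged sketch (assuming `Bun.stdout`, `Bun.file`, and `Bun.write` from Bun's public API, and taking the target path from `process.argv`), such a `cat`-style program might look like:

```ts
// Hypothetical sketch, not the docs' actual snippet: stream a file to stdout.
const path = process.argv.at(-1)!;
await Bun.write(Bun.stdout, Bun.file(path));
```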
@@ -238,7 +285,13 @@ interface Bun {

  write(
    destination: string | number | BunFile | URL,
    input: string | Blob | ArrayBuffer | SharedArrayBuffer | TypedArray | Response,
    input:
      | string
      | Blob
      | ArrayBuffer
      | SharedArrayBuffer
      | TypedArray
      | Response,
  ): Promise<number>;
}

@@ -250,5 +303,17 @@ interface BunFile {
  stream(): Promise<ReadableStream>;
  arrayBuffer(): Promise<ArrayBuffer>;
  json(): Promise<any>;
  writer(params: { highWaterMark?: number }): FileSink;
}

export interface FileSink {
  write(
    chunk: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer,
  ): number;
  flush(): number | Promise<number>;
  end(error?: Error): number | Promise<number>;
  start(options?: { highWaterMark?: number }): void;
  ref(): void;
  unref(): void;
}
```
@@ -4,6 +4,71 @@ Bun implements the `createHash` and `createHmac` functions from [`node:crypto`](

{% /callout %}

## `Bun.password`

{% callout %}
**Note** — Added in Bun 0.6.8.
{% /callout %}

`Bun.password` is a collection of utility functions for hashing and verifying passwords with various cryptographically secure algorithms.

```ts
const password = "super-secure-pa$$word";

const hash = await Bun.password.hash(password);
// => $argon2id$v=19$m=65536,t=2,p=1$tFq+9AVr1bfPxQdh6E8DQRhEXg/M/SqYCNu6gVdRRNs$GzJ8PuBi+K+BVojzPfS5mjnC8OpLGtv8KJqF99eP6a4

const isMatch = await Bun.password.verify(password, hash);
// => true
```

The second argument to `Bun.password.hash` accepts a params object that lets you pick and configure the hashing algorithm.

```ts
const password = "super-secure-pa$$word";

// use argon2 (default)
const argonHash = await Bun.password.hash(password, {
  algorithm: "argon2id", // "argon2id" | "argon2i" | "argon2d"
  memoryCost: 4, // memory usage in kibibytes
  timeCost: 3, // the number of iterations
});

// use bcrypt
const bcryptHash = await Bun.password.hash(password, {
  algorithm: "bcrypt",
  cost: 4, // number between 4-31
});
```

The algorithm used to create the hash is stored in the hash itself. When using `bcrypt`, the returned hash is encoded in [Modular Crypt Format](https://passlib.readthedocs.io/en/stable/modular_crypt_format.html) for compatibility with most existing `bcrypt` implementations; with `argon2` the result is encoded in the newer [PHC format](https://github.com/P-H-C/phc-string-format/blob/master/phc-sf-spec.md).

The `verify` function automatically detects the algorithm based on the input hash and uses the correct verification method. It can correctly infer the algorithm from both PHC- and MCF-encoded hashes.

```ts
const password = "super-secure-pa$$word";

const hash = await Bun.password.hash(password, {
  /* config */
});

const isMatch = await Bun.password.verify(password, hash);
// => true
```

Synchronous versions of all functions are also available. Keep in mind that these functions are computationally expensive, so using a blocking API may degrade application performance.

```ts
const password = "super-secure-pa$$word";

const hash = Bun.password.hashSync(password, {
  /* config */
});

const isMatch = Bun.password.verifySync(password, hash);
// => true
```

## `Bun.hash`

`Bun.hash` is a collection of utilities for _non-cryptographic_ hashing. Non-cryptographic hashing algorithms are optimized for speed of computation over collision-resistance or security.
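The usage examples for `Bun.hash` fall outside this hunk. As a minimal hedged sketch (assuming only that `Bun.hash` accepts a string or binary buffer and defaults to Wyhash):

```ts
// Non-cryptographic hashing: fast, but not suitable for passwords or signatures.
const input = "some data here";

console.log(Bun.hash(input)); // default algorithm (Wyhash)
console.log(Bun.hash(new TextEncoder().encode(input))); // binary input works too
```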
docs/api/http.md (108 lines changed)
@@ -67,7 +67,7 @@ Bun.serve({
  fetch(req) {
    throw new Error("woops!");
  },
  error(error: Error) {
  error(error) {
    return new Response(`<pre>${error}\n${error.stack}</pre>`, {
      headers: {
        "Content-Type": "text/html",
@@ -95,38 +95,37 @@ server.stop();

## TLS

Bun supports TLS out of the box, powered by [OpenSSL](https://www.openssl.org/). Enable TLS by passing in a value for `key` and `cert`; both are required to enable TLS. If needed, supply a `passphrase` to decrypt the `keyFile`.
Bun supports TLS out of the box, powered by [BoringSSL](https://boringssl.googlesource.com/boringssl). Enable TLS by passing in a value for `key` and `cert`; both are required to enable TLS.

```ts
Bun.serve({
  fetch(req) {
    return new Response("Hello!!!");
  },
```ts-diff
Bun.serve({
  fetch(req) {
    return new Response("Hello!!!");
  },

  // can be string, BunFile, TypedArray, Buffer, or array thereof
  key: Bun.file("./key.pem"),
  cert: Bun.file("./cert.pem"),

  // passphrase, only required if key is encrypted
  passphrase: "super-secret",
});
+  tls: {
+    key: Bun.file("./key.pem"),
+    cert: Bun.file("./cert.pem"),
+  }
});
```

The `key` and `cert` fields expect the _contents_ of your TLS key and certificate. This can be a string, `BunFile`, `TypedArray`, or `Buffer`.
The `key` and `cert` fields expect the _contents_ of your TLS key and certificate, _not a path to it_. This can be a string, `BunFile`, `TypedArray`, or `Buffer`.

```ts
Bun.serve({
  fetch() {},

  // BunFile
  key: Bun.file("./key.pem"),
  // Buffer
  key: fs.readFileSync("./key.pem"),
  // string
  key: fs.readFileSync("./key.pem", "utf8"),
  // array of above
  key: [Bun.file('./key1.pem'), Bun.file('./key2.pem']

  tls: {
    // BunFile
    key: Bun.file("./key.pem"),
    // Buffer
    key: fs.readFileSync("./key.pem"),
    // string
    key: fs.readFileSync("./key.pem", "utf8"),
    // array of above
    key: [Bun.file("./key1.pem"), Bun.file("./key2.pem")],
  },
});
```

@@ -136,17 +135,35 @@ Bun.serve({

{% /callout %}

If your private key is encrypted with a passphrase, provide a value for `passphrase` to decrypt it.

```ts-diff
Bun.serve({
  fetch(req) {
    return new Response("Hello!!!");
  },

  tls: {
    key: Bun.file("./key.pem"),
    cert: Bun.file("./cert.pem"),
+   passphrase: "my-secret-passphrase",
  }
});
```

Optionally, you can override the trusted CA certificates by passing a value for `ca`. By default, the server will trust the list of well-known CAs curated by Mozilla. When `ca` is specified, the Mozilla list is overwritten.

```ts
Bun.serve({
  fetch(req) {
    return new Response("Hello!!!");
  },
  key: Bun.file("./key.pem"), // path to TLS key
  cert: Bun.file("./cert.pem"), // path to TLS cert
  ca: Bun.file("./ca.pem"), // path to root CA certificate
});
```ts-diff
Bun.serve({
  fetch(req) {
    return new Response("Hello!!!");
  },
  tls: {
    key: Bun.file("./key.pem"), // path to TLS key
    cert: Bun.file("./cert.pem"), // path to TLS cert
+   ca: Bun.file("./ca.pem"), // path to root CA certificate
  }
});
```

To override Diffie-Hellman parameters:
@@ -154,7 +171,10 @@ To override Diffie-Helman parameters:
```ts
Bun.serve({
  // ...
  dhParamsFile: "./dhparams.pem", // path to Diffie Helman parameters
  tls: {
    // other config
    dhParamsFile: "/path/to/dhparams.pem", // path to Diffie-Hellman parameters
  },
});
```

@@ -275,11 +295,21 @@ interface Bun {
    port?: number;
    development?: boolean;
    error?: (error: Error) => Response | Promise<Response>;
    keyFile?: string;
    certFile?: string;
    caFile?: string;
    dhParamsFile?: string;
    passphrase?: string;
    tls?: {
      key?:
        | string
        | TypedArray
        | BunFile
        | Array<string | TypedArray | BunFile>;
      cert?:
        | string
        | TypedArray
        | BunFile
        | Array<string | TypedArray | BunFile>;
      ca?: string | TypedArray | BunFile | Array<string | TypedArray | BunFile>;
      passphrase?: string;
      dhParamsFile?: string;
    };
    maxRequestBodySize?: number;
    lowMemoryMode?: boolean;
  }): Server;
@@ -28,8 +28,6 @@ for await (const chunk of stream) {
}
```

For a more complete discussion of streams in Bun, see [API > Streams](/docs/api/streams).

## Direct `ReadableStream`

Bun implements an optimized version of `ReadableStream` that avoids unnecessary data copying & queue management logic. With a traditional `ReadableStream`, chunks of data are _enqueued_. Each chunk is copied into a queue, where it sits until the stream is ready to send more data.
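A minimal sketch of the direct form described above (hedged: it assumes Bun's `type: "direct"` constructor option, the direct controller's `write`/`close` methods, and the `Bun.readableStreamToText` helper):

```ts
// A direct ReadableStream: chunks are written straight to the consumer,
// with no intermediate queueing. `type: "direct"` and `controller.write`
// are Bun-specific extensions, assumed here from the surrounding description.
const stream = new ReadableStream({
  type: "direct",
  pull(controller) {
    controller.write("hello");
    controller.write("world");
    controller.close();
  },
});

console.log(await Bun.readableStreamToText(stream)); // "helloworld"
```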
@@ -154,7 +152,9 @@ export class ArrayBufferSink {
    stream?: boolean;
  }): void;

  write(chunk: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer): number;
  write(
    chunk: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer,
  ): number;
  /**
   * Flush the internal buffer
   *
@@ -75,16 +75,6 @@ Bun.sleepSync(1000); // blocks thread for one second
console.log("hello one second later!");
```

Alternatively, pass a `Date` object to receive a `Promise` that resolves at that point in time.

```ts
const oneSecondInFuture = new Date(Date.now() + 1000);

console.log("hello");
await Bun.sleep(oneSecondInFuture);
console.log("hello one second later!");
```

## `Bun.which()`

`Bun.which(bin: string)`
@@ -159,7 +149,9 @@ test("peek", () => {
  // If we peek a rejected promise, it:
  // - returns the error
  // - does not mark the promise as handled
  const rejected = Promise.reject(new Error("Successfully tested promise rejection"));
  const rejected = Promise.reject(
    new Error("Successfully tested promise rejection"),
  );
  expect(peek(rejected).message).toBe("Successfully tested promise rejection");
});
```
@@ -293,7 +285,7 @@ console.log(url); // "file:///foo/bar.txt"

## `Bun.gzipSync()`

Compresses a `Uint8Array` using zlib's DEFLATE algorithm.
Compresses a `Uint8Array` using zlib's GZIP algorithm.

```ts
const buf = Buffer.from("hello".repeat(100)); // Buffer extends Uint8Array
@@ -382,7 +374,7 @@ export type ZlibCompressionOptions = {

## `Bun.gunzipSync()`

Uncompresses a `Uint8Array` using zlib's INFLATE algorithm.
Decompresses a `Uint8Array` using zlib's GUNZIP algorithm.

```ts
const buf = Buffer.from("hello".repeat(100)); // Buffer extends Uint8Array
@@ -410,15 +402,15 @@ The second argument supports the same set of configuration options as [`Bun.gzip

## `Bun.inflateSync()`

Uncompresses a `Uint8Array` using zlib's INFLATE algorithm.
Decompresses a `Uint8Array` using zlib's INFLATE algorithm.

```ts
const buf = Buffer.from("hello".repeat(100));
const compressed = Bun.deflateSync(buf);

const dec = new TextDecoder();
const uncompressed = Bun.inflateSync(compressed);
dec.decode(uncompressed);
const decompressed = Bun.inflateSync(compressed);
dec.decode(decompressed);
// => "hellohellohello..."
```
@@ -494,3 +486,17 @@ To resolve relative to the directory containing the current file, pass `import.m
```ts
Bun.resolveSync("./foo.ts", import.meta.dir);
```

## `serialize` & `deserialize` in `bun:jsc`

To save a JavaScript value into an ArrayBuffer & back, use `serialize` and `deserialize` from the `"bun:jsc"` module.

```js
import { serialize, deserialize } from "bun:jsc";

const buf = serialize({ foo: "bar" });
const obj = deserialize(buf);
console.log(obj); // => { foo: "bar" }
```

Internally, [`structuredClone`](https://developer.mozilla.org/en-US/docs/Web/API/structuredClone) and [`postMessage`](https://developer.mozilla.org/en-US/docs/Web/API/Window/postMessage) serialize and deserialize the same way. This exposes the underlying [HTML Structured Clone Algorithm](https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Structured_clone_algorithm) to JavaScript as an ArrayBuffer.
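A rough sketch of that equivalence (hedged: it assumes `Bun.deepEquals`, which appears elsewhere in this changeset, and uses a `Map` only as an example of structured-clone-able data):

```js
// Both paths round-trip the same structured-clone data.
import { serialize, deserialize } from "bun:jsc";

const value = { foo: "bar", nested: new Map([["a", 1]]) };

const viaJsc = deserialize(serialize(value));
const viaClone = structuredClone(value);

console.log(Bun.deepEquals(viaJsc, viaClone)); // => true
```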
@@ -12,41 +12,7 @@
Internally Bun's WebSocket implementation is built on [uWebSockets](https://github.com/uNetworking/uWebSockets).
{% /callout %}

## Connect to a WebSocket server

To connect to an external socket server, create an instance of `WebSocket` with the constructor.

```ts
const socket = new WebSocket("ws://localhost:3000");
```

Bun supports setting custom headers. This is a Bun-specific extension of the `WebSocket` standard.

```ts
const socket = new WebSocket("ws://localhost:3000", {
  headers: {
    // custom headers
  },
});
```

To add event listeners to the socket:

```ts
// message is received
socket.addEventListener("message", event => {});

// socket opened
socket.addEventListener("open", event => {});

// socket closed
socket.addEventListener("close", event => {});

// error handler
socket.addEventListener("error", event => {});
```

## Create a WebSocket server
## Start a WebSocket server

Below is a simple WebSocket server built with `Bun.serve`, in which all incoming requests are [upgraded](https://developer.mozilla.org/en-US/docs/Web/HTTP/Protocol_upgrade_mechanism) to WebSocket connections in the `fetch` handler. The socket handlers are declared in the `websocket` parameter.

@@ -109,7 +75,7 @@ Bun.serve({
});
```

## Sending messages
### Sending messages

Each `ServerWebSocket` instance has a `.send()` method for sending messages to the client. It supports a range of input types.

@@ -119,7 +85,7 @@ ws.send(response.arrayBuffer()); // ArrayBuffer
ws.send(new Uint8Array([1, 2, 3])); // TypedArray | DataView
```

## Headers
### Headers

Once the upgrade succeeds, Bun will send a `101 Switching Protocols` response per the [spec](https://developer.mozilla.org/en-US/docs/Web/HTTP/Protocol_upgrade_mechanism). Additional `headers` can be attached to this `Response` in the call to `server.upgrade()`.
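The docs' own example falls outside this hunk; as a hedged sketch (assuming only the `headers` option of `server.upgrade()` described above, and an illustrative `Set-Cookie` value):

```ts
// Minimal sketch: attach a header to the 101 Switching Protocols response.
Bun.serve({
  fetch(req, server) {
    const upgraded = server.upgrade(req, {
      headers: {
        "Set-Cookie": "session=example-value", // hypothetical cookie
      },
    });
    if (upgraded) return; // Bun sends the 101 response for us
    return new Response("Expected a WebSocket upgrade", { status: 400 });
  },
  websocket: {
    message(ws, message) {
      ws.send(message); // echo
    },
  },
});
```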
@@ -137,7 +103,7 @@ Bun.serve({
|
||||
});
|
||||
```
|
||||
|
||||
## Contextual data
|
||||
### Contextual data
|
||||
|
||||
Contextual `data` can be attached to a new WebSocket in the `.upgrade()` call. This data is made available on the `ws.data` property inside the WebSocket handlers.
|
||||
|
||||
@@ -145,16 +111,20 @@ Contextual `data` can be attached to a new WebSocket in the `.upgrade()` call. T
|
||||
type WebSocketData = {
|
||||
createdAt: number;
|
||||
channelId: string;
|
||||
authToken: string;
|
||||
};
|
||||
|
||||
// TypeScript: specify the type of `data`
|
||||
Bun.serve<WebSocketData>({
|
||||
fetch(req, server) {
|
||||
// use a library to parse cookies
|
||||
const cookies = parseCookies(req.headers.get("Cookie"));
|
||||
server.upgrade(req, {
|
||||
// TS: this object must conform to WebSocketData
|
||||
// this object must conform to WebSocketData
|
||||
data: {
|
||||
createdAt: Date.now(),
|
||||
channelId: new URL(req.url).searchParams.get("channelId"),
|
||||
authToken: cookies["X-Token"],
|
||||
},
|
||||
});
|
||||
|
||||
@@ -163,53 +133,76 @@ Bun.serve<WebSocketData>({
|
||||
websocket: {
|
||||
// handler called when a message is received
|
||||
async message(ws, message) {
|
||||
ws.data; // WebSocketData
|
||||
const user = getUserFromToken(ws.data.authToken);
|
||||
|
||||
await saveMessageToDatabase({
|
||||
channel: ws.data.channelId,
|
||||
message: String(message),
|
||||
userId: user.id,
|
||||
});
|
||||
},
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
## Pub/Sub
|
||||
To connect to this server from the browser, create a new `WebSocket`.
|
||||
|
||||
```ts#browser.js
|
||||
const socket = new WebSocket("ws://localhost:3000/chat");
|
||||
|
||||
socket.addEventListener("message", event => {
|
||||
console.log(event.data);
|
||||
})
|
||||
```
|
||||
|
||||
{% callout %}
|
||||
**Identifying users** — The cookies that are currently set on the page will be sent with the WebSocket upgrade request and available on `req.headers` in the `fetch` handler. Parse these cookies to determine the identity of the connecting user and set the value of `data` accordingly.
|
||||
{% /callout %}
|
||||
|
||||
### Pub/Sub
|
||||
|
||||
Bun's `ServerWebSocket` implementation implements a native publish-subscribe API for topic-based broadcasting. Individual sockets can `.subscribe()` to a topic (specified with a string identifier) and `.publish()` messages to all other subscribers to that topic. This topic-based broadcast API is similar to [MQTT](https://en.wikipedia.org/wiki/MQTT) and [Redis Pub/Sub](https://redis.io/topics/pubsub).
|
||||
|
||||
```ts
|
||||
const pubsubserver = Bun.serve<{username: string}>({
|
||||
const server = Bun.serve<{ username: string }>({
|
||||
fetch(req, server) {
|
||||
if (req.url === '/chat') {
|
||||
const cookies = getCookieFromRequest(req);
|
||||
const success = server.upgrade(req, {
|
||||
data: {username: cookies.username},
|
||||
});
|
||||
const url = new URL(req.url);
|
||||
if (url.pathname === "/chat") {
|
||||
console.log(`upgrade!`);
|
||||
const username = getUsernameFromReq(req);
|
||||
const success = server.upgrade(req, { data: { username } });
|
||||
return success
|
||||
? undefined
|
||||
: new Response('WebSocket upgrade error', {status: 400});
|
||||
: new Response("WebSocket upgrade error", { status: 400 });
|
||||
}
|
||||
|
||||
return new Response('Hello world');
|
||||
return new Response("Hello world");
|
||||
},
|
||||
websocket: {
|
||||
open(ws) {
|
||||
ws.subscribe('the-group-chat');
|
||||
ws.publish('the-group-chat', `${ws.data.username} has entered the chat`);
|
||||
const msg = `${ws.data.username} has entered the chat`;
|
||||
ws.subscribe("the-group-chat");
|
||||
ws.publish("the-group-chat", msg);
|
||||
},
|
||||
message(ws, message) {
|
||||
// this is a group chat
|
||||
// so the server re-broadcasts incoming message to everyone
|
||||
ws.publish('the-group-chat', `${ws.data.username}: ${message}`);
|
||||
ws.publish("the-group-chat", `${ws.data.username}: ${message}`);
|
||||
},
|
||||
close(ws) {
|
||||
ws.unsubscribe('the-group-chat');
|
||||
ws.publish('the-group-chat', `${ws.data.username} has left the chat`);
|
||||
const msg = `${ws.data.username} has left the chat`;
|
||||
ws.unsubscribe("the-group-chat");
|
||||
ws.publish("the-group-chat", msg);
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
console.log(`Listening on ${server.hostname}:${server.port}`);
|
||||
```
|
||||
|
||||
## Compression
|
||||
Calling `.publish(data)` will send the message to all subscribers of a topic _except_ the socket that called `.publish()`.
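If the sender should receive the message as well, one option is to publish from the `server` object instead (see `Server.publish` in the Reference below); a minimal sketch:

```ts
const server = Bun.serve<{ username: string }>({
  fetch(req, server) {
    const upgraded = server.upgrade(req, { data: { username: "guest" } });
    return upgraded ? undefined : new Response("Upgrade failed", { status: 400 });
  },
  websocket: {
    open(ws) {
      ws.subscribe("the-group-chat");
    },
    message(ws, message) {
      // server.publish() reaches every subscriber, including `ws` itself
      server.publish("the-group-chat", `${ws.data.username}: ${message}`);
    },
  },
});
```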
|
||||
|
||||
### Compression
|
||||
|
||||
Per-message [compression](https://websockets.readthedocs.io/en/stable/topics/compression.html) can be enabled with the `perMessageDeflate` parameter.
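A minimal sketch of enabling it server-wide (the Reference at the end of this page lists the finer-grained compression options):

```ts
Bun.serve({
  fetch(req, server) {
    if (server.upgrade(req)) return;
    return new Response("Expected a WebSocket upgrade", { status: 426 });
  },
  websocket: {
    // negotiate per-message deflate with clients that support it
    perMessageDeflate: true,
    message(ws, message) {
      ws.send(message); // echo
    },
  },
});
```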
|
||||
|
||||
@@ -231,7 +224,7 @@ ws.send("Hello world", true);
|
||||
|
||||
For fine-grained control over compression characteristics, refer to the [Reference](#reference).
|
||||
|
||||
## Backpressure
|
||||
### Backpressure
|
||||
|
||||
The `.send(message)` method of `ServerWebSocket` returns a `number` indicating the result of the operation.
|
||||
|
||||
@@ -241,6 +234,42 @@ The `.send(message)` method of `ServerWebSocket` returns a `number` indicating t
|
||||
|
||||
This gives you better control over backpressure in your server.
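As a sketch of acting on that value (roughly: `-1` when the message was enqueued due to backpressure, `0` when it was dropped, and otherwise the number of bytes sent — the exact semantics are in the table this diff elides):

```ts
import type { ServerWebSocket } from "bun";

function trySend(ws: ServerWebSocket<unknown>, payload: string) {
  const result = ws.send(payload);
  if (result === -1) {
    console.warn("backpressure: message enqueued; slow down the producer");
  } else if (result === 0) {
    console.warn("message dropped");
  } else {
    console.log(`sent ${result} bytes`);
  }
}
```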
|
||||
|
||||
## Connect to a `WebSocket` server
|
||||
|
||||
To connect to an external socket server, either from a browser or from Bun, create an instance of `WebSocket` with the constructor.
|
||||
|
||||
```ts
|
||||
const socket = new WebSocket("ws://localhost:3000");
|
||||
```
|
||||
|
||||
In browsers, the cookies that are currently set on the page will be sent with the WebSocket upgrade request. This is a standard feature of the `WebSocket` API.
|
||||
|
||||
For convenience, Bun lets you set custom headers directly in the constructor. This is a Bun-specific extension of the `WebSocket` standard. _This will not work in browsers._
|
||||
|
||||
```ts
|
||||
const socket = new WebSocket("ws://localhost:3000", {
|
||||
headers: {
|
||||
// custom headers
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
To add event listeners to the socket:
|
||||
|
||||
```ts
|
||||
// message is received
|
||||
socket.addEventListener("message", event => {});
|
||||
|
||||
// socket opened
|
||||
socket.addEventListener("open", event => {});
|
||||
|
||||
// socket closed
|
||||
socket.addEventListener("close", event => {});
|
||||
|
||||
// error handler
|
||||
socket.addEventListener("error", event => {});
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
```ts
|
||||
@@ -248,7 +277,10 @@ namespace Bun {
|
||||
export function serve(params: {
|
||||
fetch: (req: Request, server: Server) => Response | Promise<Response>;
|
||||
websocket?: {
|
||||
message: (ws: ServerWebSocket, message: string | ArrayBuffer | Uint8Array) => void;
|
||||
message: (
|
||||
ws: ServerWebSocket,
|
||||
message: string | ArrayBuffer | Uint8Array,
|
||||
) => void;
|
||||
open?: (ws: ServerWebSocket) => void;
|
||||
close?: (ws: ServerWebSocket) => void;
|
||||
error?: (ws: ServerWebSocket, error: Error) => void;
|
||||
@@ -278,7 +310,11 @@ type Compressor =
|
||||
|
||||
interface Server {
|
||||
pendingWebsockets: number;
|
||||
publish(topic: string, data: string | ArrayBufferView | ArrayBuffer, compress?: boolean): number;
|
||||
publish(
|
||||
topic: string,
|
||||
data: string | ArrayBufferView | ArrayBuffer,
|
||||
compress?: boolean,
|
||||
): number;
|
||||
upgrade(
|
||||
req: Request,
|
||||
options?: {
|
||||
|
||||
166
docs/api/workers.md
Normal file
@@ -0,0 +1,166 @@
|
||||
{% callout %}
|
||||
`Worker` support was added in Bun v0.6.15.
|
||||
{% /callout %}
|
||||
|
||||
[`Worker`](https://developer.mozilla.org/en-US/docs/Web/API/Worker) lets you start and communicate with a new JavaScript instance running on a separate thread while sharing I/O resources with the main thread.
|
||||
|
||||
Bun implements a minimal version of the [Web Workers API](https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API) with extensions that make it work better for server-side use cases. Like the rest of Bun, `Worker` in Bun supports CommonJS, ES Modules, TypeScript, JSX, TSX, and more out of the box. No extra build steps are necessary.
|
||||
|
||||
## Creating a `Worker`
|
||||
|
||||
Like in browsers, [`Worker`](https://developer.mozilla.org/en-US/docs/Web/API/Worker) is a global. Use it to create a new worker thread.
|
||||
|
||||
From the main thread:
|
||||
|
||||
```js#Main_thread
|
||||
const workerURL = new URL("worker.ts", import.meta.url).href;
|
||||
const worker = new Worker(workerURL);
|
||||
|
||||
worker.postMessage("hello");
|
||||
worker.onmessage = event => {
|
||||
console.log(event.data);
|
||||
};
|
||||
```
|
||||
|
||||
Worker thread:
|
||||
|
||||
```ts#worker.ts_(Worker_thread)
|
||||
self.onmessage = (event: MessageEvent) => {
|
||||
console.log(event.data);
|
||||
postMessage("world");
|
||||
};
|
||||
```
|
||||
|
||||
You can use `import`/`export` syntax in your worker code. Unlike in browsers, there's no need to specify `{type: "module"}` to use ES Modules.
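For instance, a worker file can use ES module syntax directly; a minimal sketch (the imported module is hypothetical):

```ts
// worker.ts — ES modules work here without `{ type: "module" }`
import { fibonacci } from "./math"; // hypothetical local module

self.onmessage = (event: MessageEvent) => {
  postMessage(fibonacci(event.data));
};
```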
|
||||
|
||||
To simplify error handling, the initial script to load is resolved at the time `new Worker(url)` is called.
|
||||
|
||||
```js
|
||||
const worker = new Worker("/not-found.js");
|
||||
// throws an error immediately
|
||||
```
|
||||
|
||||
The specifier passed to `Worker` is resolved relative to the project root (like typing `bun ./path/to/file.js`).
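For example, the following resolves against the project root regardless of where the file creating the worker lives (the worker path is hypothetical):

```ts
// equivalent to resolving `bun ./src/workers/heavy-task.ts` from the project root
const worker = new Worker("./src/workers/heavy-task.ts"); // hypothetical path
```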
|
||||
|
||||
### `"open"`
|
||||
|
||||
The `"open"` event is emitted when a worker is created and ready to receive messages. This can be used to send an initial message to a worker once it's ready. (This event does not exist in browsers.)
|
||||
|
||||
```ts
|
||||
const worker = new Worker(new URL("worker.ts", import.meta.url).href);
|
||||
|
||||
worker.addEventListener("open", () => {
|
||||
console.log("worker is ready");
|
||||
});
|
||||
```
|
||||
|
||||
## Messages with `postMessage`
|
||||
|
||||
To send messages, use [`worker.postMessage`](https://developer.mozilla.org/en-US/docs/Web/API/Worker/postMessage) and [`self.postMessage`](https://developer.mozilla.org/en-US/docs/Web/API/Window/postMessage). This leverages the [HTML Structured Clone Algorithm](https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Structured_clone_algorithm).
|
||||
|
||||
```js
|
||||
// On the worker thread, `postMessage` is automatically "routed" to the parent thread.
|
||||
postMessage({ hello: "world" });
|
||||
|
||||
// On the main thread
|
||||
worker.postMessage({ hello: "world" });
|
||||
```
|
||||
|
||||
To receive messages, use the [`message` event handler](https://developer.mozilla.org/en-US/docs/Web/API/Worker/message_event) on the worker and main thread.
|
||||
|
||||
```js
|
||||
// Worker thread:
|
||||
self.addEventListener("message", event => {
|
||||
console.log(event.data);
|
||||
});
|
||||
// or use the setter:
|
||||
// self.onmessage = fn
|
||||
|
||||
// if on the main thread
|
||||
worker.addEventListener("message", event => {
|
||||
console.log(event.data);
|
||||
});
|
||||
// or use the setter:
|
||||
// worker.onmessage = fn
|
||||
```
|
||||
|
||||
## Terminating a worker
|
||||
|
||||
A `Worker` instance terminates automatically when Bun's process exits. To terminate a `Worker` sooner, call `worker.terminate()`.
|
||||
|
||||
```ts
|
||||
const worker = new Worker(new URL("worker.ts", import.meta.url).href);
|
||||
|
||||
// ...some time later
|
||||
worker.terminate();
|
||||
```
|
||||
|
||||
### `process.exit()`
|
||||
|
||||
A worker can terminate itself with `process.exit()`. This does not terminate the main process. Like in Node.js, the `beforeExit` and `exit` events (registered with `process.on('beforeExit', callback)` and `process.on('exit', callback)`) are emitted on the worker thread, not on the main thread.
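A minimal sketch of a worker that finishes its work and shuts itself down (the message and exit code are illustrative):

```ts
// worker thread
process.on("beforeExit", () => {
  console.log("worker beforeExit"); // fires on the worker thread only
});

postMessage("done");
process.exit(0); // terminates this worker; the main process keeps running
```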
|
||||
|
||||
### `"close"`
|
||||
|
||||
The `"close"` event is emitted when a worker has been terminated. It can take some time for the worker to actually terminate, so this event is emitted when the worker has been marked as terminated.
|
||||
|
||||
```ts
|
||||
const worker = new Worker(new URL("worker.ts", import.meta.url).href);
|
||||
|
||||
worker.addEventListener("close", () => {
|
||||
console.log("worker is being closed");
|
||||
});
|
||||
```
|
||||
|
||||
This event does not exist in browsers.
|
||||
|
||||
## Managing lifetime
|
||||
|
||||
By default, an active `Worker` will _not_ keep the main (spawning) process alive. Once the main script finishes, the main thread will terminate, shutting down any workers it created.
|
||||
|
||||
### `worker.ref`
|
||||
|
||||
To keep the process alive until the `Worker` terminates, call `worker.ref()`. This couples the lifetime of the worker to the lifetime of the main process.
|
||||
|
||||
```ts
|
||||
const worker = new Worker(new URL("worker.ts", import.meta.url).href);
|
||||
worker.ref();
|
||||
```
|
||||
|
||||
Alternatively, you can also pass an `options` object to `Worker`:
|
||||
|
||||
```ts
|
||||
const worker = new Worker(new URL("worker.ts", import.meta.url).href, {
|
||||
bun: {
|
||||
ref: true,
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### `worker.unref`
|
||||
|
||||
To stop keeping the process alive, call `worker.unref()`.
|
||||
|
||||
```ts
|
||||
const worker = new Worker(new URL("worker.ts", import.meta.url).href);
|
||||
worker.ref();
|
||||
// ...later on
|
||||
worker.unref();
|
||||
```
|
||||
|
||||
Note: `worker.ref()` and `worker.unref()` do not exist in browsers.
|
||||
|
||||
## Memory usage with `smol`
|
||||
|
||||
JavaScript instances can use a lot of memory. Bun's `Worker` supports a `smol` mode that reduces memory usage, at a cost of performance. To enable `smol` mode, pass `smol: true` to the `options` object in the `Worker` constructor.
|
||||
|
||||
```js
|
||||
const worker = new Worker("./i-am-smol.ts", {
|
||||
bun: {
|
||||
smol: true,
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
{% details summary="What does `smol` mode actually do?" %}
|
||||
Setting `smol: true` sets `JSC::HeapSize` to be `Small` instead of the default `Large`.
|
||||
{% /details %}
|
||||
@@ -47,6 +47,9 @@ registry = "https://registry.yarnpkg.com/"
|
||||
# Install for production? This is the equivalent to the "--production" CLI argument
|
||||
production = false
|
||||
|
||||
# Disallow changes to lockfile? This is the equivalent to the "--frozen-lockfile" CLI argument
|
||||
frozenLockfile = false
|
||||
|
||||
# Don't actually install
|
||||
dryRun = true
|
||||
|
||||
@@ -108,6 +111,7 @@ export interface Install {
|
||||
scopes: Scopes;
|
||||
registry: Registry;
|
||||
production: boolean;
|
||||
frozenLockfile: boolean;
|
||||
dryRun: boolean;
|
||||
optional: boolean;
|
||||
dev: boolean;
|
||||
|
||||
@@ -49,6 +49,12 @@ To install in production mode (i.e. without `devDependencies`):
|
||||
$ bun install --production
|
||||
```
|
||||
|
||||
To install with reproducible dependencies, use `--frozen-lockfile`. If your `package.json` disagrees with `bun.lockb`, Bun will exit with an error. This is useful for production builds and CI environments.
|
||||
|
||||
```bash
|
||||
$ bun install --frozen-lockfile
|
||||
```
|
||||
|
||||
To perform a dry run (i.e. don't actually install anything):
|
||||
|
||||
```bash
|
||||
@@ -80,6 +86,9 @@ peer = false
|
||||
# equivalent to `--production` flag
|
||||
production = false
|
||||
|
||||
# equivalent to `--frozen-lockfile` flag
|
||||
frozenLockfile = false
|
||||
|
||||
# equivalent to `--dry-run` flag
|
||||
dryRun = false
|
||||
```
|
||||
@@ -115,6 +124,26 @@ To add a package as an optional dependency (`"optionalDependencies"`):
|
||||
$ bun add --optional lodash
|
||||
```
|
||||
|
||||
To add a package and pin to the resolved version, use `--exact`. This will resolve the version of the package and add it to your `package.json` with an exact version number instead of a version range.
|
||||
|
||||
```bash
|
||||
$ bun add react --exact
|
||||
```
|
||||
|
||||
This will add the following to your `package.json`:
|
||||
|
||||
```jsonc
|
||||
{
|
||||
"dependencies": {
|
||||
// without --exact
|
||||
"react": "^18.2.0", // this matches >= 18.2.0 < 19.0.0
|
||||
|
||||
// with --exact
|
||||
"react": "18.2.0" // this matches only 18.2.0 exactly
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
To install a package globally:
|
||||
|
||||
```bash
|
||||
@@ -185,7 +214,7 @@ $ cd /path/to/my-app
|
||||
$ bun link cool-pkg
|
||||
```
|
||||
|
||||
This will add `cool-pkg` to the `dependencies` field of your app's package.json with a special version specifier that tells Bun to load from the registered local directory instead of installing from `npm`.
|
||||
In addition, the `--save` flag can be used to add `cool-pkg` to the `dependencies` field of your app's package.json with a special version specifier that tells Bun to load from the registered local directory instead of installing from `npm`:
|
||||
|
||||
```json-diff
|
||||
{
|
||||
@@ -197,6 +226,46 @@ This will add `cool-pkg` to the `dependencies` field of your app's package.json
|
||||
}
|
||||
```
|
||||
|
||||
## Trusted dependencies
|
||||
|
||||
Unlike other npm clients, Bun does not execute arbitrary lifecycle scripts for installed dependencies, such as `postinstall`. These scripts represent a potential security risk, as they can execute arbitrary code on your machine.
|
||||
|
||||
<!-- Bun maintains an allow-list of popular packages containing `postinstall` scripts that are known to be safe. To run lifecycle scripts for packages that aren't on this list, add the package to `trustedDependencies` in your package.json. -->
|
||||
|
||||
To tell Bun to allow lifecycle scripts for a particular package, add the package to `trustedDependencies` in your package.json.
|
||||
|
||||
<!-- ```json-diff
|
||||
{
|
||||
"name": "my-app",
|
||||
"version": "1.0.0",
|
||||
+ "trustedDependencies": {
|
||||
+ "my-trusted-package": "*"
|
||||
+ }
|
||||
}
|
||||
``` -->
|
||||
|
||||
```json-diff
|
||||
{
|
||||
"name": "my-app",
|
||||
"version": "1.0.0",
|
||||
+ "trustedDependencies": ["my-trusted-package"]
|
||||
}
|
||||
```
|
||||
|
||||
Bun reads this field and will run lifecycle scripts for `my-trusted-package`.
|
||||
|
||||
<!-- If you specify a version range, Bun will only execute lifecycle scripts if the resolved package version matches the range. -->
|
||||
<!--
|
||||
```json
|
||||
{
|
||||
"name": "my-app",
|
||||
"version": "1.0.0",
|
||||
"trustedDependencies": {
|
||||
"my-trusted-package": "^1.0.0"
|
||||
}
|
||||
}
|
||||
``` -->
|
||||
|
||||
## Git dependencies
|
||||
|
||||
To add a dependency from a git repository:
|
||||
|
||||
@@ -102,7 +102,7 @@ To debug environment variables, run `bun run env` to view a list of resolved env
|
||||
|
||||
Bun is designed to start fast and run fast.
|
||||
|
||||
Under the hood Bun uses the [JavaScriptCore engine](https://developer.apple.com/documentation/javascriptcore), which is developed by Apple for Safari. In most cases, the startup and running performance is faster than V8, the engine used by Node.js and Chromium-based browsers. It's transpiler and runtime are written in Zig, a modern, high-performance language. On Linux, this translates into startup times [4x faster](https://twitter.com/jarredsumner/status/1499225725492076544) than Node.js.
|
||||
Under the hood Bun uses the [JavaScriptCore engine](https://developer.apple.com/documentation/javascriptcore), which is developed by Apple for Safari. In most cases, the startup and running performance is faster than V8, the engine used by Node.js and Chromium-based browsers. Its transpiler and runtime are written in Zig, a modern, high-performance language. On Linux, this translates into startup times [4x faster](https://twitter.com/jarredsumner/status/1499225725492076544) than Node.js.
|
||||
|
||||
{% image src="/images/bun-run-speed.jpeg" caption="Bun vs Node.js vs Deno running Hello World" /%}
|
||||
|
||||
|
||||
@@ -65,6 +65,24 @@ $ bun test --preload ./setup.ts
|
||||
|
||||
See [Test > Lifecycle](/docs/test/lifecycle) for complete documentation.
|
||||
|
||||
## Mocks
|
||||
|
||||
Create mocks with the `mock` function. Mocks are automatically reset between tests.
|
||||
|
||||
```ts
|
||||
import { test, expect, mock } from "bun:test";
|
||||
const random = mock(() => Math.random());
|
||||
|
||||
test("random", async () => {
|
||||
const val = random();
|
||||
expect(val).toBeGreaterThan(0);
|
||||
expect(random).toHaveBeenCalled();
|
||||
expect(random).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
```
|
||||
|
||||
See [Test > Mocks](/docs/test/mocks) for complete documentation.
|
||||
|
||||
## Snapshot testing
|
||||
|
||||
Snapshots are supported by `bun test`. See [Test > Snapshots](/docs/test/snapshots) for complete documentation.
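A minimal sketch of a snapshot test, assuming the Jest-style `toMatchSnapshot` matcher:

```ts
import { test, expect } from "bun:test";

test("snapshot", () => {
  // the first run writes a snapshot file; later runs compare against it
  expect({ a: 1, b: 2 }).toMatchSnapshot();
});
```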
|
||||
@@ -77,6 +95,8 @@ Bun is compatible with popular UI testing libraries:
|
||||
- [DOM Testing Library](https://testing-library.com/docs/dom-testing-library/intro/)
|
||||
- [React Testing Library](https://testing-library.com/docs/react-testing-library/intro)
|
||||
|
||||
See [Test > DOM Testing](/docs/test/dom) for complete documentation.
|
||||
|
||||
## Performance
|
||||
|
||||
Bun's test runner is fast.
|
||||
|
||||
@@ -1,20 +1,20 @@
|
||||
[Stric](https://github.com/bunsvr) is a minimalist, fast web framework for Bun.
|
||||
|
||||
```ts#index.ts
|
||||
import { App } from "@stricjs/core";
|
||||
import { Router } from '@stricjs/router';
|
||||
|
||||
// Export the fetch handler and serve with Bun
|
||||
export default new App()
|
||||
// Return "Hi!" on every request
|
||||
.use(() => new Response("Hi!"));
|
||||
export default new Router()
|
||||
// Return 'Hi' on every request
|
||||
.get('/', () => new Response('Hi'));
|
||||
```
|
||||
|
||||
Stric provides support for [ArrowJS](https://www.arrow-js.com), a library for building reactive interfaces in **native** JavaScript.
|
||||
Stric provides support for [ArrowJS](https://www.arrow-js.com), a library for building reactive interfaces.
|
||||
|
||||
{% codetabs %}
|
||||
|
||||
```ts#src/App.ts
|
||||
import { html } from "@stricjs/arrow/utils";
|
||||
import { html } from '@stricjs/arrow/utils';
|
||||
|
||||
// Code inside this function can use web APIs
|
||||
export function render() {
|
||||
@@ -23,10 +23,10 @@ export function render() {
|
||||
};
|
||||
|
||||
// Set the path to handle
|
||||
export const path = "/";
|
||||
export const path = '/';
|
||||
```
|
||||
```ts#index.ts
|
||||
import { PageRouter } from "@stricjs/arrow";
|
||||
import { PageRouter } from '@stricjs/arrow';
|
||||
|
||||
// Create a page router, build and serve directly
|
||||
new PageRouter().serve();
|
||||
|
||||
@@ -49,6 +49,12 @@ To install in production mode (i.e. without `devDependencies`):
|
||||
$ bun install --production
|
||||
```
|
||||
|
||||
To install dependencies without allowing changes to lockfile (useful on CI):
|
||||
|
||||
```bash
|
||||
$ bun install --frozen-lockfile
|
||||
```
|
||||
|
||||
To perform a dry run (i.e. don't actually install anything):
|
||||
|
||||
```bash
|
||||
@@ -80,6 +86,9 @@ peer = false
|
||||
# equivalent to `--production` flag
|
||||
production = false
|
||||
|
||||
# equivalent to `--frozen-lockfile` flag
|
||||
frozenLockfile = false
|
||||
|
||||
# equivalent to `--dry-run` flag
|
||||
dryRun = false
|
||||
```
|
||||
|
||||
@@ -24,3 +24,7 @@ To configure a private registry scoped to a particular organization:
|
||||
# registry with token
|
||||
"@myorg3" = { token = "$npm_token", url = "https://registry.myorg.com/" }
|
||||
```
|
||||
|
||||
### `.npmrc`
|
||||
|
||||
Bun does not currently read `.npmrc` files. For private registries, migrate your registry configuration to `bunfig.toml` as documented above.
|
||||
|
||||
@@ -3,17 +3,17 @@ Bun ships as a single executable that can be installed a few different ways.
|
||||
{% callout %}
|
||||
**Windows users** — Bun does not currently provide a native Windows build. We're working on this; progress can be tracked at [this issue](https://github.com/oven-sh/bun/issues/43). In the meantime, use one of the installation methods below for Windows Subsystem for Linux.
|
||||
|
||||
**Linux users** — Kernel version 5.6 or higher is strongly recommended, but the minimum is 5.1.
|
||||
**Linux users** — The `unzip` package is required to install Bun. Kernel version 5.6 or higher is strongly recommended, but the minimum is 5.1.
|
||||
{% /callout %}
|
||||
|
||||
{% codetabs %}
|
||||
|
||||
```bash#Native
|
||||
$ curl -fsSL https://bun.sh/install | bash # for macOS, Linux, and WSL
|
||||
```bash#NPM
|
||||
$ npm install -g bun # the last `npm` command you'll ever need
|
||||
```
|
||||
|
||||
```bash#npm
|
||||
$ npm install -g bun # the last `npm` command you'll ever need
|
||||
```bash#Native
|
||||
$ curl -fsSL https://bun.sh/install | bash # for macOS, Linux, and WSL
|
||||
```
|
||||
|
||||
```bash#Homebrew
|
||||
@@ -26,7 +26,7 @@ $ docker pull oven/bun
|
||||
$ docker run --rm --init --ulimit memlock=-1:-1 oven/bun
|
||||
```
|
||||
|
||||
```bash#proto
|
||||
```bash#Proto
|
||||
$ proto install bun
|
||||
```
|
||||
|
||||
|
||||
22
docs/nav.ts
@@ -135,13 +135,13 @@ export default {
|
||||
description:
|
||||
"Install all dependencies with `bun install`, or manage dependencies with `bun add` and `bun remove`.",
|
||||
}),
|
||||
page("install/workspaces", "Workspaces", {
|
||||
description: "Bun's package manager supports workspaces and mono-repo development workflows.",
|
||||
}),
|
||||
page("install/cache", "Global cache", {
|
||||
description:
|
||||
"Bun's package manager installs all packages into a shared global cache to avoid redundant re-downloads.",
|
||||
}),
|
||||
page("install/workspaces", "Workspaces", {
|
||||
description: "Bun's package manager supports workspaces and mono-repo development workflows.",
|
||||
}),
|
||||
page("install/lockfile", "Lockfile", {
|
||||
description:
|
||||
"Bun's binary lockfile `bun.lockb` tracks your resolved dependency ytrr, making future installs fast and repeatable.",
|
||||
@@ -180,6 +180,9 @@ export default {
|
||||
page("cli/test", "`bun test`", {
|
||||
description: "Bun's test runner uses Jest-compatible syntax but runs 100x faster.",
|
||||
}),
|
||||
page("test/hot", "Watch mode", {
|
||||
description: "Reload your tests automatically on change.",
|
||||
}),
|
||||
page("test/writing", "Writing tests", {
|
||||
description:
|
||||
"Write your tests using Jest-like expect matchers, plus setup/teardown hooks, snapshot testing, and more",
|
||||
@@ -187,11 +190,17 @@ export default {
|
||||
page("test/lifecycle", "Lifecycle hooks", {
|
||||
description: "Add lifecycle hooks to your tests that run before/after each test or test run",
|
||||
}),
|
||||
page("test/mocks", "Mocks", {
|
||||
description: "Mocks functions and track method calls",
|
||||
}),
|
||||
page("test/snapshots", "Snapshots", {
|
||||
description: "Add lifecycle hooks to your tests that run before/after each test or test run",
|
||||
}),
|
||||
page("test/hot", "Watch mode", {
|
||||
description: "Reload your tests automatically on change.",
|
||||
page("test/time", "Dates and times", {
|
||||
description: "Control the date & time in your tests for more reliable and deterministic tests",
|
||||
}),
|
||||
page("test/dom", "DOM testing", {
|
||||
description: "Write headless tests for UI and React/Vue/Svelte/Lit components with happy-dom",
|
||||
}),
|
||||
|
||||
divider("Package runner"),
|
||||
@@ -233,6 +242,9 @@ export default {
|
||||
page("api/websockets", "WebSockets", {
|
||||
description: `Bun supports server-side WebSockets with on-the-fly compression, TLS support, and a Bun-native pubsub API.`,
|
||||
}), // "`Bun.serve`"),
|
||||
page("api/workers", "Workers", {
|
||||
description: `Run code in a separate thread with Bun's native Worker API.`,
|
||||
}), // "`Worker`"),
|
||||
page("api/binary-data", "Binary data", {
|
||||
description: `How to represent and manipulate binary data in Bun.`,
|
||||
}), // "`Bun.serve`"),
|
||||
|
||||
@@ -41,9 +41,7 @@ $ brew install llvm@15
|
||||
```
|
||||
|
||||
```bash#Ubuntu/Debian
|
||||
# On Ubuntu 22.04 and newer, LLVM 15 is available in the default repositories
|
||||
$ sudo apt install llvm-15 lld-15
|
||||
# On older versions,
|
||||
$ # LLVM has an automatic installation script that is compatible with all versions of Ubuntu
|
||||
$ wget https://apt.llvm.org/llvm.sh -O - | sudo bash -s -- 15 all
|
||||
```
|
||||
|
||||
@@ -85,7 +83,7 @@ $ brew install automake ccache cmake coreutils esbuild gnu-sed go libiconv libto
|
||||
```
|
||||
|
||||
```bash#Ubuntu/Debian
|
||||
$ sudo apt install cargo ccache cmake esbuild git golang libtool ninja-build pkg-config rustc
|
||||
$ sudo apt install cargo ccache cmake git golang libtool ninja-build pkg-config rustc esbuild
|
||||
```
|
||||
|
||||
```bash#Arch
|
||||
@@ -94,7 +92,19 @@ $ pacman -S base-devel ccache cmake esbuild git go libiconv libtool make ninja p
|
||||
|
||||
{% /codetabs %}
|
||||
|
||||
In addition to this, you will need either `bun` or `npm` installed to install the package.json dependencies.
|
||||
{% details summary="Ubuntu — Unable to locate package esbuild" %}
|
||||
|
||||
The `apt install esbuild` command may fail with an `Unable to locate package` error if you are using an Ubuntu mirror that does not contain an exact copy of the original Ubuntu server. Note that the same error may occur if you are not using any mirror but have the Ubuntu Universe enabled in the `sources.list`. In this case, you can install esbuild manually:
|
||||
|
||||
```bash
|
||||
$ curl -fsSL https://esbuild.github.io/dl/latest | sh
|
||||
$ chmod +x ./esbuild
|
||||
$ sudo mv ./esbuild /usr/local/bin
|
||||
```
|
||||
|
||||
{% /details %}
|
||||
|
||||
In addition to this, you will need an npm package manager (`bun`, `npm`, etc) to install the `package.json` dependencies.
|
||||
|
||||
## Install Zig
|
||||
|
||||
@@ -102,12 +112,12 @@ Zig can be installed either with our npm package [`@oven/zig`](https://www.npmjs
|
||||
|
||||
```bash
|
||||
$ bun install -g @oven/zig
|
||||
$ zigup master
|
||||
$ zigup 0.11.0-dev.4006+bf827d0b5
|
||||
```
|
||||
|
||||
## Building
|
||||
|
||||
After cloning the repository, prepare bun to be built:
|
||||
After cloning the repository, run the following command to prepare Bun to be built:
|
||||
|
||||
```bash
|
||||
$ make setup
|
||||
@@ -217,6 +227,37 @@ You'll need a very recent version of Valgrind due to DWARF 5 debug symbols. You
|
||||
$ valgrind --fair-sched=try --track-origins=yes bun-debug <args>
|
||||
```
|
||||
|
||||
## Updating `WebKit`
|
||||
|
||||
The Bun team will occasionally bump the version of WebKit used in Bun. When this happens, you may see something like this when you run `git status`.
|
||||
|
||||
```bash
|
||||
$ git status
|
||||
On branch my-branch
|
||||
Changes not staged for commit:
|
||||
(use "git add <file>..." to update what will be committed)
|
||||
(use "git restore <file>..." to discard changes in working directory)
|
||||
modified: src/bun.js/WebKit (new commits)
|
||||
```
|
||||
|
||||
For performance reasons, `make submodule` does not automatically update the WebKit submodule. To update, run the following commands from the root of the Bun repo:
|
||||
|
||||
```bash
|
||||
$ bun install
|
||||
$ make regenerate-bindings
|
||||
```
|
||||
|
||||
<!-- Check the [Bun repo](https://github.com/oven-sh/bun/tree/main/src/bun.js) to get the hash of the commit of WebKit is currently being used.
|
||||
|
||||
{% image width="270" src="https://github.com/oven-sh/bun/assets/3084745/51730b73-89ef-4358-9a41-9563a60a54be" /%} -->
|
||||
|
||||
<!--
|
||||
```bash
|
||||
$ cd src/bun.js/WebKit
|
||||
$ git fetch
|
||||
$ git checkout <hash>
|
||||
``` -->
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### libarchive
|
||||
|
||||
@@ -85,6 +85,11 @@ Bun statically links these libraries:
|
||||
|
||||
---
|
||||
|
||||
- [`libbase64`](https://github.com/aklomp/base64/blob/master/LICENSE)
|
||||
- BSD 2-Clause
|
||||
|
||||
---
|
||||
|
||||
- A fork of [`uWebsockets`](https://github.com/jarred-sumner/uwebsockets)
|
||||
- Apache 2.0 licensed
|
||||
|
||||
|
||||
@@ -12,37 +12,6 @@ You can also create a global configuration file at the following paths:
|
||||
|
||||
If both a global and local `bunfig` are detected, the results are shallow-merged, with local overriding global. CLI flags will override `bunfig` settings where applicable.
|
||||
|
||||
## Environment variables
|
||||
|
||||
These environment variables are checked by Bun to detect functionality and toggle features.
|
||||
|
||||
{% table %}
|
||||
|
||||
- Name
|
||||
- Description
|
||||
|
||||
---
|
||||
|
||||
- `TMPDIR`
|
||||
- Bun occasionally requires a directory to store intermediate assets during bundling or other operations. If unset, defaults to the platform-specific temporary directory: `/tmp` on Linux, `/private/tmp` on macOS.
|
||||
|
||||
---
|
||||
|
||||
- `NO_COLOR`
|
||||
- If `NO_COLOR=1`, then ANSI color output is [disabled](https://no-color.org/).
|
||||
|
||||
---
|
||||
|
||||
- `FORCE_COLOR`
|
||||
- If `FORCE_COLOR=1`, then ANSI color output is force enabled, even if `NO_COLOR` is set.
|
||||
|
||||
---
|
||||
|
||||
- `DO_NOT_TRACK`
|
||||
- If `DO_NOT_TRACK=1`, then analytics are [disabled](https://do-not-track.dev/). Bun records bundle timings (so we can answer with data, "is Bun getting faster?") and feature usage (e.g., "are people actually using macros?"). The request body size is about 60 bytes, so it's not a lot of data.
|
||||
|
||||
{% /table %}
|
||||
|
||||
## Runtime
|
||||
|
||||
```toml
|
||||
@@ -56,6 +25,9 @@ jsxFactory = "h"
|
||||
jsxFragment = "Fragment"
|
||||
jsxImportSource = "react"
|
||||
|
||||
# Reduce memory usage at the cost of performance
|
||||
smol = true
|
||||
|
||||
# Set a default framework to use
|
||||
# By default, Bun will look for an npm package like `bun-framework-${framework}`, followed by `${framework}`
|
||||
logLevel = "debug"
|
||||
@@ -77,33 +49,15 @@ logLevel = "debug"
|
||||
# It will recognize non-GUI editors, but I don't think it will work yet
|
||||
```
|
||||
|
||||
### Debugging
|
||||
|
||||
```toml
|
||||
[debug]
|
||||
# When navigating to a blob: or src: link, open the file in your editor
|
||||
# If not, it tries $EDITOR or $VISUAL
|
||||
# If that still fails, it will try Visual Studio Code, then Sublime Text, then a few others
|
||||
# This is used by Bun.openInEditor()
|
||||
editor = "code"
|
||||
|
||||
# List of editors:
|
||||
# - "subl", "sublime"
|
||||
# - "vscode", "code"
|
||||
# - "textmate", "mate"
|
||||
# - "idea"
|
||||
# - "webstorm"
|
||||
# - "nvim", "neovim"
|
||||
# - "vim","vi"
|
||||
# - "emacs"
|
||||
```
|
||||
|
||||
## Test runner
|
||||
|
||||
```toml
|
||||
[test]
|
||||
# setup scripts to run before all test files
|
||||
preload = ["./setup.ts"]
|
||||
|
||||
# Reduce memory usage at the cost of performance
|
||||
smol = true
|
||||
```
|
||||
|
||||
## Package manager
|
||||
@@ -129,6 +83,9 @@ peer = false
|
||||
# equivalent to `--production` flag
|
||||
production = false
|
||||
|
||||
# equivalent to `--frozen-lockfile` flag
|
||||
frozenLockfile = false
|
||||
|
||||
# equivalent to `--dry-run` flag
|
||||
dryRun = false
|
||||
```
|
||||
@@ -207,29 +164,64 @@ save = true
|
||||
print = "yarn"
|
||||
```
|
||||
|
||||
## Dev server (`bun dev`)
|
||||
|
||||
{% callout %}
The `bun dev` command is likely to change soon and will likely be deprecated in an upcoming release.
{% /callout %}
|
||||
|
||||
Here is an example:
|
||||
### Debugging
|
||||
|
||||
```toml
|
||||
# Set a default framework to use
|
||||
# By default, Bun will look for an npm package like `bun-framework-${framework}`, followed by `${framework}`
|
||||
framework = "next"
|
||||
|
||||
[bundle]
|
||||
saveTo = "node_modules.bun"
|
||||
# Don't need this if `framework` is set, but showing it here as an example anyway
|
||||
entryPoints = ["./app/index.ts"]
|
||||
|
||||
[bundle.packages]
|
||||
# If you're bundling packages that do not actually live in a `node_modules` folder or do not have the full package name in the file path, you can pass this to bundle them anyway
|
||||
"@bigapp/design-system" = true
|
||||
|
||||
[dev]
|
||||
# Change the default port from 3000 to 5000
|
||||
# Also inherited by Bun.serve
|
||||
port = 5000
|
||||
[debug]
|
||||
# When navigating to a blob: or src: link, open the file in your editor
|
||||
# If not, it tries $EDITOR or $VISUAL
|
||||
# If that still fails, it will try Visual Studio Code, then Sublime Text, then a few others
|
||||
# This is used by Bun.openInEditor()
|
||||
editor = "code"
|
||||
|
||||
# List of editors:
|
||||
# - "subl", "sublime"
|
||||
# - "vscode", "code"
|
||||
# - "textmate", "mate"
|
||||
# - "idea"
|
||||
# - "webstorm"
|
||||
# - "nvim", "neovim"
|
||||
# - "vim","vi"
|
||||
# - "emacs"
|
||||
```
|
||||
|
||||
## Environment variables
|
||||
|
||||
These environment variables are checked by Bun to detect functionality and toggle features.
|
||||
|
||||
{% table %}
|
||||
|
||||
- Name
|
||||
- Description
|
||||
|
||||
---
|
||||
|
||||
- `TMPDIR`
|
||||
- Bun occasionally requires a directory to store intermediate assets during bundling or other operations. If unset, defaults to the platform-specific temporary directory: `/tmp` on Linux, `/private/tmp` on macOS.
|
||||
|
||||
---
|
||||
|
||||
- `NO_COLOR`
|
||||
- If `NO_COLOR=1`, then ANSI color output is [disabled](https://no-color.org/).
|
||||
|
||||
---
|
||||
|
||||
- `FORCE_COLOR`
|
||||
- If `FORCE_COLOR=1`, then ANSI color output is force enabled, even if `NO_COLOR` is set.
|
||||
|
||||
---
|
||||
|
||||
- `DO_NOT_TRACK`
|
||||
- If `DO_NOT_TRACK=1`, then analytics are [disabled](https://do-not-track.dev/). Bun records bundle timings (so we can answer with data, "is Bun getting faster?") and feature usage (e.g., "are people actually using macros?"). The request body size is about 60 bytes, so it's not a lot of data.
|
||||
|
||||
{% /table %}
|
||||
|
||||
## smol mode
|
||||
|
||||
To reduce Bun's memory footprint in the runtime and test runner, pass `--smol`.
|
||||
|
||||
```bash
|
||||
$ bun --smol ./my-script.ts
|
||||
```
|
||||
|
||||
This configures JavaScriptCore (the engine) to use a smaller heap size and run the garbage collector more frequently. This is currently disabled by default for performance reasons, but it may become the default in the future. This feature was introduced in Bun v0.6.15.
|
||||
|
||||
@@ -7,7 +7,6 @@ Bun supports two kinds of automatic reloading via CLI flags:
|
||||
|
||||
Watch mode can be used with `bun test` or when running TypeScript, JSX, and JavaScript files.
|
||||
|
||||
|
||||
To run a file in `--watch` mode:
|
||||
|
||||
```bash
|
||||
@@ -17,7 +16,7 @@ $ bun --watch index.tsx
|
||||
To run your tests in `--watch` mode:
|
||||
|
||||
```bash
|
||||
$ bun --watch test
|
||||
$ bun --watch test
|
||||
```
|
||||
|
||||
In `--watch` mode, Bun keeps track of all imported files and watches them for changes. When a change is detected, Bun restarts the process, preserving the same set of CLI arguments and environment variables used in the initial run. If Bun crashes, `--watch` will attempt to automatically restart the process.
|
||||
@@ -58,7 +57,7 @@ serve({
|
||||
Running `bun test` in watch mode and `save-on-keypress` enabled:
|
||||
|
||||
```bash
|
||||
$ bun --watch test
|
||||
$ bun --watch test
|
||||
```
|
||||
|
||||

|
||||
|
||||
@@ -108,8 +108,8 @@ Once it finds the `foo` package, Bun reads the `package.json` to determine how t
|
||||
"worker": "./index.js",
|
||||
"module": "./index.js",
|
||||
"node": "./index.js",
|
||||
"browser": "./index.js",
|
||||
"default": "./index.js" // lowest priority
|
||||
"default": "./index.js",
|
||||
"browser": "./index.js" // lowest priority
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
Bun aims for complete Node.js API compatibility. Most `npm` packages intended for `Node.js` environments will work with Bun out of the box; the best way to know for certain is to try it.
|
||||
|
||||
This page is updated regularly to reflect compatibility status of the latest version of Bun.
|
||||
This page is updated regularly to reflect compatibility status of the latest version of Bun. If you run into any bugs with a particular package, please [open an issue](https://bun.sh/issues). Opening issues for compatibility bugs helps us prioritize what to work on next.
|
||||
|
||||
## Built-in modules
|
||||
|
||||
@@ -26,8 +26,7 @@ This page is updated regularly to reflect compatibility status of the latest ver
|
||||
---
|
||||
|
||||
- {% anchor id="node_buffer" %} [`node:buffer`](https://nodejs.org/api/buffer.html) {% /anchor %}
|
||||
- 🟡
|
||||
- Incomplete implementation of `base64` and `base64url` encodings.
|
||||
- 🟢
|
||||
|
||||
---
|
||||
|
||||
@@ -51,7 +50,7 @@ This page is updated regularly to reflect compatibility status of the latest ver
|
||||
|
||||
- {% anchor id="node_crypto" %} [`node:crypto`](https://nodejs.org/api/crypto.html) {% /anchor %}
|
||||
- 🟡
|
||||
- Missing `crypto.Certificate` `crypto.ECDH` `crypto.KeyObject` `crypto.X509Certificate` `crypto.checkPrime{Sync}` `crypto.createPrivateKey` `crypto.createPublicKey` `crypto.createSecretKey` `crypto.diffieHellman` `crypto.generateKey{Sync}` `crypto.generateKeyPair{Sync}` `crypto.generatePrime{Sync}` `crypto.getCipherInfo` `crypto.getCurves` `crypto.{get|set}Fips` `crypto.hkdf` `crypto.hkdfSync` `crypto.randomInt` `crypto.secureHeapUsed` `crypto.setEngine` `crypto.sign` `crypto.verify`
|
||||
- Missing `crypto.Certificate` `crypto.ECDH` `crypto.KeyObject` `crypto.X509Certificate` `crypto.checkPrime{Sync}` `crypto.createPrivateKey` `crypto.createPublicKey` `crypto.createSecretKey` `crypto.diffieHellman` `crypto.generateKey{Sync}` `crypto.generateKeyPair{Sync}` `crypto.generatePrime{Sync}` `crypto.getCipherInfo` `crypto.{get|set}Fips` `crypto.hkdf` `crypto.hkdfSync` `crypto.secureHeapUsed` `crypto.setEngine` `crypto.sign` `crypto.verify`
|
||||
|
||||
---
|
||||
|
||||
@@ -87,7 +86,7 @@ This page is updated regularly to reflect compatibility status of the latest ver
|
||||
|
||||
- {% anchor id="node_fs" %} [`node:fs`](https://nodejs.org/api/fs.html) {% /anchor %}
|
||||
- 🟡
|
||||
- Missing `fs.fdatasync{Sync}` `fs.opendir{Sync}` `fs.readv{Sync}` `fs.{watch|watchFile|unwatchFile}` `fs.writev{Sync}`.
|
||||
- Missing `fs.fdatasync{Sync}` `fs.opendir{Sync}` `fs.{watchFile|unwatchFile}` `fs.{cp|cpSync}`.
|
||||
|
||||
---
|
||||
|
||||
@@ -123,7 +122,7 @@ This page is updated regularly to reflect compatibility status of the latest ver
|
||||
|
||||
- {% anchor id="node_net" %} [`node:net`](https://nodejs.org/api/net.html) {% /anchor %}
|
||||
- 🟡
|
||||
- Missing `net.createServer` `net.{get|set}DefaultAutoSelectFamily` `net.SocketAddress` `net.BlockList`.
|
||||
- Missing `net.{get|set}DefaultAutoSelectFamily` `net.SocketAddress` `net.BlockList`.
|
||||
|
||||
---
|
||||
|
||||
@@ -201,7 +200,7 @@ This page is updated regularly to reflect compatibility status of the latest ver
|
||||
|
||||
- {% anchor id="node_tls" %} [`node:tls`](https://nodejs.org/api/tls.html) {% /anchor %}
|
||||
- 🟡
|
||||
- Missing `tls.Server` `tls.createServer` `tls.createSecurePair` `tls.checkServerIdentity` `tls.rootCertificates`
|
||||
- Missing `tls.createSecurePair` `tls.rootCertificates`
|
||||
|
||||
---
|
||||
|
||||
@@ -219,19 +218,19 @@ This page is updated regularly to reflect compatibility status of the latest ver
|
||||
|
||||
- {% anchor id="node_url" %} [`node:url`](https://nodejs.org/api/url.html) {% /anchor %}
|
||||
- 🟡
|
||||
- Missing `url.domainTo{ASCII|Unicode}` `url.urlToHttpOptions`. Recommended to use `URL` and `URLSearchParams` globals instead.
|
||||
- Missing `url.domainTo{ASCII|Unicode}`. Recommended to use `URL` and `URLSearchParams` globals instead.
|
||||
|
||||
---
|
||||
|
||||
- {% anchor id="node_util" %} [`node:util`](https://nodejs.org/api/util.html) {% /anchor %}
|
||||
- 🟡
|
||||
- Missing `util.MIMEParams` `util.MIMEType` `util.formatWithOptions()` `util.getSystemErrorMap()` `util.getSystemErrorName()` `util.parseArgs()` `util.stripVTControlCharacters()` `util.toUSVString()` `util.transferableAbortController()` `util.transferableAbortSignal()`.
|
||||
- Missing `util.MIMEParams` `util.MIMEType` `util.formatWithOptions()` `util.getSystemErrorMap()` `util.getSystemErrorName()` `util.parseArgs()` `util.stripVTControlCharacters()` `util.transferableAbortController()` `util.transferableAbortSignal()`.
|
||||
|
||||
---
|
||||
|
||||
- {% anchor id="node_v8" %} [`node:v8`](https://nodejs.org/api/v8.html) {% /anchor %}
|
||||
- 🔴
|
||||
- Not implemented or planned. For profiling, use [`bun:jsc`](/docs/project/benchmarking#bunjsc) instead.
|
||||
- `serialize` and `deserialize` use JavaScriptCore's wire format instead of V8's. Otherwise, not implemented. For profiling, use [`bun:jsc`](/docs/project/benchmarking#bunjsc) instead.
|
||||
|
||||
---
|
||||
|
||||
@@ -510,7 +509,7 @@ The table below lists all globals implemented by Node.js and Bun's current compa
|
||||
|
||||
- {% anchor id="node_process" %} [`process`](https://nodejs.org/api/process.html) {% /anchor %}
|
||||
- 🟡
|
||||
- Missing `process.allowedNodeEnvironmentFlags` `process.channel()` `process.connected` `process.constrainedMemory()` `process.cpuUsage()` `process.debugPort` `process.disconnect()` `process.{get|set}ActiveResourcesInfo()` `process.{get|set}{uid|gid|egid|euid|groups}()` `process.hasUncaughtExceptionCaptureCallback` `process.initGroups()` `process.kill()` `process.listenerCount` `process.memoryUsage()` `process.report` `process.resourceUsage()` `process.setSourceMapsEnabled()` `process.send()`.
|
||||
- Missing `process.allowedNodeEnvironmentFlags` `process.channel()` `process.connected` `process.constrainedMemory()` `process.disconnect()` `process.getActiveResourcesInfo/setActiveResourcesInfo()` `process.setuid/setgid/setegid/seteuid/setgroups()` `process.hasUncaughtExceptionCaptureCallback` `process.initGroups()` `process.report` `process.resourceUsage()` `process.send()`.
|
||||
|
||||
---
|
||||
|
||||
@@ -558,7 +557,7 @@ The table below lists all globals implemented by Node.js and Bun's current compa
|
||||
|
||||
- {% anchor id="node_require" %} [`require()`](https://nodejs.org/api/globals.html#require) {% /anchor %}
|
||||
- 🟢
|
||||
- Fully implemented.
|
||||
- Fully implemented, as well as [`require.main`](https://nodejs.org/api/modules.html#requiremain), [`require.cache`](https://nodejs.org/api/modules.html#requirecache), and [`require.resolve`](https://nodejs.org/api/modules.html#requireresolverequest-options)
|
||||
|
||||
---
|
||||
|
||||
|
||||
@@ -76,6 +76,7 @@ These are the recommended `compilerOptions` for a Bun project.
|
||||
|
||||
// if TS 5.x+
|
||||
"moduleResolution": "bundler",
|
||||
"noEmit": true,
|
||||
"allowImportingTsExtensions": true,
|
||||
"moduleDetection": "force",
|
||||
// if TS 4.x or earlier
|
||||
@@ -93,6 +94,17 @@ These are the recommended `compilerOptions` for a Bun project.
|
||||
}
|
||||
```
|
||||
|
||||
### Add DOM types
|
||||
|
||||
Settings `"types": ["bun-types"]` means TypeScript will ignore other global type definitions, including `lib: ["dom"]`. To add DOM types into your project, add the following [triple-slash directives](https://www.typescriptlang.org/docs/handbook/triple-slash-directives.html) at the top of any TypeScript file in your project.
|
||||
|
||||
```ts
|
||||
/// <reference lib="dom" />
|
||||
/// <reference lib="dom.iterable" />
|
||||
```
|
||||
|
||||
The same applies to other global type definition _libs_ like `webworker`.
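For example, to pull in worker types instead, a minimal sketch:

```ts
/// <reference lib="webworker" />

// worker globals such as `postMessage` are now typed
self.onmessage = (event: MessageEvent) => {
  postMessage(event.data);
};
```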
|
||||
|
||||
## Path mapping
|
||||
|
||||
When resolving modules, Bun's runtime respects path mappings defined in [`compilerOptions.paths`](https://www.typescriptlang.org/tsconfig#paths) in your `tsconfig.json`. No other runtime does this.
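As a sketch, assuming a hypothetical `paths` entry that maps `@utils/*` to `./src/utils/*`:

```ts
// With "paths": { "@utils/*": ["./src/utils/*"] } in tsconfig.json,
// Bun resolves this import at runtime with no extra build step.
import { add } from "@utils/math"; // hypothetical module at ./src/utils/math.ts

console.log(add(2, 3));
```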
|
||||
|
||||
@@ -16,6 +16,13 @@ The following Web APIs are partially or completely supported.
|
||||
|
||||
---
|
||||
|
||||
---
|
||||
|
||||
- Web Workers
|
||||
- [`Worker`](https://developer.mozilla.org/en-US/docs/Web/API/Worker) [`self.postMessage`](https://developer.mozilla.org/en-US/docs/Web/API/DedicatedWorkerGlobalScope/postMessage) [`structuredClone`](https://developer.mozilla.org/en-US/docs/Web/API/structuredClone)
|
||||
|
||||
---
|
||||
|
||||
- Streams
|
||||
- [`ReadableStream`](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream) [`WritableStream`](https://developer.mozilla.org/en-US/docs/Web/API/WritableStream) [`TransformStream`](https://developer.mozilla.org/en-US/docs/Web/API/TransformStream) [`ByteLengthQueuingStrategy`](https://developer.mozilla.org/en-US/docs/Web/API/ByteLengthQueuingStrategy) [`CountQueuingStrategy`](https://developer.mozilla.org/en-US/docs/Web/API/CountQueuingStrategy) and associated classes
|
||||
|
||||
|
||||
75
docs/test/dom.md
Normal file
@@ -0,0 +1,75 @@
|
||||
Bun's test runner plays well with existing component and DOM testing libraries, including React Testing Library and [`happy-dom`](https://github.com/capricorn86/happy-dom).
|
||||
|
||||
## `happy-dom`
|
||||
|
||||
For writing headless tests for your frontend code and components, we recommend [`happy-dom`](https://github.com/capricorn86/happy-dom). Happy DOM implements a complete set of HTML and DOM APIs in plain JavaScript, making it possible to simulate a browser environment with high fidelity.
|
||||
|
||||
To get started, install the `@happy-dom/global-registrator` package as a dev dependency.
|
||||
|
||||
```bash
|
||||
$ bun add -d @happy-dom/global-registrator
|
||||
```
|
||||
|
||||
We'll be using Bun's _preload_ functionality to register the `happy-dom` globals before running our tests. This step will make browser APIs like `document` available in the global scope. Create a file called `happydom.ts` in the root of your project and add the following code:
|
||||
|
||||
```ts
|
||||
import { GlobalRegistrator } from "@happy-dom/global-registrator";
|
||||
|
||||
GlobalRegistrator.register();
|
||||
```
|
||||
|
||||
To preload this file before `bun test`, open or create a `bunfig.toml` file and add the following lines.
|
||||
|
||||
```toml
|
||||
[test]
|
||||
preload = "./happydom.ts"
|
||||
```
|
||||
|
||||
This will execute `happydom.ts` when you run `bun test`. Now you can write tests that use browser APIs like `document` and `window`.
|
||||
|
||||
```ts#dom.test.ts
|
||||
import {test, expect} from 'bun:test';
|
||||
|
||||
test('dom test', () => {
|
||||
document.body.innerHTML = `<button>My button</button>`;
|
||||
const button = document.querySelector('button');
|
||||
expect(button?.innerText).toEqual('My button');
|
||||
});
|
||||
```
|
||||
|
||||
Depending on your `tsconfig.json` setup, you may see a `"Cannot find name 'document'"` type error in the code above. To "inject" the types for `document` and other browser APIs, add the following [triple-slash directive](https://www.typescriptlang.org/docs/handbook/triple-slash-directives.html) to the top of any test file.
|
||||
|
||||
```ts-diff#dom.test.ts
|
||||
+ /// <reference lib="dom" />
|
||||
|
||||
import {test, expect} from 'bun:test';
|
||||
|
||||
test('dom test', () => {
|
||||
document.body.innerHTML = `<button>My button</button>`;
|
||||
const button = document.querySelector('button');
|
||||
expect(button?.innerText).toEqual('My button');
|
||||
});
|
||||
```
|
||||
|
||||
Let's run this test with `bun test`:
|
||||
|
||||
```bash
|
||||
$ bun test
|
||||
bun test v0.x.y
|
||||
|
||||
dom.test.ts:
|
||||
✓ dom test [0.82ms]
|
||||
|
||||
1 pass
|
||||
0 fail
|
||||
1 expect() calls
|
||||
Ran 1 tests across 1 files. 1 total [125.00ms]
|
||||
```
|
||||
|
||||
<!-- ## React Testing Library
|
||||
|
||||
Once you've set up `happy-dom` as described above, you can use it with React Testing Library. To get started, install the `@testing-library/react` package as a dev dependency.
|
||||
|
||||
```bash
|
||||
$ bun add -d @testing-library/react
|
||||
``` -->
|
||||
@@ -10,7 +10,7 @@ The test runner supports the following lifecycle hooks. This is useful for loadi
|
||||
Perform per-test setup and teardown logic with `beforeEach` and `afterEach`.
|
||||
|
||||
```ts
|
||||
import { expect, test } from "bun:test";
|
||||
import { beforeEach, afterEach } from "bun:test";
|
||||
|
||||
beforeEach(() => {
|
||||
console.log("running test.");
|
||||
@@ -70,7 +70,7 @@ afterAll(() => {
|
||||
Then use `--preload` to run the setup script before any test files.
|
||||
|
||||
```ts
|
||||
bun test --preload ./setup.ts
|
||||
$ bun test --preload ./setup.ts
|
||||
```
|
||||
|
||||
To avoid typing `--preload` every time you run tests, it can be added to your `bunfig.toml`:
|
||||
|
||||
55
docs/test/mocks.md
Normal file
@@ -0,0 +1,55 @@
|
||||
Create mocks with the `mock` function.
|
||||
|
||||
```ts
|
||||
import { test, expect, mock } from "bun:test";
|
||||
const random = mock(() => Math.random());
|
||||
|
||||
test("random", async () => {
|
||||
const val = random();
|
||||
expect(val).toBeGreaterThan(0);
|
||||
expect(random).toHaveBeenCalled();
|
||||
expect(random).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
```
|
||||
|
||||
The result of `mock()` is a new function that's been decorated with some additional properties.
|
||||
|
||||
```ts
|
||||
import { mock } from "bun:test";
|
||||
const random = mock((multiplier: number) => multiplier * Math.random());
|
||||
|
||||
random(2);
|
||||
random(10);
|
||||
|
||||
random.mock.calls;
|
||||
// [[ 2 ], [ 10 ]]
|
||||
|
||||
random.mock.results;
|
||||
// [
|
||||
// { type: "return", value: 0.6533907460954099 },
|
||||
// { type: "return", value: 0.6452713933037312 }
|
||||
// ]
|
||||
```
|
||||
|
||||
## `.spyOn()`
|
||||
|
||||
It's possible to track calls to a function without replacing it with a mock. Use `spyOn()` to create a spy; these spies can be passed to `.toHaveBeenCalled()` and `.toHaveBeenCalledTimes()`.
|
||||
|
||||
```ts
|
||||
import { test, expect, spyOn } from "bun:test";
|
||||
|
||||
const ringo = {
|
||||
name: "Ringo",
|
||||
sayHi() {
|
||||
console.log(`Hello I'm ${this.name}`);
|
||||
},
|
||||
};
|
||||
|
||||
const spy = spyOn(ringo, "sayHi");
|
||||
|
||||
test("spyon", () => {
|
||||
expect(spy).toHaveBeenCalledTimes(0);
|
||||
ringo.sayHi();
|
||||
expect(spy).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
```
|
||||
106
docs/test/time.md
Normal file
@@ -0,0 +1,106 @@
|
||||
`bun:test` lets you change what time it is in your tests. This was introduced in Bun v0.6.13.
|
||||
|
||||
This works with any of the following:
|
||||
|
||||
- `Date.now`
|
||||
- `new Date()`
|
||||
- `new Intl.DateTimeFormat().format()`
|
||||
|
||||
Timers are not impacted yet, but may be in a future release of Bun.
|
||||
|
||||
## `setSystemTime`
|
||||
|
||||
To change the system time, use `setSystemTime`:
|
||||
|
||||
```ts
|
||||
import { setSystemTime, beforeAll, test, expect } from "bun:test";
|
||||
|
||||
beforeAll(() => {
|
||||
setSystemTime(new Date("2020-01-01T00:00:00.000Z"));
|
||||
});
|
||||
|
||||
test("it is 2020", () => {
|
||||
expect(new Date().getFullYear()).toBe(2020);
|
||||
});
|
||||
```
|
||||
|
||||
To support existing tests that use Jest's `useFakeTimers` and `useRealTimers`, you can use `useFakeTimers` and `useRealTimers`:
|
||||
|
||||
```ts
|
||||
test("just like in jest", () => {
|
||||
jest.useFakeTimers();
|
||||
jest.setSystemTime(new Date("2020-01-01T00:00:00.000Z"));
|
||||
expect(new Date().getFullYear()).toBe(2020);
|
||||
jest.useRealTimers();
|
||||
expect(new Date().getFullYear()).toBeGreaterThan(2020);
|
||||
});
|
||||
|
||||
test("unlike in jest", () => {
|
||||
const OriginalDate = Date;
|
||||
jest.useFakeTimers();
|
||||
if (typeof Bun === "undefined") {
|
||||
// In Jest, the Date constructor changes
|
||||
// That can cause all sorts of bugs because suddenly Date !== Date before the test.
|
||||
expect(Date).not.toBe(OriginalDate);
|
||||
expect(Date.now).not.toBe(OriginalDate.now);
|
||||
} else {
|
||||
// In bun:test, Date constructor does not change when you useFakeTimers
|
||||
expect(Date).toBe(OriginalDate);
|
||||
expect(Date.now).toBe(OriginalDate.now);
|
||||
}
|
||||
});
|
||||
```
|
||||
|
||||
{% callout %}
|
||||
**Timers** — Note that we have not implemented builtin support for mocking timers yet, but this is on the roadmap.
|
||||
{% /callout %}
|
||||
|
||||
### Reset the system time
|
||||
|
||||
To reset the system time, pass no arguments to `setSystemTime`:
|
||||
|
||||
```ts
|
||||
import { setSystemTime, test, expect } from "bun:test";
|
||||
|
||||
test("it was 2020, for a moment.", () => {
|
||||
// Set it to something!
|
||||
setSystemTime(new Date("2020-01-01T00:00:00.000Z"));
|
||||
expect(new Date().getFullYear()).toBe(2020);
|
||||
|
||||
// reset it!
|
||||
setSystemTime();
|
||||
|
||||
expect(new Date().getFullYear()).toBeGreaterThan(2020);
|
||||
});
|
||||
```
|
||||
|
||||
## Set the time zone
|
||||
|
||||
To change the time zone, either pass the `$TZ` environment variable to `bun test`.
|
||||
|
||||
```sh
|
||||
TZ=America/Los_Angeles bun test
|
||||
```
|
||||
|
||||
Or set `process.env.TZ` at runtime:
|
||||
|
||||
```ts
|
||||
import { test, expect } from "bun:test";
|
||||
|
||||
test("Welcome to California!", () => {
|
||||
process.env.TZ = "America/Los_Angeles";
|
||||
expect(new Date().getTimezoneOffset()).toBe(420);
|
||||
expect(new Intl.DateTimeFormat().resolvedOptions().timeZone).toBe(
|
||||
"America/Los_Angeles",
|
||||
);
|
||||
});
|
||||
|
||||
test("Welcome to New York!", () => {
|
||||
// Unlike in Jest, you can set the timezone multiple times at runtime and it will work.
|
||||
process.env.TZ = "America/New_York";
|
||||
expect(new Date().getTimezoneOffset()).toBe(240);
|
||||
expect(new Intl.DateTimeFormat().resolvedOptions().timeZone).toBe(
|
||||
"America/New_York",
|
||||
);
|
||||
});
|
||||
```
|
||||
@@ -63,6 +63,21 @@ test("2 * 2", done => {
|
||||
});
|
||||
```
|
||||
|
||||
## Timeouts
|
||||
|
||||
Optionally specify a per-test timeout in milliseconds by passing a number as the third argument to `test`.
|
||||
|
||||
```ts
|
||||
import { test } from "bun:test";
|
||||
|
||||
test("wat", async () => {
|
||||
const data = await slowOperation();
|
||||
expect(data).toBe(42);
|
||||
}, 500); // test must run in <500ms
|
||||
```
|
||||
|
||||
## `test.skip`
|
||||
|
||||
Skip individual tests with `test.skip`. These tests will not be run.
|
||||
|
||||
```ts
|
||||
@@ -74,6 +89,8 @@ test.skip("wat", () => {
|
||||
});
|
||||
```
|
||||
|
||||
## `test.todo`
|
||||
|
||||
Mark a test as a todo with `test.todo`. These tests _will_ be run, and the test runner will expect them to fail. If they pass, you will be prompted to mark it as a regular test.
|
||||
|
||||
```ts
|
||||
@@ -84,6 +101,71 @@ test.todo("fix this", () => {
|
||||
});
|
||||
```
|
||||
|
||||
To exclusively run tests marked as _todo_, use `bun test --todo`.
|
||||
|
||||
```sh
|
||||
$ bun test --todo
|
||||
```
|
||||
|
||||
## `test.only`
|
||||
|
||||
To run a particular test or suite of tests, use `test.only()` or `describe.only()`. Once declared, running `bun test --only` will only execute tests/suites that have been marked with `.only()`.
|
||||
|
||||
```ts
|
||||
import { test, describe } from "bun:test";
|
||||
|
||||
test("test #1", () => {
|
||||
// does not run
|
||||
});
|
||||
|
||||
test.only("test #2", () => {
|
||||
// runs
|
||||
});
|
||||
|
||||
describe.only("only", () => {
|
||||
test("test #3", () => {
|
||||
// runs
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
The following command will only execute tests #2 and #3.
|
||||
|
||||
```sh
|
||||
$ bun test --only
|
||||
```
|
||||
|
||||
## `test.if`

To run a test conditionally, use `test.if()`. The test will run if the condition is truthy. This is particularly useful for tests that should only run on specific architectures or operating systems.

```ts
test.if(Math.random() > 0.5)("runs half the time", () => {
  // ...
});

const macOS = process.platform === "darwin";
test.if(macOS)("runs on macOS", () => {
  // runs if macOS
});
```

To instead skip a test based on some condition, use `test.skipIf()` or `describe.skipIf()`.

```ts
const macOS = process.platform === "darwin";

test.skipIf(macOS)("runs on non-macOS", () => {
  // runs if *not* macOS
});
```

## Matchers

Bun implements the following matchers. Full Jest compatibility is on the roadmap; track progress [here](https://github.com/oven-sh/bun/issues/1825).
@@ -167,12 +249,12 @@ Bun implements the following matchers. Full Jest compatibility is on the roadmap

---

- 🔴
- 🟢
- [`.anything()`](https://jestjs.io/docs/expect#expectanything)

---

- 🔴
- 🟢
- [`.any()`](https://jestjs.io/docs/expect#expectanyconstructor)

---
@@ -202,12 +284,12 @@ Bun implements the following matchers. Full Jest compatibility is on the roadmap

---

- 🔴
- 🟢
- [`.stringContaining()`](https://jestjs.io/docs/expect#expectstringcontainingstring)

---

- 🔴
- 🟢
- [`.stringMatching()`](https://jestjs.io/docs/expect#expectstringmatchingstring--regexp)

---
@@ -217,22 +299,22 @@ Bun implements the following matchers. Full Jest compatibility is on the roadmap

---

- 🔴
- [`.resolves()`](https://jestjs.io/docs/expect#resolves)
- 🟢
- [`.resolves()`](https://jestjs.io/docs/expect#resolves) (since Bun v0.6.12+)

---

- 🔴
- [`.rejects()`](https://jestjs.io/docs/expect#rejects)
- 🟢
- [`.rejects()`](https://jestjs.io/docs/expect#rejects) (since Bun v0.6.12+)

---

- 🔴
- 🟢
- [`.toHaveBeenCalled()`](https://jestjs.io/docs/expect#tohavebeencalled)

---

- 🔴
- 🟢
- [`.toHaveBeenCalledTimes()`](https://jestjs.io/docs/expect#tohavebeencalledtimesnumber)

---
@@ -277,7 +359,7 @@ Bun implements the following matchers. Full Jest compatibility is on the roadmap

---

- 🔴
- 🟢
- [`.toBeCloseTo()`](https://jestjs.io/docs/expect#tobeclosetonumber-numdigits)

---
@@ -312,12 +394,12 @@ Bun implements the following matchers. Full Jest compatibility is on the roadmap

---

- 🔴
- 🟢
- [`.toMatch()`](https://jestjs.io/docs/expect#tomatchregexp--string)

---

- 🔴
- 🟢
- [`.toMatchObject()`](https://jestjs.io/docs/expect#tomatchobjectobject)

---

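For orientation, here is a small spec that exercises a few of the matchers flipped to 🟢 above. It is an illustrative sketch, not part of the diff; `mock`, `expect.stringContaining`, and the `.resolves()`/`.rejects()` chains are used as declared later in `bun-test.d.ts`.

```ts
import { test, expect, mock } from "bun:test";

test("a few of the newly supported matchers", async () => {
  // .resolves() / .rejects() unwrap promises before matching (Bun v0.6.12+)
  await expect(Promise.resolve(42)).resolves.toBe(42);
  await expect(Promise.reject("error")).rejects.toBe("error");

  // asymmetric string matchers
  expect({ greeting: "hello world" }).toEqual({
    greeting: expect.stringContaining("hello"),
  });
  expect("bun test").toMatch(/test$/);

  // mock-function matchers
  const fn = mock((n: number) => n * 2);
  fn(1);
  fn(2);
  expect(fn).toHaveBeenCalled();
  expect(fn).toHaveBeenCalledTimes(2);
});
```
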
@@ -3,7 +3,8 @@ import { parse } from "querystring";

export default {
  fetch(req) {
    if (req.url === "/favicon.ico") return new Response("nooo dont open favicon in editor", { status: 404 });
    const url = new URL(req.url);
    if (url.pathname === "/favicon.ico") return new Response("nooo dont open favicon in editor", { status: 404 });

    var pathname = req.url.substring(1);
    const q = pathname.indexOf("?");

@@ -91,7 +91,7 @@ pub const Arguments = struct {
        var raw_args: std.ArrayListUnmanaged(string) = undefined;

        if (positionals.len > 0) {
            raw_args = .{ .capacity = positionals.len, .items = @intToPtr([*][]const u8, @ptrToInt(positionals.ptr))[0..positionals.len] };
            raw_args = .{ .capacity = positionals.len, .items = @as([*][]const u8, @ptrFromInt(@intFromPtr(positionals.ptr)))[0..positionals.len] };
        } else {
            raw_args = .{};
        }

@@ -95,7 +95,7 @@ pub const Arguments = struct {
        var raw_args: std.ArrayListUnmanaged(string) = undefined;

        if (positionals.len > 0) {
            raw_args = .{ .capacity = positionals.len, .items = @intToPtr([*][]const u8, @ptrToInt(positionals.ptr))[0..positionals.len] };
            raw_args = .{ .capacity = positionals.len, .items = @as([*][]const u8, @ptrFromInt(@intFromPtr(positionals.ptr)))[0..positionals.len] };
        } else {
            raw_args = .{};
        }
@@ -244,7 +244,7 @@ pub fn main() anyerror!void {
        const http = channel.readItem() catch continue;
        read_count += 1;

        Output.printElapsed(@floatCast(f64, @intToFloat(f128, http.elapsed) / std.time.ns_per_ms));
        Output.printElapsed(@as(f64, @floatCast(@as(f128, @floatFromInt(http.elapsed)) / std.time.ns_per_ms)));
        if (http.response) |resp| {
            if (resp.status_code == 200) {
                success_count += 1;
@@ -270,7 +270,7 @@ pub fn main() anyerror!void {
                http.client.url.href,
                http.response_buffer.list.items.len,
            });
            Output.printElapsed(@floatCast(f64, @intToFloat(f128, http.gzip_elapsed) / std.time.ns_per_ms));
            Output.printElapsed(@as(f64, @floatCast(@as(f128, @floatFromInt(http.gzip_elapsed)) / std.time.ns_per_ms)));
            Output.prettyError("<d> gzip)<r>\n", .{});
        } else {
            Output.prettyError(" <d>{s}<r><d> - {s}<r> <d>({d} bytes)<r>\n", .{
@@ -295,7 +295,7 @@ pub fn main() anyerror!void {
        fail_count,
    });

    Output.printElapsed(@floatCast(f64, @intToFloat(f128, timer.read()) / std.time.ns_per_ms));
    Output.printElapsed(@as(f64, @floatCast(@as(f128, @floatFromInt(timer.read())) / std.time.ns_per_ms)));
    Output.prettyErrorln(" {d} requests", .{
        read_count,
    });

@@ -22,9 +22,10 @@
  },
  "devDependencies": {
    "@types/react": "^18.0.25",
    "@types/ws": "^8.5.5",
    "@typescript-eslint/eslint-plugin": "^5.31.0",
    "@typescript-eslint/parser": "^5.31.0",
    "bun-webkit": "0.0.1-b2f1006a06f81bc860c89dd4c7cec3e7117c4c4c"
    "bun-webkit": "0.0.1-dcaa801946a9526c0c4a40dceb9168b81aeb7973"
  },
  "version": "0.0.0",
  "prettier": "./.prettierrc.cjs"

@@ -4,9 +4,17 @@ import { fsyncSync, rmSync, writeFileSync, writeSync } from "fs";
import { readdirSync } from "node:fs";
import { resolve } from "node:path";
import { StringDecoder } from "node:string_decoder";
import { totalmem } from "os";
import { relative } from "path";
import { fileURLToPath } from "url";

const nativeMemory = totalmem();
const BUN_JSC_forceRAMSizeNumber = parseInt(process.env["BUN_JSC_forceRAMSize"] || "0", 10);
let BUN_JSC_forceRAMSize = Number(BigInt(nativeMemory) >> BigInt(2)) + "";
if (!(Number.isSafeInteger(BUN_JSC_forceRAMSizeNumber) && BUN_JSC_forceRAMSizeNumber > 0)) {
  BUN_JSC_forceRAMSize = BUN_JSC_forceRAMSizeNumber + "";
}

const cwd = resolve(fileURLToPath(import.meta.url), "../../../../");
process.chdir(cwd);

@@ -39,6 +47,8 @@ async function runTest(path) {
      env: {
        ...process.env,
        FORCE_COLOR: "1",
        BUN_GARBAGE_COLLECTOR_LEVEL: "1",
        BUN_JSC_forceRAMSize,
      },
    });
  } catch (e) {

6
packages/bun-types/assert.d.ts
vendored
@@ -931,7 +931,11 @@ declare module "assert" {
   * instance of an `Error` then it will be thrown instead of the `AssertionError`.
   */
  // FIXME: assert.doesNotMatch is typed, but not in the browserify polyfill?
  // function doesNotMatch(value: string, regExp: RegExp, message?: string | Error): void;
  function doesNotMatch(
    value: string,
    regExp: RegExp,
    message?: string | Error,
  ): void;

  const strict: Omit<
    typeof assert,

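The hunk above un-comments the `doesNotMatch` declaration; a quick usage sketch (illustrative, not part of the diff):

```ts
import assert from "node:assert";

// Passes: the string does not match the pattern.
assert.doesNotMatch("I will pass", /different/);

// Throws AssertionError: the string matches the pattern.
assert.doesNotMatch("I will fail", /fail/);
```
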
24
packages/bun-types/buffer.d.ts
vendored
@@ -2084,6 +2084,30 @@ declare module "buffer" {
    values(): IterableIterator<number>;
  }
  var Buffer: BufferConstructor;

  /**
   * This function returns `true` if `input` contains only valid UTF-8-encoded data,
   * including the case in which `input` is empty.
   *
   * Throws if the `input` is a detached array buffer.
   * @since Bun v0.6.13
   * @param input The input to validate.
   */
  export function isUtf8(
    input: TypedArray | ArrayBufferLike | DataView,
  ): boolean;

  /**
   * This function returns `true` if `input` contains only valid ASCII-encoded data,
   * including the case in which `input` is empty.
   *
   * Throws if the `input` is a detached array buffer.
   * @since Bun v0.6.13
   * @param input The input to validate.
   */
  export function isAscii(
    input: TypedArray | ArrayBufferLike | DataView,
  ): boolean;
}
}
declare module "node:buffer" {

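The declarations added above (`isUtf8`, `isAscii`) can be exercised like this; an illustrative sketch, not part of the diff:

```ts
import { isUtf8, isAscii } from "node:buffer";

const bytes = new TextEncoder().encode("héllo");
console.log(isUtf8(bytes)); // true: valid UTF-8
console.log(isAscii(bytes)); // false: "é" is outside ASCII
console.log(isAscii(Buffer.from("hello"))); // true
```
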
619
packages/bun-types/bun-test.d.ts
vendored
@@ -15,6 +15,107 @@
|
||||
*/
|
||||
|
||||
declare module "bun:test" {
|
||||
type AnyFunction = (...args: any) => any;
|
||||
/**
|
||||
* -- Mocks --
|
||||
*/
|
||||
export interface Mock<T extends AnyFunction>
|
||||
extends JestMock.MockInstance<T> {
|
||||
(...args: Parameters<T>): ReturnType<T>;
|
||||
}
|
||||
type _Mock<T extends AnyFunction> = Mock<T>;
|
||||
|
||||
export const mock: {
|
||||
<T extends AnyFunction>(Function: T): Mock<T>;
|
||||
};
|
||||
|
||||
/**
|
||||
* Control the system time used by:
|
||||
* - `Date.now()`
|
||||
* - `new Date()`
|
||||
* - `Intl.DateTimeFormat().format()`
|
||||
*
|
||||
* In the future, we may add support for more functions, but we haven't done that yet.
|
||||
*
|
||||
* @param now The time to set the system time to. If not provided, the system time will be reset.
|
||||
* @returns `this`
|
||||
* @since v0.6.13
|
||||
*
|
||||
* ## Set Date to a specific time
|
||||
*
|
||||
* ```js
|
||||
* import { setSystemTime } from 'bun:test';
|
||||
*
|
||||
* setSystemTime(new Date('2020-01-01T00:00:00.000Z'));
|
||||
* console.log(new Date().toISOString()); // 2020-01-01T00:00:00.000Z
|
||||
* ```
|
||||
* ## Reset Date to the current time
|
||||
*
|
||||
* ```js
|
||||
* import { setSystemTime } from 'bun:test';
|
||||
*
|
||||
* setSystemTime();
|
||||
* ```
|
||||
*/
|
||||
export function setSystemTime(now?: Date | number): ThisType<void>;
|
||||
|
||||
interface Jest {
|
||||
restoreAllMocks(): void;
|
||||
fn<T extends AnyFunction>(func?: T): Mock<T>;
|
||||
}
|
||||
export const jest: Jest;
|
||||
export namespace jest {
|
||||
/**
|
||||
* Constructs the type of a mock function, e.g. the return type of `jest.fn()`.
|
||||
*/
|
||||
type Mock<T extends AnyFunction = AnyFunction> = _Mock<T>;
|
||||
/**
|
||||
* Wraps a class, function or object type with Jest mock type definitions.
|
||||
*/
|
||||
// type Mocked<T extends object> = JestMock.Mocked<T>;
|
||||
/**
|
||||
* Wraps a class type with Jest mock type definitions.
|
||||
*/
|
||||
// type MockedClass<T extends JestMock.ClassLike> = JestMock.MockedClass<T>;
|
||||
/**
|
||||
* Wraps a function type with Jest mock type definitions.
|
||||
*/
|
||||
// type MockedFunction<T extends AnyFunction> = JestMock.MockedFunction<T>;
|
||||
/**
|
||||
* Wraps an object type with Jest mock type definitions.
|
||||
*/
|
||||
// type MockedObject<T extends object> = JestMock.MockedObject<T>;
|
||||
/**
|
||||
* Constructs the type of a replaced property.
|
||||
*/
|
||||
type Replaced<T> = JestMock.Replaced<T>;
|
||||
/**
|
||||
* Constructs the type of a spied class or function.
|
||||
*/
|
||||
type Spied<T extends JestMock.ClassLike | AnyFunction> = JestMock.Spied<T>;
|
||||
/**
|
||||
* Constructs the type of a spied class.
|
||||
*/
|
||||
type SpiedClass<T extends JestMock.ClassLike> = JestMock.SpiedClass<T>;
|
||||
/**
|
||||
* Constructs the type of a spied function.
|
||||
*/
|
||||
type SpiedFunction<T extends AnyFunction> = JestMock.SpiedFunction<T>;
|
||||
/**
|
||||
* Constructs the type of a spied getter.
|
||||
*/
|
||||
type SpiedGetter<T> = JestMock.SpiedGetter<T>;
|
||||
/**
|
||||
* Constructs the type of a spied setter.
|
||||
*/
|
||||
type SpiedSetter<T> = JestMock.SpiedSetter<T>;
|
||||
}
|
||||
|
||||
export function spyOn<T extends object, K extends keyof T>(
|
||||
obj: T,
|
||||
methodOrPropertyValue: K,
|
||||
): Mock<() => T[K]>;
|
||||
|
||||
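A hypothetical usage of the `spyOn` declaration above (an illustrative sketch, not part of the diff):

```ts
import { test, expect, spyOn } from "bun:test";

const ringo = {
  name: "Ringo",
  sayHi() {
    return `Hi, I'm ${this.name}`;
  },
};

test("spyOn tracks calls to an existing method", () => {
  const spy = spyOn(ringo, "sayHi");
  expect(spy).toHaveBeenCalledTimes(0);

  ringo.sayHi();
  expect(spy).toHaveBeenCalledTimes(1);
});
```
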
/**
|
||||
* Describes a group of related tests.
|
||||
*
|
||||
@@ -329,6 +430,9 @@ declare module "bun:test" {
|
||||
any: (
|
||||
constructor: ((..._: any[]) => any) | { new (..._: any[]): any },
|
||||
) => Expect;
|
||||
anything: () => Expect;
|
||||
stringContaining: (str: string) => Expect;
|
||||
stringMatching: (regex: RegExp | string) => Expect;
|
||||
};
|
||||
/**
|
||||
* Asserts that a value matches some criteria.
|
||||
@@ -350,6 +454,20 @@ declare module "bun:test" {
|
||||
* expect(null).not.toBeNull();
|
||||
*/
|
||||
not: Expect<unknown>;
|
||||
/**
|
||||
* Expects the value to be a promise that resolves.
|
||||
*
|
||||
* @example
|
||||
* expect(Promise.resolve(1)).resolves.toBe(1);
|
||||
*/
|
||||
resolves: Expect<unknown>;
|
||||
/**
|
||||
* Expects the value to be a promise that rejects.
|
||||
*
|
||||
* @example
|
||||
* expect(Promise.reject("error")).rejects.toBe("error");
|
||||
*/
|
||||
rejects: Expect<unknown>;
|
||||
/**
|
||||
* Asserts that a value equals what is expected.
|
||||
*
|
||||
@@ -366,6 +484,24 @@ declare module "bun:test" {
|
||||
* @param expected the expected value
|
||||
*/
|
||||
toBe(expected: T): void;
|
||||
/**
|
||||
* Asserts that a number is odd.
|
||||
*
|
||||
* @link https://jest-extended.jestcommunity.dev/docs/matchers/number/#tobeodd
|
||||
* @example
|
||||
* expect(1).toBeOdd();
|
||||
* expect(2).not.toBeOdd();
|
||||
*/
|
||||
toBeOdd(): void;
|
||||
/**
|
||||
* Asserts that a number is even.
|
||||
*
|
||||
* @link https://jest-extended.jestcommunity.dev/docs/matchers/number/#tobeeven
|
||||
* @example
|
||||
* expect(2).toBeEven();
|
||||
* expect(1).not.toBeEven();
|
||||
*/
|
||||
toBeEven(): void;
|
||||
/**
|
||||
* Asserts that value is close to the expected by floating point precision.
|
||||
*
|
||||
@@ -618,6 +754,16 @@ declare module "bun:test" {
|
||||
* @param hint Hint used to identify the snapshot in the snapshot file.
|
||||
*/
|
||||
toMatchSnapshot(propertyMatchers?: Object, hint?: string): void;
|
||||
/**
|
||||
* Asserts that an object matches a subset of properties.
|
||||
*
|
||||
* @example
|
||||
* expect({ a: 1, b: 2 }).toMatchObject({ b: 2 });
|
||||
* expect({ c: new Date(), d: 2 }).toMatchObject({ d: 2 });
|
||||
*
|
||||
* @param subset Subset of properties to match with.
|
||||
*/
|
||||
toMatchObject(subset: Object): void;
|
||||
/**
|
||||
* Asserts that a value is empty.
|
||||
*
|
||||
@@ -636,6 +782,27 @@ declare module "bun:test" {
|
||||
* expect(undefined).toBeNil();
|
||||
*/
|
||||
toBeNil(): void;
|
||||
/**
|
||||
* Asserts that a value is a `array`.
|
||||
*
|
||||
* @link https://jest-extended.jestcommunity.dev/docs/matchers/array/#tobearray
|
||||
* @example
|
||||
* expect([1]).toBeArray();
|
||||
* expect(new Array(1)).toBeArray();
|
||||
* expect({}).not.toBeArray();
|
||||
*/
|
||||
toBeArray(): void;
|
||||
/**
|
||||
* Asserts that a value is a `array` of a certain length.
|
||||
*
|
||||
* @link https://jest-extended.jestcommunity.dev/docs/matchers/array/#tobearrayofsize
|
||||
* @example
|
||||
* expect([]).toBeArrayOfSize(0);
|
||||
* expect([1]).toBeArrayOfSize(1);
|
||||
* expect(new Array(1)).toBeArrayOfSize(1);
|
||||
* expect({}).not.toBeArrayOfSize(0);
|
||||
*/
|
||||
toBeArrayOfSize(size: number): void;
|
||||
/**
|
||||
* Asserts that a value is a `boolean`.
|
||||
*
|
||||
@@ -655,6 +822,26 @@ declare module "bun:test" {
|
||||
* expect(1).not.toBeTrue();
|
||||
*/
|
||||
toBeTrue(): void;
|
||||
/**
|
||||
* Asserts that a value matches a specific type.
|
||||
*
|
||||
* @link https://vitest.dev/api/expect.html#tobetypeof
|
||||
* @example
|
||||
* expect(1).toBeTypeOf("number");
|
||||
* expect("hello").toBeTypeOf("string");
|
||||
* expect([]).not.toBeTypeOf("boolean");
|
||||
*/
|
||||
toBeTypeOf(
|
||||
type:
|
||||
| "bigint"
|
||||
| "boolean"
|
||||
| "function"
|
||||
| "number"
|
||||
| "object"
|
||||
| "string"
|
||||
| "symbol"
|
||||
| "undefined",
|
||||
): void;
|
||||
/**
|
||||
* Asserts that a value is `false`.
|
||||
*
|
||||
@@ -782,6 +969,18 @@ declare module "bun:test" {
|
||||
* @param expected the string to end with
|
||||
*/
|
||||
toEndWith(expected: string): void;
|
||||
/**
|
||||
* Ensures that a mock function is called.
|
||||
*/
|
||||
toHaveBeenCalled(): void;
|
||||
/**
|
||||
* Ensures that a mock function is called an exact number of times.
|
||||
*/
|
||||
toHaveBeenCalledTimes(expected: number): void;
|
||||
/**
|
||||
* Ensure that a mock function is called with specific arguments.
|
||||
*/
|
||||
// toHaveBeenCalledWith(...expected: Array<unknown>): void;
|
||||
};
|
||||
}
|
||||
|
||||
@@ -789,3 +988,423 @@ declare module "test" {
|
||||
import BunTestModule = require("bun:test");
|
||||
export = BunTestModule;
|
||||
}
|
||||
|
||||
declare namespace JestMock {
|
||||
/**
|
||||
* Copyright (c) Meta Platforms, Inc. and affiliates.
|
||||
*
|
||||
* This source code is licensed under the MIT license found in the
|
||||
* LICENSE file in the root directory of this source tree.
|
||||
*/
|
||||
export type ClassLike = {
|
||||
new (...args: any): any;
|
||||
};
|
||||
|
||||
export type ConstructorLikeKeys<T> = keyof {
|
||||
[K in keyof T as Required<T>[K] extends ClassLike ? K : never]: T[K];
|
||||
};
|
||||
|
||||
// export const fn: <T extends FunctionLike = UnknownFunction>(
|
||||
// implementation?: T | undefined,
|
||||
// ) => Mock<T>;
|
||||
|
||||
export type FunctionLike = (...args: any) => any;
|
||||
|
||||
export type MethodLikeKeys<T> = keyof {
|
||||
[K in keyof T as Required<T>[K] extends FunctionLike ? K : never]: T[K];
|
||||
};
|
||||
|
||||
/**
|
||||
* All the internal typings need is to be sure that we have an any-function.
|
||||
* `FunctionLike` type ensures that and helps to constrain the type as well.
|
||||
* The default of `UnknownFunction` makes sure that `any`s do not leak to the
|
||||
* user side. For instance, calling `fn()` without implementation will return
|
||||
* a mock of `(...args: Array<unknown>) => unknown` type. If implementation
|
||||
* is provided, its typings are inferred correctly.
|
||||
*/
|
||||
// export interface Mock<T extends FunctionLike = UnknownFunction>
|
||||
// extends Function,
|
||||
// MockInstance<T> {
|
||||
// new (...args: Parameters<T>): ReturnType<T>;
|
||||
// (...args: Parameters<T>): ReturnType<T>;
|
||||
// }
|
||||
|
||||
// export type Mocked<T> = T extends ClassLike
|
||||
// ? MockedClass<T>
|
||||
// : T extends FunctionLike
|
||||
// ? MockedFunction<T>
|
||||
// : T extends object
|
||||
// ? MockedObject<T>
|
||||
// : T;
|
||||
|
||||
// export const mocked: {
|
||||
// <T extends object>(
|
||||
// source: T,
|
||||
// options?: {
|
||||
// shallow: false;
|
||||
// },
|
||||
// ): Mocked<T>;
|
||||
// <T_1 extends object>(
|
||||
// source: T_1,
|
||||
// options: {
|
||||
// shallow: true;
|
||||
// },
|
||||
// ): MockedShallow<T_1>;
|
||||
// };
|
||||
|
||||
// export type MockedClass<T extends ClassLike> = MockInstance<
|
||||
// (...args: ConstructorParameters<T>) => Mocked<InstanceType<T>>
|
||||
// > &
|
||||
// MockedObject<T>;
|
||||
|
||||
// export type MockedFunction<T extends FunctionLike> = MockInstance<T> &
|
||||
// MockedObject<T>;
|
||||
|
||||
// type MockedFunctionShallow<T extends FunctionLike> = MockInstance<T> & T;
|
||||
|
||||
// export type MockedObject<T extends object> = {
|
||||
// [K in keyof T]: T[K] extends ClassLike
|
||||
// ? MockedClass<T[K]>
|
||||
// : T[K] extends FunctionLike
|
||||
// ? MockedFunction<T[K]>
|
||||
// : T[K] extends object
|
||||
// ? MockedObject<T[K]>
|
||||
// : T[K];
|
||||
// } & T;
|
||||
|
||||
// type MockedObjectShallow<T extends object> = {
|
||||
// [K in keyof T]: T[K] extends ClassLike
|
||||
// ? MockedClass<T[K]>
|
||||
// : T[K] extends FunctionLike
|
||||
// ? MockedFunctionShallow<T[K]>
|
||||
// : T[K];
|
||||
// } & T;
|
||||
|
||||
// export type MockedShallow<T> = T extends ClassLike
|
||||
// ? MockedClass<T>
|
||||
// : T extends FunctionLike
|
||||
// ? MockedFunctionShallow<T>
|
||||
// : T extends object
|
||||
// ? MockedObjectShallow<T>
|
||||
// : T;
|
||||
|
||||
// export type MockFunctionMetadata<
|
||||
// T = unknown,
|
||||
// MetadataType = MockMetadataType,
|
||||
// > = MockMetadata<T, MetadataType>;
|
||||
|
||||
// export type MockFunctionMetadataType = MockMetadataType;
|
||||
|
||||
type MockFunctionResult<T extends FunctionLike = UnknownFunction> =
|
||||
| MockFunctionResultIncomplete
|
||||
| MockFunctionResultReturn<T>
|
||||
| MockFunctionResultThrow;
|
||||
|
||||
type MockFunctionResultIncomplete = {
|
||||
type: "incomplete";
|
||||
/**
|
||||
* Result of a single call to a mock function that has not yet completed.
|
||||
* This occurs if you test the result from within the mock function itself,
|
||||
* or from within a function that was called by the mock.
|
||||
*/
|
||||
value: undefined;
|
||||
};
|
||||
|
||||
type MockFunctionResultReturn<T extends FunctionLike = UnknownFunction> = {
|
||||
type: "return";
|
||||
/**
|
||||
* Result of a single call to a mock function that returned.
|
||||
*/
|
||||
value: ReturnType<T>;
|
||||
};
|
||||
|
||||
type MockFunctionResultThrow = {
|
||||
type: "throw";
|
||||
/**
|
||||
* Result of a single call to a mock function that threw.
|
||||
*/
|
||||
value: unknown;
|
||||
};
|
||||
|
||||
type MockFunctionState<T extends FunctionLike = FunctionLike> = {
|
||||
/**
|
||||
* List of the call arguments of all calls that have been made to the mock.
|
||||
*/
|
||||
calls: Array<Parameters<T>>;
|
||||
/**
|
||||
* List of all the object instances that have been instantiated from the mock.
|
||||
*/
|
||||
instances: Array<ReturnType<T>>;
|
||||
/**
|
||||
* List of all the function contexts that have been applied to calls to the mock.
|
||||
*/
|
||||
contexts: Array<ThisParameterType<T>>;
|
||||
/**
|
||||
* List of the call order indexes of the mock. Jest is indexing the order of
|
||||
* invocations of all mocks in a test file. The index is starting with `1`.
|
||||
*/
|
||||
invocationCallOrder: Array<number>;
|
||||
/**
|
||||
* List of the call arguments of the last call that was made to the mock.
|
||||
* If the function was not called, it will return `undefined`.
|
||||
*/
|
||||
lastCall?: Parameters<T>;
|
||||
/**
|
||||
* List of the results of all calls that have been made to the mock.
|
||||
*/
|
||||
results: Array<MockFunctionResult<T>>;
|
||||
};
|
||||
|
||||
export interface MockInstance<T extends FunctionLike = UnknownFunction> {
|
||||
_isMockFunction: true;
|
||||
_protoImpl: Function;
|
||||
getMockImplementation(): T | undefined;
|
||||
getMockName(): string;
|
||||
mock: MockFunctionState<T>;
|
||||
mockClear(): this;
|
||||
mockReset(): this;
|
||||
mockRestore(): void;
|
||||
mockImplementation(fn: T): this;
|
||||
mockImplementationOnce(fn: T): this;
|
||||
withImplementation(fn: T, callback: () => Promise<unknown>): Promise<void>;
|
||||
withImplementation(fn: T, callback: () => void): void;
|
||||
mockName(name: string): this;
|
||||
mockReturnThis(): this;
|
||||
mockReturnValue(value: ReturnType<T>): this;
|
||||
mockReturnValueOnce(value: ReturnType<T>): this;
|
||||
mockResolvedValue(value: ResolveType<T>): this;
|
||||
mockResolvedValueOnce(value: ResolveType<T>): this;
|
||||
mockRejectedValue(value: RejectType<T>): this;
|
||||
mockRejectedValueOnce(value: RejectType<T>): this;
|
||||
}
|
||||
|
||||
// export type MockMetadata<T, MetadataType = MockMetadataType> = {
|
||||
// ref?: number;
|
||||
// members?: Record<string, MockMetadata<T>>;
|
||||
// mockImpl?: T;
|
||||
// name?: string;
|
||||
// refID?: number;
|
||||
// type?: MetadataType;
|
||||
// value?: T;
|
||||
// length?: number;
|
||||
// };
|
||||
|
||||
// export type MockMetadataType =
|
||||
// | "object"
|
||||
// | "array"
|
||||
// | "regexp"
|
||||
// | "function"
|
||||
// | "constant"
|
||||
// | "collection"
|
||||
// | "null"
|
||||
// | "undefined";
|
||||
|
||||
// export class ModuleMocker {
|
||||
// private readonly _environmentGlobal;
|
||||
// private _mockState;
|
||||
// private _mockConfigRegistry;
|
||||
// private _spyState;
|
||||
// private _invocationCallCounter;
|
||||
// /**
|
||||
// * @see README.md
|
||||
// * @param global Global object of the test environment, used to create
|
||||
// * mocks
|
||||
// */
|
||||
// constructor(global: typeof globalThis);
|
||||
// private _getSlots;
|
||||
// private _ensureMockConfig;
|
||||
// private _ensureMockState;
|
||||
// private _defaultMockConfig;
|
||||
// private _defaultMockState;
|
||||
// private _makeComponent;
|
||||
// private _createMockFunction;
|
||||
// private _generateMock;
|
||||
// /**
|
||||
// * Check whether the given property of an object has been already replaced.
|
||||
// */
|
||||
// private _findReplacedProperty;
|
||||
// /**
|
||||
// * @see README.md
|
||||
// * @param metadata Metadata for the mock in the schema returned by the
|
||||
// * getMetadata method of this module.
|
||||
// */
|
||||
// generateFromMetadata<T>(metadata: MockMetadata<T>): Mocked<T>;
|
||||
// /**
|
||||
// * @see README.md
|
||||
// * @param component The component for which to retrieve metadata.
|
||||
// */
|
||||
// getMetadata<T = unknown>(
|
||||
// component: T,
|
||||
// _refs?: Map<T, number>,
|
||||
// ): MockMetadata<T> | null;
|
||||
// isMockFunction<T extends FunctionLike = UnknownFunction>(
|
||||
// fn: MockInstance<T>,
|
||||
// ): fn is MockInstance<T>;
|
||||
// isMockFunction<P extends Array<unknown>, R>(
|
||||
// fn: (...args: P) => R,
|
||||
// ): fn is Mock<(...args: P) => R>;
|
||||
// isMockFunction(fn: unknown): fn is Mock<UnknownFunction>;
|
||||
// fn<T extends FunctionLike = UnknownFunction>(implementation?: T): Mock<T>;
|
||||
// private _attachMockImplementation;
|
||||
// spyOn<
|
||||
// T extends object,
|
||||
// K extends PropertyLikeKeys<T>,
|
||||
// A extends "get" | "set",
|
||||
// >(
|
||||
// object: T,
|
||||
// methodKey: K,
|
||||
// accessType: A,
|
||||
// ): A extends "get"
|
||||
// ? SpiedGetter<T[K]>
|
||||
// : A extends "set"
|
||||
// ? SpiedSetter<T[K]>
|
||||
// : never;
|
||||
// spyOn<
|
||||
// T extends object,
|
||||
// K extends ConstructorLikeKeys<T> | MethodLikeKeys<T>,
|
||||
// V extends Required<T>[K],
|
||||
// >(
|
||||
// object: T,
|
||||
// methodKey: K,
|
||||
// ): V extends ClassLike | FunctionLike ? Spied<V> : never;
|
||||
// private _spyOnProperty;
|
||||
// replaceProperty<
|
||||
// T extends object,
|
||||
// K extends PropertyLikeKeys<T>,
|
||||
// V extends T[K],
|
||||
// >(object: T, propertyKey: K, value: V): Replaced<T[K]>;
|
||||
// clearAllMocks(): void;
|
||||
// resetAllMocks(): void;
|
||||
// restoreAllMocks(): void;
|
||||
// private _typeOf;
|
||||
// mocked<T extends object>(
|
||||
// source: T,
|
||||
// options?: {
|
||||
// shallow: false;
|
||||
// },
|
||||
// ): Mocked<T>;
|
||||
// mocked<T extends object>(
|
||||
// source: T,
|
||||
// options: {
|
||||
// shallow: true;
|
||||
// },
|
||||
// ): MockedShallow<T>;
|
||||
// }
|
||||
|
||||
export type PropertyLikeKeys<T> = Exclude<
|
||||
keyof T,
|
||||
ConstructorLikeKeys<T> | MethodLikeKeys<T>
|
||||
>;
|
||||
|
||||
export type RejectType<T extends FunctionLike> =
|
||||
ReturnType<T> extends PromiseLike<any> ? unknown : never;
|
||||
|
||||
export interface Replaced<T = unknown> {
|
||||
/**
|
||||
* Restore property to its original value known at the time of mocking.
|
||||
*/
|
||||
restore(): void;
|
||||
/**
|
||||
* Change the value of the property.
|
||||
*/
|
||||
replaceValue(value: T): this;
|
||||
}
|
||||
|
||||
export const replaceProperty: <
|
||||
T extends object,
|
||||
K_2 extends Exclude<
|
||||
keyof T,
|
||||
| keyof {
|
||||
[K in keyof T as Required<T>[K] extends ClassLike ? K : never]: T[K];
|
||||
}
|
||||
| keyof {
|
||||
[K_1 in keyof T as Required<T>[K_1] extends FunctionLike
|
||||
? K_1
|
||||
: never]: T[K_1];
|
||||
}
|
||||
>,
|
||||
V extends T[K_2],
|
||||
>(
|
||||
object: T,
|
||||
propertyKey: K_2,
|
||||
value: V,
|
||||
) => Replaced<T[K_2]>;
|
||||
|
||||
export type ResolveType<T extends FunctionLike> =
|
||||
ReturnType<T> extends PromiseLike<infer U> ? U : never;
|
||||
|
||||
export type Spied<T extends ClassLike | FunctionLike> = T extends ClassLike
|
||||
? SpiedClass<T>
|
||||
: T extends FunctionLike
|
||||
? SpiedFunction<T>
|
||||
: never;
|
||||
|
||||
export type SpiedClass<T extends ClassLike = UnknownClass> = MockInstance<
|
||||
(...args: ConstructorParameters<T>) => InstanceType<T>
|
||||
>;
|
||||
|
||||
export type SpiedFunction<T extends FunctionLike = UnknownFunction> =
|
||||
MockInstance<(...args: Parameters<T>) => ReturnType<T>>;
|
||||
|
||||
export type SpiedGetter<T> = MockInstance<() => T>;
|
||||
|
||||
export type SpiedSetter<T> = MockInstance<(arg: T) => void>;
|
||||
|
||||
export interface SpyInstance<T extends FunctionLike = UnknownFunction>
|
||||
extends MockInstance<T> {}
|
||||
|
||||
export const spyOn: {
|
||||
<
|
||||
T extends object,
|
||||
K_2 extends Exclude<
|
||||
keyof T,
|
||||
| keyof {
|
||||
[K in keyof T as Required<T>[K] extends ClassLike
|
||||
? K
|
||||
: never]: T[K];
|
||||
}
|
||||
| keyof {
|
||||
[K_1 in keyof T as Required<T>[K_1] extends FunctionLike
|
||||
? K_1
|
||||
: never]: T[K_1];
|
||||
}
|
||||
>,
|
||||
V extends Required<T>[K_2],
|
||||
A extends "set" | "get",
|
||||
>(
|
||||
object: T,
|
||||
methodKey: K_2,
|
||||
accessType: A,
|
||||
): A extends "get"
|
||||
? SpiedGetter<V>
|
||||
: A extends "set"
|
||||
? SpiedSetter<V>
|
||||
: never;
|
||||
<
|
||||
T_1 extends object,
|
||||
K_5 extends
|
||||
| keyof {
|
||||
[K_3 in keyof T_1 as Required<T_1>[K_3] extends ClassLike
|
||||
? K_3
|
||||
: never]: T_1[K_3];
|
||||
}
|
||||
| keyof {
|
||||
[K_4 in keyof T_1 as Required<T_1>[K_4] extends FunctionLike
|
||||
? K_4
|
||||
: never]: T_1[K_4];
|
||||
},
|
||||
V_1 extends Required<T_1>[K_5],
|
||||
>(
|
||||
object: T_1,
|
||||
methodKey: K_5,
|
||||
): V_1 extends ClassLike | FunctionLike ? Spied<V_1> : never;
|
||||
};
|
||||
|
||||
export type UnknownClass = {
|
||||
new (...args: Array<unknown>): unknown;
|
||||
};
|
||||
|
||||
export type UnknownFunction = (...args: Array<unknown>) => unknown;
|
||||
|
||||
export {};
|
||||
}
|
||||
|
||||
855
packages/bun-types/bun.d.ts
vendored
File diff suppressed because it is too large
2
packages/bun-types/ffi.d.ts
vendored
@@ -344,6 +344,7 @@ declare module "bun:ffi" {
     *
     */
    u64_fast = 16,
    function = 17,
  }

  type UNTYPED = never;
@@ -379,6 +380,7 @@ declare module "bun:ffi" {
    [FFIType.cstring]: CString;
    [FFIType.i64_fast]: number | bigint;
    [FFIType.u64_fast]: number | bigint;
    [FFIType.function]: (...args: any[]) => any;
  }
  interface FFITypeStringToType {
    ["char"]: FFIType.char;

142
packages/bun-types/fs.d.ts
vendored
@@ -19,6 +19,7 @@
|
||||
*/
|
||||
declare module "fs" {
|
||||
import * as stream from "stream";
|
||||
import type EventEmitter from "events";
|
||||
import type { SystemError, ArrayBufferView } from "bun";
|
||||
interface ObjectEncodingOptions {
|
||||
encoding?: BufferEncoding | null | undefined;
|
||||
@@ -3648,7 +3649,7 @@ declare module "fs" {
|
||||
// prependOnceListener(event: 'unpipe', listener: (src: stream.Readable) => void): this;
|
||||
// prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this;
|
||||
}
|
||||
function fdatasync(fd: number, callback: NoParamCallback): void;
|
||||
// function fdatasync(fd: number, callback: NoParamCallback): void;
|
||||
// namespace fdatasync {
|
||||
// /**
|
||||
// * Asynchronous fdatasync(2) - synchronize a file's in-core state with storage device.
|
||||
@@ -3661,7 +3662,7 @@ declare module "fs" {
|
||||
* operating system's synchronized I/O completion state. Refer to the POSIX [`fdatasync(2)`](http://man7.org/linux/man-pages/man2/fdatasync.2.html) documentation for details. Returns `undefined`.
|
||||
* @since v0.0.67
|
||||
*/
|
||||
function fdatasyncSync(fd: number): void;
|
||||
// function fdatasyncSync(fd: number): void;
|
||||
/**
|
||||
* Asynchronously copies `src` to `dest`. By default, `dest` is overwritten if it
|
||||
* already exists. No arguments other than a possible exception are given to the
|
||||
@@ -3929,6 +3930,143 @@ declare module "fs" {
|
||||
*/
|
||||
recursive?: boolean;
|
||||
}
|
||||
|
||||
export interface FSWatcher extends EventEmitter {
|
||||
/**
|
||||
* Stop watching for changes on the given `fs.FSWatcher`. Once stopped, the `fs.FSWatcher` object is no longer usable.
|
||||
* @since v0.6.8
|
||||
*/
|
||||
close(): void;
|
||||
|
||||
/**
|
||||
* When called, requests that the Node.js event loop not exit so long as the <fs.FSWatcher> is active. Calling watcher.ref() multiple times will have no effect.
|
||||
*/
|
||||
ref(): void;
|
||||
|
||||
/**
|
||||
* When called, the active <fs.FSWatcher> object will not require the Node.js event loop to remain active. If there is no other activity keeping the event loop running, the process may exit before the <fs.FSWatcher> object's callback is invoked. Calling watcher.unref() multiple times will have no effect.
|
||||
*/
|
||||
unref(): void;
|
||||
|
||||
/**
|
||||
* events.EventEmitter
|
||||
* 1. change
|
||||
* 2. error
|
||||
*/
|
||||
addListener(event: string, listener: (...args: any[]) => void): this;
|
||||
addListener(
|
||||
event: "change",
|
||||
listener: (eventType: string, filename: string | Buffer) => void,
|
||||
): this;
|
||||
addListener(event: "error", listener: (error: Error) => void): this;
|
||||
addListener(event: "close", listener: () => void): this;
|
||||
on(event: string, listener: (...args: any[]) => void): this;
|
||||
on(
|
||||
event: "change",
|
||||
listener: (eventType: string, filename: string | Buffer) => void,
|
||||
): this;
|
||||
on(event: "error", listener: (error: Error) => void): this;
|
||||
on(event: "close", listener: () => void): this;
|
||||
once(event: string, listener: (...args: any[]) => void): this;
|
||||
once(
|
||||
event: "change",
|
||||
listener: (eventType: string, filename: string | Buffer) => void,
|
||||
): this;
|
||||
once(event: "error", listener: (error: Error) => void): this;
|
||||
once(event: "close", listener: () => void): this;
|
||||
prependListener(event: string, listener: (...args: any[]) => void): this;
|
||||
prependListener(
|
||||
event: "change",
|
||||
listener: (eventType: string, filename: string | Buffer) => void,
|
||||
): this;
|
||||
prependListener(event: "error", listener: (error: Error) => void): this;
|
||||
prependListener(event: "close", listener: () => void): this;
|
||||
prependOnceListener(
|
||||
event: string,
|
||||
listener: (...args: any[]) => void,
|
||||
): this;
|
||||
prependOnceListener(
|
||||
event: "change",
|
||||
listener: (eventType: string, filename: string | Buffer) => void,
|
||||
): this;
|
||||
prependOnceListener(event: "error", listener: (error: Error) => void): this;
|
||||
prependOnceListener(event: "close", listener: () => void): this;
|
||||
}
|
||||
|
||||
type WatchOptions = {
|
||||
encoding?: BufferEncoding;
|
||||
persistent?: boolean;
|
||||
recursive?: boolean;
|
||||
signal?: AbortSignal;
|
||||
};
|
||||
export type WatchEventType = "rename" | "change" | "error" | "close";
|
||||
type WatchListener<T> = (
|
||||
event: WatchEventType,
|
||||
filename: T | Error | undefined,
|
||||
) => void;
|
||||
/**
|
||||
* Watch for changes on `filename`, where `filename` is either a file or a
|
||||
* directory.
|
||||
*
|
||||
* The second argument is optional. If `options` is provided as a string, it
|
||||
* specifies the `encoding`. Otherwise `options` should be passed as an object.
|
||||
*
|
||||
* The listener callback gets two arguments `(eventType, filename)`. `eventType` is either `'rename'` or `'change'`, and `filename` is the name of the file
|
||||
* which triggered the event.
|
||||
*
|
||||
* On most platforms, `'rename'` is emitted whenever a filename appears or
|
||||
* disappears in the directory.
|
||||
*
|
||||
* The listener callback is attached to the `'change'` event fired by `fs.FSWatcher`, but it is not the same thing as the `'change'` value of `eventType`.
|
||||
*
|
||||
* If a `signal` is passed, aborting the corresponding AbortController will close
|
||||
* the returned `fs.FSWatcher`.
|
||||
* @since v0.6.8
|
||||
* @param listener
|
||||
*/
|
||||
export function watch(
|
||||
filename: PathLike,
|
||||
options:
|
||||
| (WatchOptions & {
|
||||
encoding: "buffer";
|
||||
})
|
||||
| "buffer",
|
||||
listener?: WatchListener<Buffer>,
|
||||
): FSWatcher;
|
||||
/**
|
||||
* Watch for changes on `filename`, where `filename` is either a file or a directory, returning an `FSWatcher`.
|
||||
* @param filename A path to a file or directory. If a URL is provided, it must use the `file:` protocol.
|
||||
* @param options Either the encoding for the filename provided to the listener, or an object optionally specifying encoding, persistent, and recursive options.
|
||||
* If `encoding` is not supplied, the default of `'utf8'` is used.
|
||||
* If `persistent` is not supplied, the default of `true` is used.
|
||||
* If `recursive` is not supplied, the default of `false` is used.
|
||||
*/
|
||||
export function watch(
|
||||
filename: PathLike,
|
||||
options?: WatchOptions | BufferEncoding | null,
|
||||
listener?: WatchListener<string>,
|
||||
): FSWatcher;
|
||||
/**
|
||||
* Watch for changes on `filename`, where `filename` is either a file or a directory, returning an `FSWatcher`.
|
||||
* @param filename A path to a file or directory. If a URL is provided, it must use the `file:` protocol.
|
||||
* @param options Either the encoding for the filename provided to the listener, or an object optionally specifying encoding, persistent, and recursive options.
|
||||
* If `encoding` is not supplied, the default of `'utf8'` is used.
|
||||
* If `persistent` is not supplied, the default of `true` is used.
|
||||
* If `recursive` is not supplied, the default of `false` is used.
|
||||
*/
|
||||
export function watch(
|
||||
filename: PathLike,
|
||||
options: WatchOptions | string,
|
||||
listener?: WatchListener<string | Buffer>,
|
||||
): FSWatcher;
|
||||
/**
|
||||
* Watch for changes on `filename`, where `filename` is either a file or a directory, returning an `FSWatcher`.
|
||||
* @param filename A path to a file or directory. If a URL is provided, it must use the `file:` protocol.
|
||||
*/
|
||||
export function watch(
|
||||
filename: PathLike,
|
||||
listener?: WatchListener<string>,
|
||||
): FSWatcher;
|
||||
}
|
||||
|
||||
declare module "node:fs" {
|
||||
|
||||
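The `watch` overloads declared above can be used like this; an illustrative sketch, not part of the diff:

```ts
import { watch } from "node:fs";

// Watch a directory and log every change event until the watcher is closed.
const watcher = watch("./src", (eventType, filename) => {
  console.log(`${eventType}: ${filename}`);
});

// Stop watching (and let the process exit) after ten seconds.
setTimeout(() => watcher.close(), 10_000);
```
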
71
packages/bun-types/fs/promises.d.ts
vendored
@@ -26,6 +26,8 @@ declare module "fs/promises" {
|
||||
Abortable,
|
||||
RmOptions,
|
||||
RmDirOptions,
|
||||
WatchOptions,
|
||||
WatchEventType,
|
||||
} from "node:fs";
|
||||
|
||||
const constants: typeof import("node:fs")["constants"];
|
||||
@@ -709,6 +711,75 @@ declare module "fs/promises" {
|
||||
* To remove a directory recursively, use `fs.promises.rm()` instead, with the `recursive` option set to `true`.
|
||||
*/
|
||||
function rmdir(path: PathLike, options?: RmDirOptions): Promise<void>;
|
||||
|
||||
interface FileChangeInfo<T extends string | Buffer> {
|
||||
eventType: WatchEventType;
|
||||
filename: T;
|
||||
}
|
||||
/**
|
||||
* Returns an async iterator that watches for changes on `filename`, where `filename` is either a file or a directory.
|
||||
*
|
||||
* ```js
|
||||
* const { watch } = require('node:fs/promises');
|
||||
*
|
||||
* const ac = new AbortController();
|
||||
* const { signal } = ac;
|
||||
* setTimeout(() => ac.abort(), 10000);
|
||||
*
|
||||
* (async () => {
|
||||
* try {
|
||||
* const watcher = watch(__filename, { signal });
|
||||
* for await (const event of watcher)
|
||||
* console.log(event);
|
||||
* } catch (err) {
|
||||
* if (err.name === 'AbortError')
|
||||
* return;
|
||||
* throw err;
|
||||
* }
|
||||
* })();
|
||||
* ```
|
||||
*
|
||||
* On most platforms, `'rename'` is emitted whenever a filename appears or
|
||||
* disappears in the directory.
|
||||
*
|
||||
* All the `caveats` for `fs.watch()` also apply to `fsPromises.watch()`.
|
||||
* @since v0.6.8
|
||||
* @return of objects with the properties:
|
||||
*/
|
||||
function watch(
|
||||
filename: PathLike,
|
||||
options:
|
||||
| (WatchOptions & {
|
||||
encoding: "buffer";
|
||||
})
|
||||
| "buffer",
|
||||
): AsyncIterable<FileChangeInfo<Buffer>>;
|
||||
/**
|
||||
* Watch for changes on `filename`, where `filename` is either a file or a directory, returning an `FSWatcher`.
|
||||
* @param filename A path to a file or directory. If a URL is provided, it must use the `file:` protocol.
|
||||
* @param options Either the encoding for the filename provided to the listener, or an object optionally specifying encoding, persistent, and recursive options.
|
||||
* If `encoding` is not supplied, the default of `'utf8'` is used.
|
||||
* If `persistent` is not supplied, the default of `true` is used.
|
||||
* If `recursive` is not supplied, the default of `false` is used.
|
||||
*/
|
||||
function watch(
|
||||
filename: PathLike,
|
||||
options?: WatchOptions | BufferEncoding,
|
||||
): AsyncIterable<FileChangeInfo<string>>;
|
||||
/**
|
||||
* Watch for changes on `filename`, where `filename` is either a file or a directory, returning an `FSWatcher`.
|
||||
* @param filename A path to a file or directory. If a URL is provided, it must use the `file:` protocol.
|
||||
* @param options Either the encoding for the filename provided to the listener, or an object optionally specifying encoding, persistent, and recursive options.
|
||||
* If `encoding` is not supplied, the default of `'utf8'` is used.
|
||||
* If `persistent` is not supplied, the default of `true` is used.
|
||||
* If `recursive` is not supplied, the default of `false` is used.
|
||||
*/
|
||||
function watch(
|
||||
filename: PathLike,
|
||||
options: WatchOptions | string,
|
||||
):
|
||||
| AsyncIterable<FileChangeInfo<string>>
|
||||
| AsyncIterable<FileChangeInfo<Buffer>>;
|
||||
}
|
||||
|
||||
declare module "node:fs/promises" {
|
||||
|
||||
591
packages/bun-types/globals.d.ts
vendored
@@ -1,7 +1,7 @@
|
||||
/**
|
||||
* "blob" is not supported yet
|
||||
*/
|
||||
type BinaryType = "arraybuffer" | "nodebuffer" | "blob";
|
||||
type BinaryType = "nodebuffer" | "arraybuffer" | "blob";
|
||||
type Transferable = ArrayBuffer;
|
||||
type MessageEventSource = undefined;
|
||||
type Encoding = "utf-8" | "windows-1252" | "utf-16";
|
||||
@@ -75,6 +75,34 @@ interface ArrayConstructor {
|
||||
): Promise<Array<T>>;
|
||||
}
|
||||
|
||||
type UncaughtExceptionOrigin = "uncaughtException" | "unhandledRejection";
|
||||
type MultipleResolveType = "resolve" | "reject";
|
||||
type BeforeExitListener = (code: number) => void;
|
||||
type DisconnectListener = () => void;
|
||||
type ExitListener = (code: number) => void;
|
||||
type RejectionHandledListener = (promise: Promise<unknown>) => void;
|
||||
type UncaughtExceptionListener = (
|
||||
error: Error,
|
||||
origin: UncaughtExceptionOrigin,
|
||||
) => void;
|
||||
/**
|
||||
* Most of the time the unhandledRejection will be an Error, but this should not be relied upon
|
||||
* as *anything* can be thrown/rejected, it is therefore unsafe to assume that the value is an Error.
|
||||
*/
|
||||
type UnhandledRejectionListener = (
|
||||
reason: unknown,
|
||||
promise: Promise<unknown>,
|
||||
) => void;
|
||||
type WarningListener = (warning: Error) => void;
|
||||
type MessageListener = (message: unknown, sendHandle: unknown) => void;
|
||||
type SignalsListener = (signal: Signals) => void;
|
||||
type MultipleResolveListener = (
|
||||
type: MultipleResolveType,
|
||||
promise: Promise<unknown>,
|
||||
value: unknown,
|
||||
) => void;
|
||||
// type WorkerListener = (worker: Worker) => void;
|
||||
|
||||
interface Console {
|
||||
/**
|
||||
* Asynchronously read lines from standard input (fd 0)
|
||||
@@ -328,6 +356,16 @@ interface StructuredSerializeOptions {
|
||||
transfer?: Transferable[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a deep clone of an object.
|
||||
*
|
||||
* [MDN Reference](https://developer.mozilla.org/docs/Web/API/structuredClone)
|
||||
*/
|
||||
declare function structuredClone<T>(
|
||||
value: T,
|
||||
options?: StructuredSerializeOptions,
|
||||
): T;
|
||||
|
||||
interface EncodeIntoResult {
|
||||
/**
|
||||
* The read Unicode code units of input.
|
||||
@@ -369,6 +407,8 @@ interface Process {
|
||||
argv: string[];
|
||||
execArgv: string[];
|
||||
env: import("bun").Env;
|
||||
allowedNodeEnvironmentFlags: Set<string>;
|
||||
debugPort: number;
|
||||
|
||||
/** Whether you are using Bun */
|
||||
isBun: 1; // FIXME: this should actually return a boolean
|
||||
@@ -377,15 +417,28 @@ interface Process {
|
||||
chdir(directory: string): void;
|
||||
cwd(): string;
|
||||
exit(code?: number): never;
|
||||
reallyExit(code?: number): never;
|
||||
getgid(): number;
|
||||
setgid(id: number | string): void;
|
||||
// setgid(id: number | string): void;
|
||||
getuid(): number;
|
||||
setuid(id: number | string): void;
|
||||
// setuid(id: number | string): void;
|
||||
geteuid: () => number;
|
||||
// seteuid: (id: number | string) => void;
|
||||
getegid: () => number;
|
||||
// setegid: (id: number | string) => void;
|
||||
getgroups: () => number[];
|
||||
// setgroups?: (groups: ReadonlyArray<string | number>) => void;
|
||||
dlopen(module: { exports: any }, filename: string, flags?: number): void;
|
||||
stdin: import("stream").Duplex & { isTTY: boolean };
|
||||
stdout: import("stream").Writable & { isTTY: boolean };
|
||||
stderr: import("stream").Writable & { isTTY: boolean };
|
||||
|
||||
/**
|
||||
*
|
||||
* @deprecated This is deprecated; use the "node:assert" module instead.
|
||||
*/
|
||||
assert(value: unknown, message?: string | Error): asserts value;
|
||||
|
||||
/**
|
||||
* exit the process with a fatal exception, sending SIGABRT
|
||||
*/
|
||||
@@ -420,6 +473,77 @@ interface Process {
|
||||
emitWarning(warning: string | Error /*name?: string, ctor?: Function*/): void;
|
||||
|
||||
readonly config: Object;
|
||||
|
||||
memoryUsage: {
|
||||
(delta?: MemoryUsageObject): MemoryUsageObject;
|
||||
|
||||
rss(): number;
|
||||
};
|
||||
|
||||
cpuUsage(previousValue?: CPUUsageObject): CPUUsageObject;
|
||||
|
||||
/**
|
||||
* Does nothing in Bun
|
||||
*/
|
||||
setSourceMapsEnabled(enabled: boolean): void;
|
||||
|
||||
kill(pid: number, signal?: string | number): void;
|
||||
|
||||
on(event: "beforeExit", listener: BeforeExitListener): this;
|
||||
// on(event: "disconnect", listener: DisconnectListener): this;
|
||||
on(event: "exit", listener: ExitListener): this;
|
||||
// on(event: "rejectionHandled", listener: RejectionHandledListener): this;
|
||||
// on(event: "uncaughtException", listener: UncaughtExceptionListener): this;
|
||||
// on(
|
||||
// event: "uncaughtExceptionMonitor",
|
||||
// listener: UncaughtExceptionListener,
|
||||
// ): this;
|
||||
// on(event: "unhandledRejection", listener: UnhandledRejectionListener): this;
|
||||
// on(event: "warning", listener: WarningListener): this;
|
||||
// on(event: "message", listener: MessageListener): this;
|
||||
on(event: Signals, listener: SignalsListener): this;
|
||||
// on(event: "multipleResolves", listener: MultipleResolveListener): this;
|
||||
// on(event: "worker", listener: WorkerListener): this;
|
||||
on(event: string | symbol, listener: (...args: any[]) => void): this;
|
||||
once(event: "beforeExit", listener: BeforeExitListener): this;
|
||||
// once(event: "disconnect", listener: DisconnectListener): this;
|
||||
once(event: "exit", listener: ExitListener): this;
|
||||
// once(event: "rejectionHandled", listener: RejectionHandledListener): this;
|
||||
// once(event: "uncaughtException", listener: UncaughtExceptionListener): this;
|
||||
// once(
|
||||
// event: "uncaughtExceptionMonitor",
|
||||
// listener: UncaughtExceptionListener,
|
||||
// ): this;
|
||||
// once(event: "unhandledRejection", listener: UnhandledRejectionListener): this;
|
||||
// once(event: "warning", listener: WarningListener): this;
|
||||
// once(event: "message", listener: MessageListener): this;
|
||||
once(event: Signals, listener: SignalsListener): this;
|
||||
// once(event: "multipleResolves", listener: MultipleResolveListener): this;
|
||||
// once(event: "worker", listener: WorkerListener): this;
|
||||
once(event: string | symbol, listener: (...args: any[]) => void): this;
|
||||
|
||||
/**
|
||||
* Returns the number of listeners listening for the event named `eventName`.
|
||||
* If `listener` is provided, it will return how many times the listener is found
|
||||
* in the list of the listeners of the event.
|
||||
* @since v3.2.0
|
||||
* @param eventName The name of the event being listened for
|
||||
* @param listener The event handler function
|
||||
*/
|
||||
listenerCount(eventName: string | symbol, listener?: Function): number;
|
||||
}
|
||||
|
||||
interface MemoryUsageObject {
|
||||
rss: number;
|
||||
heapTotal: number;
|
||||
heapUsed: number;
|
||||
external: number;
|
||||
arrayBuffers: number;
|
||||
}
|
||||
|
||||
interface CPUUsageObject {
|
||||
user: number;
|
||||
system: number;
|
||||
}
|
||||
|
||||
declare var process: Process;
|
||||
@@ -888,6 +1012,12 @@ type ReadableStreamController<T> = ReadableStreamDefaultController<T>;
|
||||
type ReadableStreamDefaultReadResult<T> =
|
||||
| ReadableStreamDefaultReadValueResult<T>
|
||||
| ReadableStreamDefaultReadDoneResult;
|
||||
interface ReadableStreamDefaultReadManyResult<T> {
|
||||
done: boolean;
|
||||
/** Number of bytes */
|
||||
size: number;
|
||||
value: T[];
|
||||
}
|
||||
type ReadableStreamReader<T> = ReadableStreamDefaultReader<T>;
|
||||
|
||||
interface RequestInit {
|
||||
@@ -1395,21 +1525,6 @@ declare function clearTimeout(id?: number | Timer): void;
|
||||
declare function clearImmediate(id?: number | Timer): void;
|
||||
// declare function createImageBitmap(image: ImageBitmapSource, options?: ImageBitmapOptions): Promise<ImageBitmap>;
|
||||
// declare function createImageBitmap(image: ImageBitmapSource, sx: number, sy: number, sw: number, sh: number, options?: ImageBitmapOptions): Promise<ImageBitmap>;
|
||||
/**
|
||||
* Send a HTTP(s) request
|
||||
*
|
||||
* @param url URL string
|
||||
* @param init A structured value that contains settings for the fetch() request.
|
||||
*
|
||||
* @returns A promise that resolves to {@link Response} object.
|
||||
*
|
||||
*
|
||||
*/
|
||||
|
||||
declare function fetch(
|
||||
url: string | URL | Request,
|
||||
init?: FetchRequestInit,
|
||||
): Promise<Response>;
|
||||
|
||||
/**
|
||||
* Send a HTTP(s) request
|
||||
@@ -1423,6 +1538,20 @@ declare function fetch(
|
||||
*/
|
||||
// tslint:disable-next-line:unified-signatures
|
||||
declare function fetch(request: Request, init?: RequestInit): Promise<Response>;
|
||||
/**
|
||||
* Send a HTTP(s) request
|
||||
*
|
||||
* @param url URL string
|
||||
* @param init A structured value that contains settings for the fetch() request.
|
||||
*
|
||||
* @returns A promise that resolves to {@link Response} object.
|
||||
*
|
||||
*
|
||||
*/
|
||||
declare function fetch(
|
||||
url: string | URL | Request,
|
||||
init?: FetchRequestInit,
|
||||
): Promise<Response>;
|
||||
|
||||
declare function queueMicrotask(callback: (...args: any[]) => void): void;
|
||||
/**
|
||||
@@ -1432,8 +1561,8 @@ declare function queueMicrotask(callback: (...args: any[]) => void): void;
|
||||
declare function reportError(error: any): void;
|
||||
|
||||
interface Timer {
|
||||
ref(): void;
|
||||
unref(): void;
|
||||
ref(): Timer;
|
||||
unref(): Timer;
|
||||
hasRef(): boolean;
|
||||
|
||||
[Symbol.toPrimitive](): number;
|
||||
@@ -1802,101 +1931,265 @@ declare var CustomEvent: {
|
||||
};
|
||||
|
||||
/**
|
||||
* An implementation of the [WebSocket API](https://developer.mozilla.org/en-US/docs/Web/API/WebSocket)
|
||||
* A map of WebSocket event names to event types.
|
||||
*/
|
||||
interface WebSocketEventMap {
|
||||
close: CloseEvent;
|
||||
error: Event;
|
||||
message: MessageEvent<Buffer | ArrayBuffer | string>;
|
||||
type WebSocketEventMap = {
|
||||
open: Event;
|
||||
}
|
||||
message: MessageEvent<string | Buffer>;
|
||||
close: CloseEvent;
|
||||
ping: MessageEvent<Buffer>;
|
||||
pong: MessageEvent<Buffer>;
|
||||
error: Event;
|
||||
};
|
||||
|
||||
/** Provides the API for creating and managing a WebSocket connection to a server, as well as for sending and receiving data on the connection. */
|
||||
/**
|
||||
* A state that represents if a WebSocket is connected.
|
||||
*
|
||||
* - `WebSocket.CONNECTING` is `0`, the connection is pending.
|
||||
* - `WebSocket.OPEN` is `1`, the connection is established and `send()` is possible.
|
||||
* - `WebSocket.CLOSING` is `2`, the connection is closing.
|
||||
* - `WebSocket.CLOSED` is `3`, the connection is closed or couldn't be opened.
|
||||
*
|
||||
* @link https://developer.mozilla.org/en-US/docs/Web/API/WebSocket/readyState
|
||||
*/
|
||||
type WebSocketReadyState = 0 | 1 | 2 | 3;
|
||||
|
||||
/**
|
||||
* A client that makes an outgoing WebSocket connection.
|
||||
*
|
||||
* @see https://developer.mozilla.org/en-US/docs/Web/API/WebSocket
|
||||
* @example
|
||||
* const ws = new WebSocket("wss://ws.postman-echo.com/raw");
|
||||
*
|
||||
* ws.addEventListener("open", () => {
|
||||
* console.log("Connected");
|
||||
* });
|
||||
* ws.addEventListener("message", ({ data }) => {
|
||||
* console.log("Received:", data); // string or Buffer
|
||||
* });
|
||||
* ws.addEventListener("close", ({ code, reason }) => {
|
||||
* console.log("Disconnected:", code, reason);
|
||||
* });
|
||||
*/
|
||||
interface WebSocket extends EventTarget {
|
||||
/**
|
||||
* Returns a string that indicates how binary data from the WebSocket object is exposed to scripts:
|
||||
* Sends a message.
|
||||
*
|
||||
* Can be set, to change how binary data is returned. The default is `"arraybuffer"`.
|
||||
* @param data the string, ArrayBuffer, or ArrayBufferView to send
|
||||
* @example
|
||||
* let ws: WebSocket;
|
||||
* ws.send("Hello!");
|
||||
* ws.send(new TextEncoder().encode("Hello?"));
|
||||
*/
|
||||
send(data: string | BufferSource): void;
|
||||
|
||||
/**
|
||||
* Closes the connection.
|
||||
*
|
||||
* Unlike in browsers, you can also set `binaryType` to `"nodebuffer"` to receive a {@link Buffer} object.
|
||||
* Here is a list of close codes:
|
||||
* - `1000` means "normal closure" **(default)**
|
||||
* - `1001` means the client is "going away"
|
||||
* - `1009` means a message was too big and was rejected
|
||||
* - `1011` means the server encountered an error
|
||||
* - `1012` means the server is restarting
|
||||
* - `1013` means the server is too busy or the client is rate-limited
|
||||
* - `4000` through `4999` are reserved for applications (you can use it!)
|
||||
*
|
||||
* To abruptly close the connection without a code, use `terminate()` instead.
|
||||
*
|
||||
* @param code the close code
|
||||
* @param reason the close reason
|
||||
* @example
|
||||
* let ws: WebSocket;
|
||||
* ws.close(1013, "Exceeded the rate limit of 100 messages per minute.");
|
||||
*/
|
||||
close(code?: number, reason?: string): void;
|
||||
|
||||
/**
|
||||
* Closes the connection, abruptly.
|
||||
*
|
||||
* To gracefully close the connection, use `close()` instead.
|
||||
*/
|
||||
terminate(): void;
|
||||
|
||||
/**
|
||||
* Sends a ping.
|
||||
*
|
||||
* @param data the string, ArrayBuffer, or ArrayBufferView to send
|
||||
*/
|
||||
ping(data?: string | BufferSource): void;
|
||||
|
||||
/**
|
||||
* Sends a pong.
|
||||
*
|
||||
* @param data the string, ArrayBuffer, or ArrayBufferView to send
|
||||
*/
|
||||
pong(data?: string | BufferSource): void;

  /**
   * Sets how binary data is returned in events.
   *
   * - if `nodebuffer`, binary data is returned as `Buffer` objects. **(default)**
   * - if `arraybuffer`, binary data is returned as `ArrayBuffer` objects.
   * - if `blob`, binary data is returned as `Blob` objects. **(not supported)**
   *
   * In browsers, the default is `blob`; in Bun, the default is `nodebuffer`.
   *
   * @example
   * let ws: WebSocket;
   * ws.binaryType = "arraybuffer";
   * ws.addEventListener("message", ({ data }) => {
   *   console.log(data instanceof ArrayBuffer); // true
   * });
   */
  binaryType: BinaryType;

  /**
   * The ready state of the connection.
   *
   * - `WebSocket.CONNECTING` is `0`, the connection is pending.
   * - `WebSocket.OPEN` is `1`, the connection is established and `send()` is possible.
   * - `WebSocket.CLOSING` is `2`, the connection is closing.
   * - `WebSocket.CLOSED` is `3`, the connection is closed or couldn't be opened.
   */
  readonly readyState: WebSocketReadyState;

  /**
   * The resolved URL that established the connection.
   */
  readonly url: string;

  /**
   * The number of bytes that are queued, but not yet sent.
   *
   * When the connection is closed, the value is not reset to zero.
   */
  readonly bufferedAmount: number;

  /**
   * The protocol selected by the server, if any, otherwise empty.
   */
  readonly protocol: string;

  readonly CLOSED: number;
  readonly CLOSING: number;
  readonly CONNECTING: number;
  readonly OPEN: number;

  /**
   * The extensions selected by the server, if any, otherwise empty.
   */
  readonly extensions: string;

  /**
   * Sets the event handler for `open` events.
   *
   * If you need multiple event handlers, use `addEventListener("open")` instead.
   */
  onopen: ((this: WebSocket, ev: Event) => unknown) | null;

  /**
   * Sets the event handler for `close` events.
   *
   * If you need multiple event handlers, use `addEventListener("close")` instead.
   */
  onclose: ((this: WebSocket, event: CloseEvent) => unknown) | null;

  /**
   * Sets the event handler for `message` events.
   *
   * If you need multiple event handlers, use `addEventListener("message")` instead.
   */
  onmessage:
    | ((this: WebSocket, event: MessageEvent<string | Buffer>) => unknown)
    | null;

  /**
   * Sets the event handler for `error` events.
   *
   * If you need multiple event handlers, use `addEventListener("error")` instead.
   */
  onerror: ((this: WebSocket, event: Event) => unknown) | null;

  addEventListener<T extends keyof WebSocketEventMap>(
    type: T,
    listener: (this: WebSocket, event: WebSocketEventMap[T]) => unknown,
    options?: boolean | AddEventListenerOptions,
  ): void;

  addEventListener(
    type: string,
    listener: (this: WebSocket, event: Event) => unknown,
    options?: boolean | AddEventListenerOptions,
  ): void;

  removeEventListener<T extends keyof WebSocketEventMap>(
    type: T,
    listener: (this: WebSocket, event: WebSocketEventMap[T]) => unknown,
    options?: boolean | EventListenerOptions,
  ): void;

  removeEventListener(
    type: string,
    listener: (this: WebSocket, event: Event) => unknown,
    options?: boolean | EventListenerOptions,
  ): void;
}
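Because the `WebSocketEventMap` above also declares Bun's `ping` and `pong` events, they can be listened for with full typing. A small sketch (the URL is a placeholder):

```ts
// Sketch: the endpoint is a placeholder; `data` is typed as Buffer via
// the `ping`/`pong` entries in WebSocketEventMap.
const ws = new WebSocket("wss://example.com/socket");

ws.addEventListener("ping", ({ data }) => {
  console.log("ping payload:", data.byteLength, "bytes");
});
ws.addEventListener("pong", ({ data }) => {
  console.log("pong payload:", data.byteLength, "bytes");
});
```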

/**
 * A client that makes an outgoing WebSocket connection.
 *
 * @see https://developer.mozilla.org/en-US/docs/Web/API/WebSocket
 * @example
 * const ws = new WebSocket("wss://ws.postman-echo.com/raw");
 *
 * ws.addEventListener("open", () => {
 *   console.log("Connected");
 * });
 * ws.addEventListener("message", ({ data }) => {
 *   console.log("Received:", data); // string or Buffer
 * });
 * ws.addEventListener("close", ({ code, reason }) => {
 *   console.log("Disconnected:", code, reason);
 * });
 */
declare var WebSocket: {
  prototype: WebSocket;

  new (url: string | URL, protocols?: string | string[]): WebSocket;

  new (
    url: string | URL,
    options: {
      /**
       * Sets the headers when establishing a connection.
       *
       * This is a Bun-specific extension.
       */
      headers?: HeadersInit;

      /**
       * Sets the sub-protocol the client is willing to accept.
       */
      protocol?: string;

      /**
       * Sets the sub-protocols the client is willing to accept.
       */
      protocols?: string[];
    },
  ): WebSocket;

  /**
   * The connection is pending.
   */
  readonly CONNECTING: 0;

  /**
   * The connection is established and `send()` is possible.
   */
  readonly OPEN: 1;

  /**
   * The connection is closing.
   */
  readonly CLOSING: 2;

  /**
   * The connection is closed or couldn't be opened.
   */
  readonly CLOSED: 3;
};
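A short sketch of the Bun-specific constructor overload above (custom `headers` plus subprotocol negotiation); the URL, token, and protocol names are placeholders:

```ts
// Sketch: URL, token, and protocol names are placeholders.
const ws = new WebSocket("wss://example.com/feed", {
  headers: {
    Authorization: "Bearer <token>",
  },
  protocols: ["chat-v1", "chat-v2"],
});

ws.binaryType = "arraybuffer"; // opt out of the default "nodebuffer"
ws.addEventListener("open", () => {
  console.log("negotiated protocol:", ws.protocol);
});
```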

@@ -1945,7 +2238,7 @@ interface URLSearchParams {
  ): void;
  /** Returns a string containing a query string suitable for use in a URL. Does not include the question mark. */
  toString(): string;
  [Symbol.iterator](): IterableIterator<[string, FormDataEntryValue]>;
  [Symbol.iterator](): IterableIterator<[string, string]>;
}

declare var URLSearchParams: {
@@ -1990,6 +2283,8 @@ interface FetchEvent extends Event {

interface EventMap {
  fetch: FetchEvent;
  message: MessageEvent;
  messageerror: MessageEvent;
  // exit: Event;
}

@@ -2261,7 +2556,8 @@ declare var ReadableStreamDefaultController: {
interface ReadableStreamDefaultReader<R = any>
  extends ReadableStreamGenericReader {
  read(): Promise<ReadableStreamDefaultReadResult<R>>;
  readMany(): Promise<ReadableStreamDefaultReadValueResult<R>>;
  /** Only available in Bun. If there are multiple chunks in the queue, this will return all of them at the same time. */
  readMany(): Promise<ReadableStreamDefaultReadManyResult<R>>;
  releaseLock(): void;
}

@@ -3197,3 +3493,154 @@ declare module "*.txt" {
  var text: string;
  export = text;
}
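A hedged sketch of the Bun-only `readMany()` declared above. The exact shape of `ReadableStreamDefaultReadManyResult` is not shown in this hunk; the sketch assumes it exposes the queued chunks as `value` plus a `done` flag, mirroring `read()`:

```ts
// Sketch: assumes the readMany() result carries `value` (an array of queued
// chunks) and `done`, similar to the regular read() result.
async function drainOnce(stream: ReadableStream<Uint8Array>) {
  const reader = stream.getReader();
  const result = await reader.readMany();
  if (!result.done) {
    console.log("dequeued", result.value.length, "chunks in one call");
  }
  reader.releaseLock();
}
```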

interface EventSourceEventMap {
  error: Event;
  message: MessageEvent;
  open: Event;
}

interface Worker extends EventTarget {
  onerror: ((this: Worker, ev: ErrorEvent) => any) | null;
  onmessage: ((this: Worker, ev: MessageEvent) => any) | null;
  onmessageerror: ((this: Worker, ev: MessageEvent) => any) | null;

  addEventListener<K extends keyof WorkerEventMap>(
    type: K,
    listener: (this: Worker, ev: WorkerEventMap[K]) => any,
    options?: boolean | AddEventListenerOptions,
  ): void;

  removeEventListener<K extends keyof WorkerEventMap>(
    type: K,
    listener: (this: Worker, ev: WorkerEventMap[K]) => any,
    options?: boolean | EventListenerOptions,
  ): void;

  terminate(): void;

  postMessage(message: any, transfer?: Transferable[]): void;

  /**
   * Keep the process alive until the worker is terminated or `unref`'d
   */
  ref(): void;

  /**
   * Undo a previous `ref()`
   */
  unref(): void;
}

/**
 * Post a message to the parent thread.
 *
 * Only useful in a worker thread; calling this from the main thread does nothing.
 */
declare function postMessage(message: any, transfer?: Transferable[]): void;

declare var Worker: {
  prototype: Worker;
  new (stringUrl: string | URL, options?: WorkerOptions): Worker;
};

interface WorkerOptions {
  name?: string;
  bun?: {
    /**
     * Use less memory, but make the worker slower.
     *
     * Internally, this sets the heap size configuration in JavaScriptCore to be
     * the small heap instead of the large heap.
     */
    smol?: boolean;

    /**
     * When `true`, the worker will keep the parent thread alive until the worker is terminated or `unref`'d.
     * When `false`, the worker will not keep the parent thread alive.
     *
     * By default, this is `false`.
     */
    ref?: boolean;
  };
}

interface WorkerEventMap {
  message: MessageEvent;
  messageerror: MessageEvent;
  error: ErrorEvent;
  open: Event;
  close: Event;
}
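A small sketch tying `WorkerOptions` above to the `Worker` constructor declared earlier; the `./worker.ts` path is hypothetical:

```ts
// Sketch: "./worker.ts" is a hypothetical file; `smol` and `ref` are the
// Bun-specific options declared in WorkerOptions above.
const worker = new Worker(new URL("./worker.ts", import.meta.url), {
  name: "background-task",
  bun: {
    smol: true, // smaller JavaScriptCore heap, slower worker
    ref: false, // do not keep the parent thread alive for this worker
  },
});

worker.onmessage = event => {
  console.log("from worker:", event.data);
};
worker.postMessage({ start: true });
```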

interface EventSource extends EventTarget {
  onerror: ((this: EventSource, ev: ErrorEvent) => any) | null;
  onmessage: ((this: EventSource, ev: MessageEvent) => any) | null;
  onopen: ((this: EventSource, ev: Event) => any) | null;
  /** Returns the state of this EventSource object's connection. It can have the values described below. */
  readonly readyState: number;
  /** Returns the URL providing the event stream. */
  readonly url: string;
  /**
   * Returns true if the credentials mode for connection requests to the URL providing the event stream is set to "include", and false otherwise.
   *
   * Not supported in Bun
   */
  readonly withCredentials: boolean;
  /** Aborts any instances of the fetch algorithm started for this EventSource object, and sets the readyState attribute to CLOSED. */
  close(): void;
  readonly CLOSED: number;
  readonly CONNECTING: number;
  readonly OPEN: number;

  addEventListener<K extends keyof EventSourceEventMap>(
    type: K,
    listener: (this: EventSource, ev: EventSourceEventMap[K]) => any,
    options?: boolean | AddEventListenerOptions,
  ): void;
  addEventListener(
    type: string,
    listener: (this: EventSource, event: MessageEvent) => any,
    options?: boolean | AddEventListenerOptions,
  ): void;
  addEventListener(
    type: string,
    listener: EventListenerOrEventListenerObject,
    options?: boolean | AddEventListenerOptions,
  ): void;
  removeEventListener<K extends keyof EventSourceEventMap>(
    type: K,
    listener: (this: EventSource, ev: EventSourceEventMap[K]) => any,
    options?: boolean | EventListenerOptions,
  ): void;
  removeEventListener(
    type: string,
    listener: (this: EventSource, event: MessageEvent) => any,
    options?: boolean | EventListenerOptions,
  ): void;
  removeEventListener(
    type: string,
    listener: EventListenerOrEventListenerObject,
    options?: boolean | EventListenerOptions,
  ): void;

  /**
   * Keep the event loop alive while connection is open or reconnecting
   *
   * Not available in browsers
   */
  ref(): void;

  /**
   * Do not keep the event loop alive while connection is open or reconnecting
   *
   * Not available in browsers
   */
  unref(): void;
}

declare var EventSource: {
  prototype: EventSource;
  new (url: string | URL, eventSourceInitDict?: EventSourceInit): EventSource;
  readonly CLOSED: number;
  readonly CONNECTING: number;
  readonly OPEN: number;
};
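A sketch of the Bun-specific `ref()`/`unref()` pair documented above; the endpoint is a placeholder:

```ts
// Sketch: the endpoint is a placeholder. unref() lets the process exit even
// while the EventSource stays open or keeps reconnecting.
const source = new EventSource("http://example.com/stream");

source.addEventListener("message", event => {
  console.log("event data:", event.data);
});
source.unref(); // call source.ref() later to keep the event loop alive again
```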

20
packages/bun-types/http.d.ts
vendored
@@ -987,7 +987,7 @@ declare module "http" {
|
||||
* in the response to be dropped and the socket to be destroyed.
|
||||
* @deprecated Since v14.1.0,v13.14.0 - Use `destroy` instead.
|
||||
*/
|
||||
// abort(): void;
|
||||
abort(): void;
|
||||
/**
|
||||
* Once a socket is assigned to this request and is connected `socket.setTimeout()` will be called.
|
||||
* @param timeout Milliseconds before a request times out.
|
||||
@@ -1785,6 +1785,24 @@ declare module "http" {
|
||||
callback?: (res: IncomingMessage) => void,
|
||||
): ClientRequest;
|
||||
|
||||
/**
|
||||
* Performs the low-level validations on the provided name that are done when `res.setHeader(name, value)` is called.
|
||||
* Passing illegal value as name will result in a TypeError being thrown, identified by `code: 'ERR_INVALID_HTTP_TOKEN'`.
|
||||
* @param name Header name
|
||||
* @since v14.3.0
|
||||
*/
|
||||
function validateHeaderName(name: string): void;
|
||||
/**
|
||||
* Performs the low-level validations on the provided value that are done when `res.setHeader(name, value)` is called.
|
||||
* Passing illegal value as value will result in a TypeError being thrown.
|
||||
* - Undefined value error is identified by `code: 'ERR_HTTP_INVALID_HEADER_VALUE'`.
|
||||
* - Invalid value character error is identified by `code: 'ERR_INVALID_CHAR'`.
|
||||
* @param name Header name
|
||||
* @param value Header value
|
||||
* @since v14.3.0
|
||||
*/
|
||||
function validateHeaderValue(name: string, value: string): void;
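A hedged sketch of the two validators documented above; the header name and value are arbitrary examples:

```ts
// Sketch: the header name/value strings are arbitrary examples.
import { validateHeaderName, validateHeaderValue } from "node:http";

try {
  validateHeaderName("Content Type"); // the space is an illegal token character
} catch (err: any) {
  console.log(err.code); // "ERR_INVALID_HTTP_TOKEN"
}

try {
  validateHeaderValue("x-my-header", undefined as unknown as string);
} catch (err: any) {
  console.log(err.code); // "ERR_HTTP_INVALID_HEADER_VALUE"
}
```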
|
||||
|
||||
let globalAgent: Agent;
|
||||
|
||||
/**
|
||||
|
||||
36
packages/bun-types/jsc.d.ts
vendored
@@ -39,6 +39,42 @@ declare module "bun:jsc" {
|
||||
export function reoptimizationRetryCount(func: Function): number;
|
||||
export function drainMicrotasks(): void;
|
||||
|
||||
/**
|
||||
* Convert a JavaScript value to a binary representation that can be sent to another Bun instance.
|
||||
*
|
||||
* Internally, this uses the serialization format from WebKit/Safari.
|
||||
*
|
||||
* @param value A JavaScript value, usually an object or array, to be converted.
|
||||
* @returns A SharedArrayBuffer that can be sent to another Bun instance.
|
||||
*
|
||||
*/
|
||||
export function serialize(
|
||||
value: any,
|
||||
options?: { binaryType?: "arraybuffer" },
|
||||
): SharedArrayBuffer;
|
||||
|
||||
/**
|
||||
* Convert a JavaScript value to a binary representation that can be sent to another Bun instance.
|
||||
*
|
||||
* Internally, this uses the serialization format from WebKit/Safari.
|
||||
*
|
||||
* @param value A JavaScript value, usually an object or array, to be converted.
|
||||
* @returns A Buffer that can be sent to another Bun instance.
|
||||
*/
|
||||
export function serialize(
|
||||
value: any,
|
||||
options?: { binaryType: "nodebuffer" },
|
||||
): Buffer;
|
||||
|
||||
/**
|
||||
* Convert an ArrayBuffer or Buffer to a JavaScript value compatible with the HTML Structured Clone Algorithm.
|
||||
*
|
||||
* @param value A serialized value, usually an ArrayBuffer or Buffer, to be converted.
|
||||
*/
|
||||
export function deserialize(
|
||||
value: ArrayBufferLike | TypedArray | Buffer,
|
||||
): any;
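A short round-trip sketch of `serialize`/`deserialize` as declared above:

```ts
// Round-trip sketch using the bun:jsc declarations above.
import { serialize, deserialize } from "bun:jsc";

const original = { hello: "world", nested: [1, 2, 3] };
const buffer = serialize(original, { binaryType: "nodebuffer" }); // Buffer
const copy = deserialize(buffer);

console.log(copy.nested[1]); // 2
```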
|
||||
|
||||
/**
|
||||
* Set the timezone used by Intl, Date, etc.
|
||||
*
|
||||
|
||||
26
packages/bun-types/perf_hooks.d.ts
vendored
@@ -452,19 +452,19 @@ declare module "perf_hooks" {
|
||||
// },
|
||||
// ): void;
|
||||
// }
|
||||
// namespace constants {
|
||||
// const NODE_PERFORMANCE_GC_MAJOR: number;
|
||||
// const NODE_PERFORMANCE_GC_MINOR: number;
|
||||
// const NODE_PERFORMANCE_GC_INCREMENTAL: number;
|
||||
// const NODE_PERFORMANCE_GC_WEAKCB: number;
|
||||
// const NODE_PERFORMANCE_GC_FLAGS_NO: number;
|
||||
// const NODE_PERFORMANCE_GC_FLAGS_CONSTRUCT_RETAINED: number;
|
||||
// const NODE_PERFORMANCE_GC_FLAGS_FORCED: number;
|
||||
// const NODE_PERFORMANCE_GC_FLAGS_SYNCHRONOUS_PHANTOM_PROCESSING: number;
|
||||
// const NODE_PERFORMANCE_GC_FLAGS_ALL_AVAILABLE_GARBAGE: number;
|
||||
// const NODE_PERFORMANCE_GC_FLAGS_ALL_EXTERNAL_MEMORY: number;
|
||||
// const NODE_PERFORMANCE_GC_FLAGS_SCHEDULE_IDLE: number;
|
||||
// }
|
||||
namespace constants {
|
||||
const NODE_PERFORMANCE_GC_MAJOR: number;
|
||||
const NODE_PERFORMANCE_GC_MINOR: number;
|
||||
const NODE_PERFORMANCE_GC_INCREMENTAL: number;
|
||||
const NODE_PERFORMANCE_GC_WEAKCB: number;
|
||||
const NODE_PERFORMANCE_GC_FLAGS_NO: number;
|
||||
const NODE_PERFORMANCE_GC_FLAGS_CONSTRUCT_RETAINED: number;
|
||||
const NODE_PERFORMANCE_GC_FLAGS_FORCED: number;
|
||||
const NODE_PERFORMANCE_GC_FLAGS_SYNCHRONOUS_PHANTOM_PROCESSING: number;
|
||||
const NODE_PERFORMANCE_GC_FLAGS_ALL_AVAILABLE_GARBAGE: number;
|
||||
const NODE_PERFORMANCE_GC_FLAGS_ALL_EXTERNAL_MEMORY: number;
|
||||
const NODE_PERFORMANCE_GC_FLAGS_SCHEDULE_IDLE: number;
|
||||
}
|
||||
const performance: Performance;
|
||||
// interface EventLoopMonitorOptions {
|
||||
// /**
|
||||
|
||||
@@ -75,6 +75,8 @@ const tsConfig = {
|
||||
skipLibCheck: true,
|
||||
jsx: "react-jsx",
|
||||
allowImportingTsExtensions: true,
|
||||
emitDeclarationOnly: true,
|
||||
composite: true,
|
||||
allowSyntheticDefaultImports: true,
|
||||
forceConsistentCasingInFileNames: true,
|
||||
allowJs: true,
|
||||
|
||||
10
packages/bun-types/sqlite.d.ts
vendored
@@ -235,6 +235,7 @@ declare module "bun:sqlite" {
|
||||
ParamsType extends SQLQueryBindings | SQLQueryBindings[],
|
||||
>(
|
||||
sqlQuery: string,
|
||||
params?: ParamsType
|
||||
): Statement<
|
||||
ReturnType,
|
||||
ParamsType extends Array<any> ? ParamsType : [ParamsType]
|
||||
@@ -579,7 +580,9 @@ declare module "bun:sqlite" {
|
||||
/**
|
||||
* Execute the prepared statement and return the results as an array of arrays.
|
||||
*
|
||||
* This is a little faster than {@link all}.
|
||||
* In Bun v0.6.7 and earlier, this method returned `null` if there were no
|
||||
* results instead of `[]`. This was changed in v0.6.8 to align
|
||||
* more with what people expect.
|
||||
*
|
||||
* @param params optional values to bind to the statement. If omitted, the statement is run with the last bound values or no parameters if there are none.
|
||||
*
|
||||
@@ -595,12 +598,15 @@ declare module "bun:sqlite" {
|
||||
*
|
||||
* stmt.values("foo");
|
||||
* // => [['foo']]
|
||||
*
|
||||
* stmt.values("not-found");
|
||||
* // => []
|
||||
* ```
|
||||
*
|
||||
* The following types can be used when binding parameters:
|
||||
*
|
||||
* | JavaScript type | SQLite type |
|
||||
* | --------------- | ----------- |
|
||||
* | `string` | `TEXT` |
|
||||
* | `number` | `INTEGER` or `DECIMAL` |
|
||||
* | `boolean` | `INTEGER` (1 or 0) |
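A hedged sketch of `values()` with a `TEXT` binding, matching the (truncated) table above; `Database`, `run`, `query`, and `values` are assumed to be the `bun:sqlite` APIs this file documents, and the table/column names are made up:

```ts
// Sketch: table/column names are made up; Database, run, query, and values
// are assumed to be the bun:sqlite APIs documented in this file.
import { Database } from "bun:sqlite";

const db = new Database(":memory:");
db.run("CREATE TABLE cats (name TEXT)");
db.run("INSERT INTO cats (name) VALUES ('foo')");

const stmt = db.query("SELECT name FROM cats WHERE name = ?");
console.log(stmt.values("foo")); // => [["foo"]]
console.log(stmt.values("not-found")); // => []
```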
|
||||
|
||||
151
packages/bun-types/stream.d.ts
vendored
@@ -46,22 +46,7 @@ declare module "stream" {
|
||||
encoding?: BufferEncoding | undefined;
|
||||
read?(this: Readable, size: number): void;
|
||||
}
|
||||
class Readable<R = any> extends Stream implements ReadableStream {
|
||||
// TODO: improve type later
|
||||
values: any;
|
||||
|
||||
readonly locked: boolean;
|
||||
cancel(reason?: any): Promise<void>;
|
||||
getReader(): ReadableStreamDefaultReader<R>;
|
||||
pipeThrough<T>(
|
||||
transform: ReadableWritablePair<T, R>,
|
||||
options?: StreamPipeOptions,
|
||||
): ReadableStream<T>;
|
||||
pipeTo(
|
||||
destination: WritableStream<R>,
|
||||
options?: StreamPipeOptions,
|
||||
): Promise<void>;
|
||||
tee(): [ReadableStream<R>, ReadableStream<R>];
|
||||
class Readable<R = any> extends Stream {
|
||||
forEach(
|
||||
callbackfn: (
|
||||
value: any,
|
||||
@@ -71,12 +56,23 @@ declare module "stream" {
|
||||
thisArg?: any,
|
||||
): void;
|
||||
/**
|
||||
* A utility method for creating Readable Streams out of iterators.
|
||||
* A utility method for creating a `Readable` from a web `ReadableStream`.
|
||||
* @since v17.0.0
|
||||
* @experimental
|
||||
*/
|
||||
static from(
|
||||
iterable: Iterable<any> | AsyncIterable<any>,
|
||||
options?: ReadableOptions,
|
||||
static fromWeb(
|
||||
readableStream: ReadableStream,
|
||||
options?: Pick<
|
||||
ReadableOptions,
|
||||
"encoding" | "highWaterMark" | "objectMode" | "signal"
|
||||
>,
|
||||
): Readable;
|
||||
/**
|
||||
* A utility method for creating a web `ReadableStream` from a `Readable`.
|
||||
* @since v17.0.0
|
||||
* @experimental
|
||||
*/
|
||||
static toWeb(streamReadable: Readable): ReadableStream;
|
||||
/**
|
||||
* Returns whether the stream has been read from or cancelled.
|
||||
*/
|
||||
@@ -512,11 +508,25 @@ declare module "stream" {
|
||||
): void;
|
||||
final?(this: Writable, callback: (error?: Error | null) => void): void;
|
||||
}
|
||||
class Writable<W = any> extends Stream implements WritableStream {
|
||||
readonly locked: boolean;
|
||||
abort(reason?: any): Promise<void>;
|
||||
close(): Promise<void>;
|
||||
getWriter(): WritableStreamDefaultWriter<W>;
|
||||
class Writable<W = any> extends Stream {
|
||||
/**
|
||||
* A utility method for creating a `Writable` from a web `WritableStream`.
|
||||
* @since v17.0.0
|
||||
* @experimental
|
||||
*/
|
||||
static fromWeb(
|
||||
writableStream: WritableStream,
|
||||
options?: Pick<
|
||||
WritableOptions,
|
||||
"decodeStrings" | "highWaterMark" | "objectMode" | "signal"
|
||||
>,
|
||||
): Writable;
|
||||
/**
|
||||
* A utility method for creating a web `WritableStream` from a `Writable`.
|
||||
* @since v17.0.0
|
||||
* @experimental
|
||||
*/
|
||||
static toWeb(streamWritable: Writable): WritableStream;
|
||||
/**
|
||||
* Is `true` if it is safe to call `writable.write()`, which means
|
||||
* the stream has not been destroyed, errored or ended.
|
||||
@@ -634,11 +644,11 @@ declare module "stream" {
|
||||
* @return `false` if the stream wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`.
|
||||
*/
|
||||
write(
|
||||
chunk: any,
|
||||
chunk: W,
|
||||
callback?: (error: Error | null | undefined) => void,
|
||||
): boolean;
|
||||
write(
|
||||
chunk: any,
|
||||
chunk: W,
|
||||
encoding: BufferEncoding,
|
||||
callback?: (error: Error | null | undefined) => void,
|
||||
): boolean;
|
||||
@@ -851,28 +861,21 @@ declare module "stream" {
|
||||
* * `zlib streams`
|
||||
* * `crypto streams`
|
||||
*/
|
||||
class Duplex extends Readable implements Writable {
|
||||
readonly writable: boolean;
|
||||
readonly writableEnded: boolean;
|
||||
readonly writableFinished: boolean;
|
||||
readonly writableHighWaterMark: number;
|
||||
readonly writableLength: number;
|
||||
readonly writableObjectMode: boolean;
|
||||
readonly writableCorked: number;
|
||||
/**
|
||||
* If `false` then the stream will automatically end the writable side when the
|
||||
* readable side ends. Set initially by the `allowHalfOpen` constructor option,
|
||||
* which defaults to `false`.
|
||||
*
|
||||
* This can be changed manually to change the half-open behavior of an existing `Duplex` stream instance, but must be changed before the `'end'` event is
|
||||
* emitted.
|
||||
* @since v0.9.4
|
||||
*/
|
||||
allowHalfOpen: boolean;
|
||||
constructor(opts?: DuplexOptions);
|
||||
abort(reason?: any): Promise<void>;
|
||||
close(): Promise<void>;
|
||||
getWriter(): WritableStreamDefaultWriter<any>;
|
||||
type Duplex<R = any> = Readable<R> &
|
||||
Writable<R> & {
|
||||
/**
|
||||
* If `false` then the stream will automatically end the writable side when the
|
||||
* readable side ends. Set initially by the `allowHalfOpen` constructor option,
|
||||
* which defaults to `false`.
|
||||
*
|
||||
* This can be changed manually to change the half-open behavior of an existing `Duplex` stream instance, but must be changed before the `'end'` event is
|
||||
* emitted.
|
||||
* @since v0.9.4
|
||||
*/
|
||||
allowHalfOpen: boolean;
|
||||
};
|
||||
interface DuplexConstructor {
|
||||
new <T = any>(opts?: DuplexOptions): Duplex<T>;
|
||||
/**
|
||||
* A utility method for creating duplex streams.
|
||||
*
|
||||
@@ -894,7 +897,7 @@ declare module "stream" {
|
||||
*
|
||||
* @since v16.8.0
|
||||
*/
|
||||
static from(
|
||||
from(
|
||||
src:
|
||||
| Stream
|
||||
| Blob
|
||||
@@ -906,39 +909,19 @@ declare module "stream" {
|
||||
| Promise<any>
|
||||
| Object,
|
||||
): Duplex;
|
||||
_write(
|
||||
chunk: any,
|
||||
encoding: BufferEncoding,
|
||||
callback: (error?: Error | null) => void,
|
||||
): void;
|
||||
_writev?(
|
||||
chunks: Array<{
|
||||
chunk: any;
|
||||
encoding: BufferEncoding;
|
||||
}>,
|
||||
callback: (error?: Error | null) => void,
|
||||
): void;
|
||||
_destroy(
|
||||
error: Error | null,
|
||||
callback: (error: Error | null) => void,
|
||||
): void;
|
||||
_final(callback: (error?: Error | null) => void): void;
|
||||
write(
|
||||
chunk: any,
|
||||
encoding?: BufferEncoding,
|
||||
cb?: (error: Error | null | undefined) => void,
|
||||
): boolean;
|
||||
write(
|
||||
chunk: any,
|
||||
cb?: (error: Error | null | undefined) => void,
|
||||
): boolean;
|
||||
setDefaultEncoding(encoding: BufferEncoding): this;
|
||||
end(cb?: () => void): this;
|
||||
end(chunk: any, cb?: () => void): this;
|
||||
end(chunk: any, encoding?: BufferEncoding, cb?: () => void): this;
|
||||
cork(): void;
|
||||
uncork(): void;
|
||||
fromWeb<T = any>(
|
||||
pair: {
|
||||
readable: ReadableStream<T>;
|
||||
writable: WritableStream<T>;
|
||||
},
|
||||
options: DuplexOptions,
|
||||
): Duplex<T>;
|
||||
toWeb<T>(stream: Duplex<T>): {
|
||||
readable: ReadableStream<T>;
|
||||
writable: WritableStream<T>;
|
||||
};
|
||||
}
|
||||
var Duplex: DuplexConstructor;
|
||||
type TransformCallback = (error?: Error | null, data?: any) => void;
|
||||
interface TransformOptions extends DuplexOptions {
|
||||
construct?(
|
||||
@@ -985,7 +968,7 @@ declare module "stream" {
|
||||
* * `crypto streams`
|
||||
* @since v0.9.4
|
||||
*/
|
||||
class Transform extends Duplex {
|
||||
class Transform<T = any> extends Duplex<T> {
|
||||
constructor(opts?: TransformOptions);
|
||||
_transform(
|
||||
chunk: any,
|
||||
@@ -998,7 +981,7 @@ declare module "stream" {
|
||||
* The `stream.PassThrough` class is a trivial implementation of a `Transform` stream that simply passes the input bytes across to the output. Its purpose is
|
||||
* primarily for examples and testing, but there are some use cases where`stream.PassThrough` is useful as a building block for novel sorts of streams.
|
||||
*/
|
||||
class PassThrough extends Transform {}
|
||||
class PassThrough<T = any> extends Transform<T> {}
|
||||
/**
|
||||
* Attaches an AbortSignal to a readable or writeable stream. This lets code
|
||||
* control stream destruction using an `AbortController`.
|
||||
|
||||
@@ -1,6 +1,15 @@
|
||||
import { watch } from "node:fs";
|
||||
import * as tsd from "tsd";
|
||||
import * as fs from "fs";
|
||||
import { exists } from "fs/promises";
|
||||
|
||||
tsd.expectType<Promise<boolean>>(exists("/etc/passwd"));
|
||||
tsd.expectType<Promise<boolean>>(fs.promises.exists("/etc/passwd"));
|
||||
|
||||
// file path
|
||||
watch(".", (eventType, filename) => {
|
||||
console.log(`event type = ${eventType}`);
|
||||
if (filename) {
|
||||
console.log(`filename = ${filename}`);
|
||||
}
|
||||
});
|
||||
|
||||
31
packages/bun-types/tests/mocks.test-d.ts
Normal file
@@ -0,0 +1,31 @@
|
||||
import { expectType } from "tsd";
|
||||
import { mock, jest } from "bun:test";
|
||||
|
||||
const mock1 = mock((arg: string) => {
|
||||
return arg.length;
|
||||
});
|
||||
|
||||
const arg1 = mock1("1");
|
||||
expectType<number>(arg1);
|
||||
mock;
|
||||
|
||||
type arg2 = jest.Spied<() => string>;
|
||||
declare var arg2: arg2;
|
||||
arg2.mock.calls[0];
|
||||
mock;
|
||||
|
||||
// @ts-expect-error
|
||||
jest.fn<() => Promise<string>>().mockReturnValue("asdf");
|
||||
// @ts-expect-error
|
||||
jest.fn<() => string>().mockReturnValue(24);
|
||||
jest.fn<() => string>().mockReturnValue("24");
|
||||
|
||||
jest.fn<() => Promise<string>>().mockResolvedValue("asdf");
|
||||
// @ts-expect-error
|
||||
jest.fn<() => string>().mockResolvedValue(24);
|
||||
// @ts-expect-error
|
||||
jest.fn<() => string>().mockResolvedValue("24");
|
||||
|
||||
jest.fn().mockClear();
|
||||
jest.fn().mockReset();
|
||||
jest.fn().mockRejectedValueOnce(new Error());
|
||||
51
packages/bun-types/tests/process.test-d.ts
Normal file
@@ -0,0 +1,51 @@
|
||||
process.memoryUsage();
|
||||
process.cpuUsage().system;
|
||||
process.cpuUsage().user;
|
||||
process.on("SIGINT", () => {
|
||||
console.log("Interrupt from keyboard");
|
||||
});
|
||||
|
||||
process.on("beforeExit", code => {
|
||||
console.log("Event loop is empty and no work is left to schedule.", code);
|
||||
});
|
||||
|
||||
process.on("exit", code => {
|
||||
console.log("Exiting with code:", code);
|
||||
});
|
||||
process.kill(123, "SIGTERM");
|
||||
|
||||
process.getegid();
|
||||
process.geteuid();
|
||||
process.getgid();
|
||||
process.getgroups();
|
||||
process.getuid();
|
||||
|
||||
process.once("SIGINT", () => {
|
||||
console.log("Interrupt from keyboard");
|
||||
});
|
||||
|
||||
process.reallyExit();
|
||||
|
||||
process.assert(false, "PleAsE don't Use THIs It IS dEpReCATED");
|
||||
|
||||
console.log(process.allowedNodeEnvironmentFlags);
|
||||
// console.log(process.channel);
|
||||
// console.log(process.connected);
|
||||
// console.log(process.constrainedMemory);
|
||||
console.log(process.debugPort);
|
||||
// console.log(process.disconnect);
|
||||
// console.log(process.getActiveResourcesInfo);
|
||||
// console.log(process.setActiveResourcesInfo);
|
||||
// console.log(process.setuid);
|
||||
// console.log(process.setgid);
|
||||
// console.log(process.setegid);
|
||||
// console.log(process.seteuid);
|
||||
// console.log(process.setgroups);
|
||||
// console.log(process.hasUncaughtExceptionCaptureCallback);
|
||||
// console.log(process.initGroups);
|
||||
console.log(process.listenerCount("exit"));
|
||||
console.log(process.memoryUsage());
|
||||
// console.log(process.report);
|
||||
// console.log(process.resourceUsage);
|
||||
// console.log(process.setSourceMapsEnabled());
|
||||
// console.log(process.send);
|
||||
@@ -39,7 +39,8 @@ type User = {
|
||||
|
||||
Bun.serve<User>({
|
||||
fetch(req, server) {
|
||||
if (req.url === "/chat") {
|
||||
const url = new URL(req.url);
|
||||
if (url.pathname === "/chat") {
|
||||
if (
|
||||
server.upgrade(req, {
|
||||
data: {
|
||||
@@ -78,4 +79,27 @@ Bun.serve<User>({
|
||||
},
|
||||
});
|
||||
|
||||
Bun.serve({
|
||||
fetch(req) {
|
||||
throw new Error("woops!");
|
||||
},
|
||||
error(error) {
|
||||
return new Response(`<pre>${error}\n${error.stack}</pre>`, {
|
||||
headers: {
|
||||
"Content-Type": "text/html",
|
||||
},
|
||||
});
|
||||
},
|
||||
});
|
||||
|
||||
export {};
|
||||
|
||||
Bun.serve({
|
||||
port: 1234,
|
||||
fetch(req, server) {
|
||||
server.upgrade(req);
|
||||
if (Math.random() > 0.5) return undefined;
|
||||
return new Response();
|
||||
},
|
||||
websocket: { message() {} },
|
||||
});
|
||||
|
||||
4
packages/bun-types/timers.d.ts
vendored
@@ -11,8 +11,8 @@
|
||||
|
||||
declare module "timers" {
|
||||
class Timer {
|
||||
ref(): void;
|
||||
unref(): void;
|
||||
ref(): Timer;
|
||||
unref(): Timer;
|
||||
hasRef(): boolean;
|
||||
}
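A hedged sketch of the fluent return-type change in this hunk, assuming Bun's global `setInterval` hands back this `Timer` (as Node's does):

```ts
// Sketch: assumes setInterval returns the Timer patched above, so unref()
// can be chained and hasRef() reflects it.
const timer = setInterval(() => {
  console.log("tick");
}, 1000).unref();

console.log(timer.hasRef()); // false
```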
|
||||
|
||||
|
||||
287
src/ArenaAllocator.zig
Normal file
@@ -0,0 +1,287 @@
|
||||
/// TODO: delete this once we've upgraded Zig and https://github.com/ziglang/zig/pull/15985 is merged.
|
||||
const std = @import("std");
|
||||
const assert = std.debug.assert;
|
||||
const mem = std.mem;
|
||||
const Allocator = std.mem.Allocator;
|
||||
|
||||
/// This allocator takes an existing allocator, wraps it, and provides an interface
|
||||
/// where you can allocate without freeing, and then free it all together.
|
||||
pub const ArenaAllocator = struct {
|
||||
child_allocator: Allocator,
|
||||
state: State,
|
||||
|
||||
/// Inner state of ArenaAllocator. Can be stored rather than the entire ArenaAllocator
|
||||
/// as a memory-saving optimization.
|
||||
pub const State = struct {
|
||||
buffer_list: std.SinglyLinkedList(usize) = .{},
|
||||
end_index: usize = 0,
|
||||
|
||||
pub fn promote(self: State, child_allocator: Allocator) ArenaAllocator {
|
||||
return .{
|
||||
.child_allocator = child_allocator,
|
||||
.state = self,
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
pub fn allocator(self: *ArenaAllocator) Allocator {
|
||||
return .{
|
||||
.ptr = self,
|
||||
.vtable = &.{
|
||||
.alloc = alloc,
|
||||
.resize = resize,
|
||||
.free = free,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
const BufNode = std.SinglyLinkedList(usize).Node;
|
||||
|
||||
pub fn init(child_allocator: Allocator) ArenaAllocator {
|
||||
return (State{}).promote(child_allocator);
|
||||
}
|
||||
|
||||
pub fn deinit(self: ArenaAllocator) void {
|
||||
// NOTE: When changing this, make sure `reset()` is adjusted accordingly!
|
||||
|
||||
var it = self.state.buffer_list.first;
|
||||
while (it) |node| {
|
||||
// this has to occur before the free because the free frees node
|
||||
const next_it = node.next;
|
||||
const align_bits = std.math.log2_int(usize, @alignOf(BufNode));
|
||||
const alloc_buf = @as([*]u8, @ptrCast(node))[0..node.data];
|
||||
self.child_allocator.rawFree(alloc_buf, align_bits, @returnAddress());
|
||||
it = next_it;
|
||||
}
|
||||
}
|
||||
|
||||
pub const ResetMode = union(enum) {
|
||||
/// Releases all allocated memory in the arena.
|
||||
free_all,
|
||||
/// This will pre-heat the arena for future allocations by allocating a
|
||||
/// large enough buffer for all previously done allocations.
|
||||
/// Preheating will speed up the allocation process by invoking the backing allocator
|
||||
/// less often than before. If `reset()` is used in a loop, this means that after the
|
||||
/// biggest operation, no memory allocations are performed anymore.
|
||||
retain_capacity,
|
||||
/// This is the same as `retain_capacity`, but the memory will be shrunk to
|
||||
/// this value if it exceeds the limit.
|
||||
retain_with_limit: usize,
|
||||
};
|
||||
/// Queries the current memory use of this arena.
|
||||
/// This will **not** include the storage required for internal keeping.
|
||||
pub fn queryCapacity(self: ArenaAllocator) usize {
|
||||
var size: usize = 0;
|
||||
var it = self.state.buffer_list.first;
|
||||
while (it) |node| : (it = node.next) {
|
||||
// Compute the actually allocated size excluding the
|
||||
// linked list node.
|
||||
size += node.data - @sizeOf(BufNode);
|
||||
}
|
||||
return size;
|
||||
}
|
||||
/// Resets the arena allocator and frees all allocated memory.
|
||||
///
|
||||
/// `mode` defines how the currently allocated memory is handled.
|
||||
/// See the variant documentation for `ResetMode` for the effects of each mode.
|
||||
///
|
||||
/// The function will return whether the reset operation was successful or not.
|
||||
/// If the reallocation failed `false` is returned. The arena will still be fully
|
||||
/// functional in that case, all memory is released. Future allocations just might
|
||||
/// be slower.
|
||||
///
|
||||
/// NOTE: If `mode` is `free_all`, the function will always return `true`.
|
||||
pub fn reset(self: *ArenaAllocator, mode: ResetMode) bool {
|
||||
// Some words on the implementation:
|
||||
// The reset function can be implemented with two basic approaches:
|
||||
// - Counting how many bytes were allocated since the last reset, and storing that
|
||||
// information in State. This will make reset fast and alloc only a teeny tiny bit
|
||||
// slower.
|
||||
// - Counting how many bytes were allocated by iterating the chunk linked list. This
|
||||
// will make reset slower, but alloc() keeps the same speed when reset() as if reset()
|
||||
// would not exist.
|
||||
//
|
||||
// The second variant was chosen for implementation, as with more and more calls to reset(),
|
||||
// the function will get faster and faster. At one point, the complexity of the function
|
||||
// will drop to amortized O(1), as we're only ever having a single chunk that will not be
|
||||
// reallocated, and we're not even touching the backing allocator anymore.
|
||||
//
|
||||
// Thus, only the first handful of calls to reset() will actually need to iterate the linked
|
||||
// list, all future calls are just taking the first node, and only resetting the `end_index`
|
||||
// value.
|
||||
const requested_capacity = switch (mode) {
|
||||
.retain_capacity => self.queryCapacity(),
|
||||
.retain_with_limit => |limit| @min(limit, self.queryCapacity()),
|
||||
.free_all => 0,
|
||||
};
|
||||
if (requested_capacity == 0) {
|
||||
// just reset when we don't have anything to reallocate
|
||||
self.deinit();
|
||||
self.state = State{};
|
||||
return true;
|
||||
}
|
||||
const total_size = requested_capacity + @sizeOf(BufNode);
|
||||
const align_bits = std.math.log2_int(usize, @alignOf(BufNode));
|
||||
// Free all nodes except for the last one
|
||||
var it = self.state.buffer_list.first;
|
||||
const maybe_first_node = while (it) |node| {
|
||||
// this has to occur before the free because the free frees node
|
||||
const next_it = node.next;
|
||||
if (next_it == null)
|
||||
break node;
|
||||
const alloc_buf = @as([*]u8, @ptrCast(node))[0..node.data];
|
||||
self.child_allocator.rawFree(alloc_buf, align_bits, @returnAddress());
|
||||
it = next_it;
|
||||
} else null;
|
||||
std.debug.assert(maybe_first_node == null or maybe_first_node.?.next == null);
|
||||
// reset the state before we try resizing the buffers, so we definitely have reset the arena to 0.
|
||||
self.state.end_index = 0;
|
||||
if (maybe_first_node) |first_node| {
|
||||
self.state.buffer_list.first = first_node;
|
||||
// perfect, no need to invoke the child_allocator
|
||||
if (first_node.data == total_size)
|
||||
return true;
|
||||
const first_alloc_buf = @as([*]u8, @ptrCast(first_node))[0..first_node.data];
|
||||
if (self.child_allocator.rawResize(first_alloc_buf, align_bits, total_size, @returnAddress())) {
|
||||
// successful resize
|
||||
first_node.data = total_size;
|
||||
} else {
|
||||
// manual realloc
|
||||
const new_ptr = self.child_allocator.rawAlloc(total_size, align_bits, @returnAddress()) orelse {
|
||||
// we failed to preheat the arena properly, signal this to the user.
|
||||
return false;
|
||||
};
|
||||
self.child_allocator.rawFree(first_alloc_buf, align_bits, @returnAddress());
|
||||
const node = @as(*BufNode, @ptrCast(@alignCast(new_ptr)));
|
||||
node.* = .{ .data = total_size };
|
||||
self.state.buffer_list.first = node;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
fn createNode(self: *ArenaAllocator, prev_len: usize, minimum_size: usize) ?*BufNode {
|
||||
const actual_min_size = minimum_size + (@sizeOf(BufNode) + 16);
|
||||
const big_enough_len = prev_len + actual_min_size;
|
||||
const len = big_enough_len + big_enough_len / 2;
|
||||
const log2_align = comptime std.math.log2_int(usize, @alignOf(BufNode));
|
||||
const ptr = self.child_allocator.rawAlloc(len, log2_align, @returnAddress()) orelse
|
||||
return null;
|
||||
const buf_node = @as(*BufNode, @ptrCast(@alignCast(ptr)));
|
||||
buf_node.* = .{ .data = len };
|
||||
self.state.buffer_list.prepend(buf_node);
|
||||
self.state.end_index = 0;
|
||||
return buf_node;
|
||||
}
|
||||
|
||||
fn alloc(ctx: *anyopaque, n: usize, log2_ptr_align: u8, ra: usize) ?[*]u8 {
|
||||
const self = @as(*ArenaAllocator, @ptrCast(@alignCast(ctx)));
|
||||
_ = ra;
|
||||
|
||||
const ptr_align = @as(usize, 1) << @as(Allocator.Log2Align, @intCast(log2_ptr_align));
|
||||
var cur_node = if (self.state.buffer_list.first) |first_node|
|
||||
first_node
|
||||
else
|
||||
(self.createNode(0, n + ptr_align) orelse return null);
|
||||
while (true) {
|
||||
const cur_alloc_buf = @as([*]u8, @ptrCast(cur_node))[0..cur_node.data];
|
||||
const cur_buf = cur_alloc_buf[@sizeOf(BufNode)..];
|
||||
const addr = @intFromPtr(cur_buf.ptr) + self.state.end_index;
|
||||
const adjusted_addr = mem.alignForward(usize, addr, ptr_align);
|
||||
const adjusted_index = self.state.end_index + (adjusted_addr - addr);
|
||||
const new_end_index = adjusted_index + n;
|
||||
|
||||
if (new_end_index <= cur_buf.len) {
|
||||
const result = cur_buf[adjusted_index..new_end_index];
|
||||
self.state.end_index = new_end_index;
|
||||
return result.ptr;
|
||||
}
|
||||
|
||||
const bigger_buf_size = @sizeOf(BufNode) + new_end_index;
|
||||
const log2_align = comptime std.math.log2_int(usize, @alignOf(BufNode));
|
||||
if (self.child_allocator.rawResize(cur_alloc_buf, log2_align, bigger_buf_size, @returnAddress())) {
|
||||
cur_node.data = bigger_buf_size;
|
||||
} else {
|
||||
// Allocate a new node if that's not possible
|
||||
cur_node = self.createNode(cur_buf.len, n + ptr_align) orelse return null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn resize(ctx: *anyopaque, buf: []u8, log2_buf_align: u8, new_len: usize, ret_addr: usize) bool {
|
||||
const self = @as(*ArenaAllocator, @ptrCast(@alignCast(ctx)));
|
||||
_ = log2_buf_align;
|
||||
_ = ret_addr;
|
||||
|
||||
const cur_node = self.state.buffer_list.first orelse return false;
|
||||
const cur_buf = @as([*]u8, @ptrCast(cur_node))[@sizeOf(BufNode)..cur_node.data];
|
||||
if (@intFromPtr(cur_buf.ptr) + self.state.end_index != @intFromPtr(buf.ptr) + buf.len) {
|
||||
// It's not the most recent allocation, so it cannot be expanded,
|
||||
// but it's fine if they want to make it smaller.
|
||||
return new_len <= buf.len;
|
||||
}
|
||||
|
||||
if (buf.len >= new_len) {
|
||||
self.state.end_index -= buf.len - new_len;
|
||||
return true;
|
||||
} else if (cur_buf.len - self.state.end_index >= new_len - buf.len) {
|
||||
self.state.end_index += new_len - buf.len;
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
fn free(ctx: *anyopaque, buf: []u8, log2_buf_align: u8, ret_addr: usize) void {
|
||||
_ = log2_buf_align;
|
||||
_ = ret_addr;
|
||||
|
||||
const self = @as(*ArenaAllocator, @ptrCast(@alignCast(ctx)));
|
||||
|
||||
const cur_node = self.state.buffer_list.first orelse return;
|
||||
const cur_buf = @as([*]u8, @ptrCast(cur_node))[@sizeOf(BufNode)..cur_node.data];
|
||||
|
||||
if (@intFromPtr(cur_buf.ptr) + self.state.end_index == @intFromPtr(buf.ptr) + buf.len) {
|
||||
self.state.end_index -= buf.len;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
test "ArenaAllocator (reset with preheating)" {
|
||||
var arena_allocator = ArenaAllocator.init(std.testing.allocator);
|
||||
defer arena_allocator.deinit();
|
||||
// provides some variance in the allocated data
|
||||
var rng_src = std.rand.DefaultPrng.init(19930913);
|
||||
const random = rng_src.random();
|
||||
var rounds: usize = 25;
|
||||
while (rounds > 0) {
|
||||
rounds -= 1;
|
||||
_ = arena_allocator.reset(.retain_capacity);
|
||||
var alloced_bytes: usize = 0;
|
||||
var total_size: usize = random.intRangeAtMost(usize, 256, 16384);
|
||||
while (alloced_bytes < total_size) {
|
||||
const size = random.intRangeAtMost(usize, 16, 256);
|
||||
const alignment = 32;
|
||||
const slice = try arena_allocator.allocator().alignedAlloc(u8, alignment, size);
|
||||
try std.testing.expect(std.mem.isAligned(@intFromPtr(slice.ptr), alignment));
|
||||
try std.testing.expectEqual(size, slice.len);
|
||||
alloced_bytes += slice.len;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
test "ArenaAllocator (reset while retaining a buffer)" {
|
||||
var arena_allocator = ArenaAllocator.init(std.testing.allocator);
|
||||
defer arena_allocator.deinit();
|
||||
const a = arena_allocator.allocator();
|
||||
|
||||
// Create two internal buffers
|
||||
_ = try a.alloc(u8, 1);
|
||||
_ = try a.alloc(u8, 1000);
|
||||
|
||||
// Check that we have at least two buffers
|
||||
try std.testing.expect(arena_allocator.state.buffer_list.first.?.next != null);
|
||||
|
||||
// This retains the first allocated buffer
|
||||
try std.testing.expect(arena_allocator.reset(.{ .retain_with_limit = 1 }));
|
||||
}
|
||||
@@ -55,7 +55,7 @@ pub const version: @import("./install/semver.zig").Version = .{
|
||||
|
||||
pub fn setThreadName(name: StringTypes.stringZ) void {
|
||||
if (Environment.isLinux) {
|
||||
_ = std.os.prctl(.SET_NAME, .{@ptrToInt(name.ptr)}) catch 0;
|
||||
_ = std.os.prctl(.SET_NAME, .{@intFromPtr(name.ptr)}) catch 0;
|
||||
} else if (Environment.isMac) {
|
||||
_ = std.c.pthread_setname_np(name);
|
||||
}
|
||||
|
||||
@@ -2,20 +2,19 @@ const std = @import("std");
|
||||
|
||||
const FeatureFlags = @import("./feature_flags.zig");
|
||||
const Environment = @import("./env.zig");
|
||||
const Wyhash = std.hash.Wyhash;
|
||||
const FixedBufferAllocator = std.heap.FixedBufferAllocator;
|
||||
const constStrToU8 = @import("root").bun.constStrToU8;
|
||||
const bun = @import("root").bun;
|
||||
pub fn isSliceInBuffer(slice: anytype, buffer: anytype) bool {
|
||||
return (@ptrToInt(&buffer) <= @ptrToInt(slice.ptr) and (@ptrToInt(slice.ptr) + slice.len) <= (@ptrToInt(buffer) + buffer.len));
|
||||
return (@intFromPtr(&buffer) <= @intFromPtr(slice.ptr) and (@intFromPtr(slice.ptr) + slice.len) <= (@intFromPtr(buffer) + buffer.len));
|
||||
}
|
||||
|
||||
pub fn sliceRange(slice: []const u8, buffer: []const u8) ?[2]u32 {
|
||||
return if (@ptrToInt(buffer.ptr) <= @ptrToInt(slice.ptr) and
|
||||
(@ptrToInt(slice.ptr) + slice.len) <= (@ptrToInt(buffer.ptr) + buffer.len))
|
||||
return if (@intFromPtr(buffer.ptr) <= @intFromPtr(slice.ptr) and
|
||||
(@intFromPtr(slice.ptr) + slice.len) <= (@intFromPtr(buffer.ptr) + buffer.len))
|
||||
[2]u32{
|
||||
@truncate(u32, @ptrToInt(slice.ptr) - @ptrToInt(buffer.ptr)),
|
||||
@truncate(u32, slice.len),
|
||||
@as(u32, @truncate(@intFromPtr(slice.ptr) - @intFromPtr(buffer.ptr))),
|
||||
@as(u32, @truncate(slice.len)),
|
||||
}
|
||||
else
|
||||
null;
|
||||
@@ -53,7 +52,6 @@ pub const Result = struct {
|
||||
return r.index >= count;
|
||||
}
|
||||
};
|
||||
const Seed = 999;
|
||||
|
||||
pub const NotFound = IndexType{
|
||||
.index = std.math.maxInt(u31),
|
||||
@@ -429,7 +427,7 @@ pub fn BSSStringList(comptime _count: usize, comptime _item_length: usize) type
|
||||
var result = IndexType{ .index = std.math.maxInt(u31), .is_overflow = instance.slice_buf_used > max_index };
|
||||
|
||||
if (result.is_overflow) {
|
||||
result.index = @intCast(u31, self.overflow_list.len());
|
||||
result.index = @as(u31, @intCast(self.overflow_list.len()));
|
||||
} else {
|
||||
result.index = instance.slice_buf_used;
|
||||
instance.slice_buf_used += 1;
|
||||
@@ -488,7 +486,7 @@ pub fn BSSMap(comptime ValueType: type, comptime count: anytype, comptime store_
|
||||
|
||||
pub fn getOrPut(self: *Self, denormalized_key: []const u8) !Result {
|
||||
const key = if (comptime remove_trailing_slashes) std.mem.trimRight(u8, denormalized_key, "/") else denormalized_key;
|
||||
const _key = Wyhash.hash(Seed, key);
|
||||
const _key = bun.hash(key);
|
||||
|
||||
self.mutex.lock();
|
||||
defer self.mutex.unlock();
|
||||
@@ -516,7 +514,7 @@ pub fn BSSMap(comptime ValueType: type, comptime count: anytype, comptime store_
|
||||
|
||||
pub fn get(self: *Self, denormalized_key: []const u8) ?*ValueType {
|
||||
const key = if (comptime remove_trailing_slashes) std.mem.trimRight(u8, denormalized_key, "/") else denormalized_key;
|
||||
const _key = Wyhash.hash(Seed, key);
|
||||
const _key = bun.hash(key);
|
||||
self.mutex.lock();
|
||||
defer self.mutex.unlock();
|
||||
const index = self.index.get(_key) orelse return null;
|
||||
@@ -577,7 +575,7 @@ pub fn BSSMap(comptime ValueType: type, comptime count: anytype, comptime store_
|
||||
|
||||
const key = if (comptime remove_trailing_slashes) std.mem.trimRight(u8, denormalized_key, "/") else denormalized_key;
|
||||
|
||||
const _key = Wyhash.hash(Seed, key);
|
||||
const _key = bun.hash(key);
|
||||
_ = self.index.remove(_key);
|
||||
// const index = self.index.get(_key) orelse return;
|
||||
// switch (index) {
|
||||
@@ -695,7 +693,7 @@ pub fn BSSMap(comptime ValueType: type, comptime count: anytype, comptime store_
|
||||
if (!result.index.is_overflow) {
|
||||
instance.key_list_slices[result.index.index] = slice;
|
||||
} else {
|
||||
if (@intCast(u31, instance.key_list_overflow.items.len) > result.index.index) {
|
||||
if (@as(u31, @intCast(instance.key_list_overflow.items.len)) > result.index.index) {
|
||||
const existing_slice = instance.key_list_overflow.items[result.index.index];
|
||||
if (!isKeyStaticallyAllocated(existing_slice)) {
|
||||
self.map.allocator.free(existing_slice);
|
||||
|
||||
@@ -93,7 +93,7 @@ pub const Reader = struct {
|
||||
},
|
||||
.Enum => |type_info| {
|
||||
const enum_values = try this.read(length * @sizeOf(type_info.tag_type));
|
||||
return @ptrCast([*]T, enum_values.ptr)[0..length];
|
||||
return @as([*]T, @ptrCast(enum_values.ptr))[0..length];
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
@@ -156,7 +156,7 @@ pub const Reader = struct {
|
||||
.Packed => {
|
||||
const sizeof = @sizeOf(T);
|
||||
var slice = try this.read(sizeof);
|
||||
return @ptrCast(*align(1) T, slice[0..sizeof]).*;
|
||||
return @as(*align(1) T, @ptrCast(slice[0..sizeof])).*;
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
@@ -201,7 +201,7 @@ pub fn Writer(comptime WritableStream: type) type {
|
||||
}
|
||||
|
||||
pub inline fn writeEnum(this: *Self, val: anytype) !void {
|
||||
try this.writeInt(@enumToInt(val));
|
||||
try this.writeInt(@intFromEnum(val));
|
||||
}
|
||||
|
||||
pub fn writeValue(this: *Self, comptime SliceType: type, slice: SliceType) !void {
|
||||
@@ -264,7 +264,7 @@ pub fn Writer(comptime WritableStream: type) type {
|
||||
}
|
||||
|
||||
pub fn writeArray(this: *Self, comptime T: type, slice: anytype) !void {
|
||||
try this.writeInt(@truncate(u32, slice.len));
|
||||
try this.writeInt(@as(u32, @truncate(slice.len)));
|
||||
|
||||
switch (T) {
|
||||
u8 => {
|
||||
|
||||
@@ -95,7 +95,7 @@ pub const Features = struct {
|
||||
|
||||
pub const Serializer = struct {
|
||||
inline fn shiftIndex(index: u32) !u32 {
|
||||
return @intCast(u32, @as(Bitset.MaskInt, 1) << @intCast(Bitset.ShiftInt, index));
|
||||
return @as(u32, @intCast(@as(Bitset.MaskInt, 1) << @as(Bitset.ShiftInt, @intCast(index))));
|
||||
}
|
||||
|
||||
fn writeField(comptime WriterType: type, writer: WriterType, field_name: string, index: u32) !void {
|
||||
@@ -200,7 +200,7 @@ pub const Event = struct {
|
||||
pub fn init(comptime name: EventName) Event {
|
||||
const millis = std.time.milliTimestamp();
|
||||
|
||||
-const timestamp = if (millis < 0) 0 else @intCast(u64, millis);
+const timestamp = if (millis < 0) 0 else @as(u64, @intCast(millis));

 return Event{ .timestamp = timestamp, .data = @unionInit(Data, @tagName(name), {}) };
 }
@@ -239,7 +239,7 @@ pub const GenerateHeader = struct {
 return Analytics.EventListHeader{
 .machine_id = GenerateMachineID.forMac() catch Analytics.Uint64{},
 .platform = GeneratePlatform.forMac(),
-.build_id = comptime @truncate(u32, Global.build_id),
+.build_id = comptime @as(u32, @truncate(Global.build_id)),
 .session_id = random.random().int(u32),
 .project_id = project_id,
 };
@@ -249,7 +249,7 @@ pub const GenerateHeader = struct {
 return Analytics.EventListHeader{
 .machine_id = GenerateMachineID.forLinux() catch Analytics.Uint64{},
 .platform = GeneratePlatform.forLinux(),
-.build_id = comptime @truncate(u32, Global.build_id),
+.build_id = comptime @as(u32, @truncate(Global.build_id)),
 .session_id = random.random().int(u32),
 .project_id = project_id,
 };
@@ -261,7 +261,7 @@ pub const GenerateHeader = struct {
 pub const GeneratePlatform = struct {
 var osversion_name: [32]u8 = undefined;
 pub fn forMac() Analytics.Platform {
-@memset(&osversion_name, 0, osversion_name.len);
+@memset(&osversion_name, 0);

 var platform = Analytics.Platform{ .os = Analytics.OperatingSystem.macos, .version = &[_]u8{}, .arch = platform_arch };
 var len = osversion_name.len - 1;
@@ -340,7 +340,7 @@ pub const GenerateHeader = struct {
 offset = std.mem.indexOfScalar(u8, out, '"') orelse return Analytics.Uint64{};
 out = out[0..offset];

-const hash = std.hash.Wyhash.hash(0, std.mem.trim(u8, out, "\n\r "));
+const hash = bun.hash(std.mem.trim(u8, out, "\n\r "));
 var hash_bytes = std.mem.asBytes(&hash);
 return Analytics.Uint64{
 .first = std.mem.readIntNative(u32, hash_bytes[0..4]),
@@ -357,7 +357,7 @@ pub const GenerateHeader = struct {
 defer file.close();
 var read_count = try file.read(&linux_machine_id);

-const hash = std.hash.Wyhash.hash(0, std.mem.trim(u8, linux_machine_id[0..read_count], "\n\r "));
+const hash = bun.hash(std.mem.trim(u8, linux_machine_id[0..read_count], "\n\r "));
 var hash_bytes = std.mem.asBytes(&hash);
 return Analytics.Uint64{
 .first = std.mem.readIntNative(u32, hash_bytes[0..4]),
@@ -380,10 +380,10 @@ fn spawn() !void {}

 const headers_buf: string = "Content-Type binary/peechy";
 const header_entry = Headers.Kv{
-.name = .{ .offset = 0, .length = @intCast(u32, "Content-Type".len) },
+.name = .{ .offset = 0, .length = @as(u32, @intCast("Content-Type".len)) },
 .value = .{
 .offset = std.mem.indexOf(u8, headers_buf, "binary/peechy").?,
-.length = @intCast(u32, "binary/peechy".len),
+.length = @as(u32, @intCast("binary/peechy".len)),
 },
 };

@@ -435,7 +435,7 @@ pub const EventList = struct {
 in_buffer: MutableString,

 pub fn init() EventList {
-random = std.rand.DefaultPrng.init(@intCast(u64, std.time.milliTimestamp()));
+random = std.rand.DefaultPrng.init(@as(u64, @intCast(std.time.milliTimestamp())));
 return EventList{
 .header = GenerateHeader.generate(),
 .events = std.ArrayList(Event).init(default_allocator),
@@ -472,12 +472,12 @@ pub const EventList = struct {
 0;
 const now = std.time.nanoTimestamp();

-this.header.session_length = @truncate(u32, @intCast(u64, (now - start_time)) / std.time.ns_per_ms);
+this.header.session_length = @as(u32, @truncate(@as(u64, @intCast((now - start_time))) / std.time.ns_per_ms));
 this.header.feature_usage = Features.toInt();

 var list = Analytics.EventList{
 .header = this.header,
-.event_count = @intCast(u32, this.events.items.len),
+.event_count = @as(u32, @intCast(this.events.items.len)),
 };

 try list.encode(&analytics_writer);
@@ -540,8 +540,8 @@ pub const EventList = struct {
 }

 @atomicStore(bool, &is_stuck, retry_remaining == 0, .Release);
-stuck_count += @intCast(u8, @boolToInt(retry_remaining == 0));
-stuck_count *= @intCast(u8, @boolToInt(retry_remaining == 0));
+stuck_count += @as(u8, @intCast(@intFromBool(retry_remaining == 0)));
+stuck_count *= @as(u8, @intCast(@intFromBool(retry_remaining == 0)));
 disabled = disabled or stuck_count > 4;

 this.in_buffer.reset();
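The Zig hunks above are a mechanical migration to the Zig 0.11 builtin syntax: cast builtins such as `@intCast` and `@truncate` no longer take a destination type (the type comes from the result location, usually via `@as`), `@boolToInt` was renamed to `@intFromBool`, `@memset` takes a fill value instead of a value plus length, and the Wyhash call is routed through the repo's `bun.hash` helper. Below is a minimal standalone sketch of the new cast style; it is not code from the patch.

```zig
const std = @import("std");

// Standalone sketch of the Zig 0.11 builtin style the diff migrates to; not code from the patch.
pub fn main() void {
    const millis: i64 = std.time.milliTimestamp();

    // pre-0.11: @intCast(u64, millis)
    const timestamp: u64 = if (millis < 0) 0 else @as(u64, @intCast(millis));

    // pre-0.11: @truncate(u32, timestamp)
    const low_bits: u32 = @as(u32, @truncate(timestamp));

    // pre-0.11: @boolToInt(cond); renamed to @intFromBool in 0.11
    const flag: u8 = @intFromBool(low_bits == 0);

    // pre-0.11: @memset(&buf, 0, buf.len); the length argument is gone
    var buf: [32]u8 = undefined;
    @memset(&buf, 0);

    std.debug.print("{d} {d} {d} {d}\n", .{ timestamp, low_bits, flag, buf[0] });
}
```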
src/api/demo/.gitignore (vendored, 34 lines removed)
@@ -1,34 +0,0 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.

# dependencies
/node_modules
/.pnp
.pnp.js

# testing
/coverage

# next.js
/.next/
/out/

# production
/build

# misc
.DS_Store
*.pem

# debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*

# local env files
.env.local
.env.development.local
.env.test.local
.env.production.local

# vercel
.vercel
@@ -1,34 +0,0 @@
This is a [Next.js](https://nextjs.org/) project bootstrapped with [`create-next-app`](https://github.com/vercel/next.js/tree/canary/packages/create-next-app).

## Getting Started

First, run the development server:

```bash
npm run dev
# or
yarn dev
```

Open [http://localhost:3000](http://localhost:3000) with your browser to see the result.

You can start editing the page by modifying `pages/index.js`. The page auto-updates as you edit the file.

[API routes](https://nextjs.org/docs/api-routes/introduction) can be accessed on [http://localhost:3000/api/hello](http://localhost:3000/api/hello). This endpoint can be edited in `pages/api/hello.js`.

The `pages/api` directory is mapped to `/api/*`. Files in this directory are treated as [API routes](https://nextjs.org/docs/api-routes/introduction) instead of React pages.

## Learn More

To learn more about Next.js, take a look at the following resources:

- [Next.js Documentation](https://nextjs.org/docs) - learn about Next.js features and API.
- [Learn Next.js](https://nextjs.org/learn) - an interactive Next.js tutorial.

You can check out [the Next.js GitHub repository](https://github.com/vercel/next.js/) - your feedback and contributions are welcome!

## Deploy on Vercel

The easiest way to deploy your Next.js app is to use the [Vercel Platform](https://vercel.com/new?utm_medium=default-template&filter=next.js&utm_source=create-next-app&utm_campaign=create-next-app-readme) from the creators of Next.js.

Check out our [Next.js deployment documentation](https://nextjs.org/docs/deployment) for more details.
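The deleted README above points at `pages/api/hello.js` without showing it. For orientation only, a typical create-next-app API route looks like the sketch below; this is illustrative and is not the contents of any file in this diff.

```ts
// Hypothetical pages/api/hello.ts, shown only to illustrate the pages/api -> /api/* mapping
// the README describes; it is not part of this diff.
import type { NextApiRequest, NextApiResponse } from "next";

export default function handler(req: NextApiRequest, res: NextApiResponse) {
  // Reachable at http://localhost:3000/api/hello while the dev server is running.
  res.status(200).json({ name: "Hello from /api/hello" });
}
```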
Binary file not shown.
@@ -1,280 +0,0 @@
import * as Schema from "../../schema";
import { ByteBuffer } from "peechy";
import path from "path";
import { Loader } from "../schema";
// import { transform as sucraseTransform } from "sucrase";

export interface WebAssemblyModule {
  init(): number;
  transform(a: number): number;
  bun_malloc(a: number): number;
  bun_free(a: number): number;
  scan(a: number): number;
}

const wasm_imports_sym: symbol | string =
  process.env.NODE_ENV === "development" ? "wasm_imports" : Symbol("wasm_imports");

const ptr_converter = new ArrayBuffer(16);
const ptr_float = new BigUint64Array(ptr_converter);
const slice = new Uint32Array(ptr_converter);

const Wasi = {
  clock_time_get(clk_id, tp) {
    return Date.now();
  },
  environ_sizes_get() {
    debugger;
    return 0;
  },
  environ_get(__environ, environ_buf) {
    debugger;
    return 0;
  },

  fd_close(fd) {
    debugger;
    return 0;
  },
  proc_exit() {},

  fd_seek(fd, offset_bigint, whence, newOffset) {
    debugger;
  },
  fd_write(fd, iov, iovcnt, pnum) {
    debugger;
  },
};

var scratch: Uint8Array;
var scratch2: Uint8Array;

export class Bun {
  static has_initialized = false;
  static wasm_source: WebAssembly.WebAssemblyInstantiatedSource = null;
  static get wasm_exports(): WebAssemblyModule {
    return Bun.wasm_source.instance.exports as any;
  }
  static get memory(): WebAssembly.Memory {
    return Bun.wasm_source.instance.exports.memory as any;
  }

  static memory_array: Uint8Array;

  static _decoder: TextDecoder;

  static _wasmPtrToSlice(offset: number | bigint) {
    ptr_float[0] = typeof offset === "number" ? BigInt(offset) : offset;
    return new Uint8Array(Bun.memory.buffer, slice[0], slice[1]);
  }

  static _wasmPtrLenToString(slice: number) {
    if (!Bun._decoder) {
      Bun._decoder = new TextDecoder("utf8");
    }

    const region = this._wasmPtrToSlice(slice);
    return Bun._decoder.decode(region);
  }

  // We don't want people to be calling these manually
  static [wasm_imports_sym] = {
    console_log(slice: number) {
      console.log(Bun._wasmPtrLenToString(slice));
    },
    console_error(slice: number) {
      console.error(Bun._wasmPtrLenToString(slice));
    },
    console_warn(slice: number) {
      console.warn(Bun._wasmPtrLenToString(slice));
    },
    console_info(slice: number) {
      console.info(Bun._wasmPtrLenToString(slice));
    },

    __indirect_function_table: new WebAssembly.Table({
      initial: 0,
      element: "anyfunc",
    }),
    __stack_pointer: new WebAssembly.Global({
      mutable: true,
      value: "i32",
    }),
    __multi3(one: number, two: number) {
      return Math.imul(one | 0, two | 0);
    },
    fmod(one: number, two: number) {
      return one % two;
    },
    memset(ptr: number, value: number, len: number) {
      Bun.memory_array.fill(value, ptr, ptr + len);
    },
    memcpy(ptr: number, value: number, len: number) {
      Bun.memory_array.copyWithin(ptr, value, value + len);
    },
    // These functions convert a to an unsigned long long, rounding toward zero. Negative values all become zero.
    __fixunsdfti(a: number) {
      return Math.floor(a);
    },
    // These functions return the remainder of the unsigned division of a and b.
    __umodti3(a: number, b: number) {
      return (a | 0) % (b | 0);
    },
    // These functions return the quotient of the unsigned division of a and b.
    __udivti3(a: number, b: number) {
      return (a | 0) / (b | 0);
    },
    // These functions return the result of shifting a left by b bits.
    __ashlti3(a: number, b: number) {
      return (a | 0) >> (b | 0);
    },
    /* Returns: convert a to a double, rounding toward even. */
    __floatuntidf(a: number) {
      const mod = a % 2;
      if (mod === 0) {
        return Math.ceil(a);
      } else if (mod === 1) {
        return Math.floor(a);
      }
    },
    emscripten_notify_memory_growth() {},
  };

  static async init(url) {
    // globalThis.sucraseTransform = sucraseTransform;
    scratch = new Uint8Array(8096);

    if (Bun.has_initialized) {
      return;
    }

    Bun.wasm_source = await globalThis.WebAssembly.instantiateStreaming(fetch(url), {
      env: Bun[wasm_imports_sym],
      wasi_snapshot_preview1: Wasi,
    });

    const res = Bun.wasm_exports.init();
    if (res < 0) {
      throw `[Bun] Failed to initialize WASM module: code ${res}`;
    } else {
      console.log("WASM loaded.");
    }

    Bun.has_initialized = true;
  }

  static transformSync(content: Uint8Array | string, file_name: string) {
    if (!Bun.has_initialized) {
      throw "Please run await Bun.init(wasm_url) before using this.";
    }

    // if (process.env.NODE_ENV === "development") {
    // console.time("[Bun] Transform " + file_name);
    // }

    const bb = new ByteBuffer(scratch);
    bb.length = 0;
    bb.index = 0;
    var contents_buffer;
    if (typeof content === "string") {
      if (!scratch2) {
        scratch2 = new Uint8Array(content.length * 2);
      }

      let i = 0;
      for (; i < content.length; i++) {
        if (i > scratch2.length) {
          var scratch3 = new Uint8Array(scratch2.length * 2);
          scratch3.set(scratch2);
          scratch2 = scratch3;
        }
        scratch2[i] = content.charCodeAt(i);
      }
      contents_buffer = scratch2.subarray(0, i);
    } else {
      contents_buffer = content;
    }

    Schema.encodeTransform(
      {
        contents: contents_buffer,
        path: file_name,
        loader: {
          ".jsx": Loader.jsx,
          ".tsx": Loader.tsx,
          ".ts": Loader.ts,
          ".js": Loader.js,
          ".json": Loader.json,
        }[path.extname(file_name)],
      },
      bb,
    );
    const data = bb.toUint8Array();

    const input_ptr = Bun.wasm_exports.bun_malloc(data.length);
    var buffer = this._wasmPtrToSlice(input_ptr);
    buffer.set(data);

    const resp_ptr = Bun.wasm_exports.transform(input_ptr);
    var _bb = new ByteBuffer(this._wasmPtrToSlice(resp_ptr));
    const response = Schema.decodeTransformResponse(_bb);
    Bun.wasm_exports.bun_free(input_ptr);
    scratch = bb.data;
    return response;
  }

  static scan(content: Uint8Array | string, file_name: string, loader?: Loader) {
    if (!Bun.has_initialized) {
      throw "Please run await Bun.init(wasm_url) before using this.";
    }

    // if (process.env.NODE_ENV === "development") {
    // console.time("[Bun] Transform " + file_name);
    // }
    scratch.fill(0);
    const bb = new ByteBuffer(scratch);
    bb.length = 0;
    bb.index = 0;
    var contents_buffer;
    if (typeof content === "string") {
      if (!scratch2) {
        scratch2 = new Uint8Array(content.length * 2);
      }
      const encode_into = new TextEncoder().encodeInto(content, scratch2);
      contents_buffer = scratch2.subarray(0, encode_into.written);
    } else {
      contents_buffer = content;
    }

    Schema.encodeScan(
      {
        contents: contents_buffer,
        path: file_name,
        loader:
          loader ||
          {
            ".jsx": Loader.jsx,
            ".tsx": Loader.tsx,
            ".ts": Loader.ts,
            ".js": Loader.js,
            ".json": Loader.json,
          }[path.extname(file_name)],
      },
      bb,
    );
    const data = bb.toUint8Array();

    const input_ptr = Bun.wasm_exports.bun_malloc(data.length);
    var buffer = this._wasmPtrToSlice(input_ptr);
    buffer.set(data);

    const resp_ptr = Bun.wasm_exports.scan(input_ptr);
    var _bb = new ByteBuffer(this._wasmPtrToSlice(resp_ptr));
    const response = Schema.decodeScanResult(_bb);
    Bun.wasm_exports.bun_free(input_ptr);
    scratch = bb.data;
    return response;
  }
}

globalThis.Bun = Bun;
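The deleted file above decodes every wasm return value with `_wasmPtrToSlice`: the wasm side hands back a single 64-bit integer whose low 32 bits are a byte offset into wasm memory and whose high 32 bits are a length, and the JS side splits it by aliasing one ArrayBuffer with a `BigUint64Array` and a `Uint32Array`. A self-contained sketch of just that decode step follows; the function name is illustrative, and it assumes a little-endian host, which the aliasing trick above also assumes.

```ts
// Sketch of the packed-pointer decode used by the deleted file; decodePackedSlice is a
// hypothetical name, not an export of that file.
const converter = new ArrayBuffer(8);
const asU64 = new BigUint64Array(converter);
const asU32 = new Uint32Array(converter);

function decodePackedSlice(packed: bigint, memory: WebAssembly.Memory): Uint8Array {
  asU64[0] = packed; // write the 64-bit value once...
  const offset = asU32[0]; // ...low 32 bits: byte offset into wasm memory (little-endian host)
  const length = asU32[1]; // ...high 32 bits: byte length of the slice
  return new Uint8Array(memory.buffer, offset, length);
}
```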
@@ -1,70 +0,0 @@
import { transform as _transform, initialize } from "esbuild-wasm";
import initSwc, { transformSync as transformSyncSWC } from "@swc/wasm-web";
import { Bun } from "./api";

export async function start() {
  await initialize({
    worker: false,
    wasmURL: "/node_modules/esbuild-wasm/esbuild.wasm",
  });
  await Bun.init("/bun-wasm.wasm");
  await initSwc("/node_modules/@swc/wasm-web/wasm_bg.wasm");
}

const swcOptions = {
  sourceMaps: false,
  inlineSourcesContent: false,
  jsc: {
    target: "es2022",
    parser: {
      jsx: true,
      syntax: "typescript",
      tsx: false,
      decorators: false,
      dynamicImport: false,
    },
  },
};

export async function transform(contents, file) {
  var result: any = {
    timings: {
      esbuild: 0,
      bun: 0,
      swc: 0,
    },
  };
  result.timings.esbuild = performance.now();
  result.esbuild = await _transform(contents, {
    sourcefile: file,
    loader: file.substring(file.lastIndexOf(".") + 1),
  });
  result.timings.esbuild = performance.now() - result.timings.esbuild;

  result.timings.bun = performance.now();
  result.bun = Bun.transformSync(contents, file);
  result.timings.bun = performance.now() - result.timings.bun;

  if (file.substring(file.lastIndexOf(".") + 1) === "tsx") {
    swcOptions.jsc.parser.tsx = true;
    swcOptions.jsc.parser.syntax = "typescript";
  } else if (file.substring(file.lastIndexOf(".") + 1) === "jsx") {
    swcOptions.jsc.parser.tsx = false;
    swcOptions.jsc.parser.jsx = true;
    swcOptions.jsc.parser.syntax = "typescript";
  } else {
    swcOptions.jsc.parser.tsx = false;
    swcOptions.jsc.parser.jsx = false;
    swcOptions.jsc.parser.syntax = "javascript";
  }

  result.timings.swc = performance.now();
  result.swc = transformSyncSWC(contents, swcOptions as any);
  result.timings.swc = performance.now() - result.timings.swc;

  console.log("esbuild:", result.timings.esbuild, "ms");
  console.log("Bun:", result.timings.bun, "ms");
  console.log("SWC:", result.timings.swc, "ms");

  return result;
}
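Taken together, the two deleted demo modules wire up a three-way transpiler benchmark: `start()` initializes esbuild-wasm, the bun wasm module, and @swc/wasm-web once, and `transform()` runs all three on the same source and records per-tool timings. A hedged usage sketch is below; the `./compare` module path is an assumption, while the `start`/`transform` exports and the `timings` shape come from the code above.

```ts
// Hypothetical driver for the deleted demo modules; "./compare" is an assumed path.
import { start, transform } from "./compare";

async function main() {
  await start(); // loads esbuild-wasm, bun-wasm.wasm, and @swc/wasm-web once
  const source = "export const Hello = () => <div>hi</div>;";
  const result = await transform(source, "hello.tsx");
  console.table(result.timings); // { esbuild, bun, swc } timings in milliseconds
}

main();
```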
Some files were not shown because too many files have changed in this diff.