mirror of
https://github.com/oven-sh/bun
synced 2026-02-16 22:01:47 +00:00
Compare commits
146 Commits
bun-v0.7.3
...
bun-v0.8.0
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
20d42dfaa3 | ||
|
|
3556fa3b1e | ||
|
|
5e07fd4fbc | ||
|
|
c60385716b | ||
|
|
f3266ff436 | ||
|
|
b01764b31e | ||
|
|
851763174e | ||
|
|
8518fbb573 | ||
|
|
d86084dd8e | ||
|
|
52802a4c55 | ||
|
|
44e4d5852a | ||
|
|
3a45f2c71b | ||
|
|
9eeb7bdbff | ||
|
|
ed14b64e65 | ||
|
|
bca1bcf29c | ||
|
|
9027484ae1 | ||
|
|
91eacade97 | ||
|
|
6a02edef5d | ||
|
|
a61953bbfd | ||
|
|
f629365cb7 | ||
|
|
cdace9bffb | ||
|
|
664119841a | ||
|
|
397182b400 | ||
|
|
752e59f23c | ||
|
|
def5a85d90 | ||
|
|
1b8f569792 | ||
|
|
183b818462 | ||
|
|
9cf868fef4 | ||
|
|
ed803f7d1c | ||
|
|
c13a27121c | ||
|
|
8c23e77e99 | ||
|
|
c99a9ba33a | ||
|
|
f75b949524 | ||
|
|
3de9ce5f30 | ||
|
|
65280853ac | ||
|
|
3a9a6c63ac | ||
|
|
360acf5a80 | ||
|
|
d432dad666 | ||
|
|
eec5bfb23a | ||
|
|
53c755467b | ||
|
|
039404800f | ||
|
|
3c4b689050 | ||
|
|
394dd86797 | ||
|
|
62bde005b2 | ||
|
|
19054ebc35 | ||
|
|
507761b463 | ||
|
|
196620183f | ||
|
|
86ad015147 | ||
|
|
db09ed15fd | ||
|
|
bf517d9f8e | ||
|
|
e9b684c972 | ||
|
|
e8f0ed3beb | ||
|
|
3ca7fde363 | ||
|
|
c2ec47ff32 | ||
|
|
26036a390b | ||
|
|
943a664224 | ||
|
|
d0664f8377 | ||
|
|
6fd0043f6b | ||
|
|
0424fd8f6e | ||
|
|
cfbd1373e2 | ||
|
|
f74585ff01 | ||
|
|
b2f8ef4dff | ||
|
|
b0e76a965d | ||
|
|
cfce38858f | ||
|
|
540740c129 | ||
|
|
b1356718ad | ||
|
|
b9b50e39d6 | ||
|
|
6c3dabd84e | ||
|
|
d4438e9496 | ||
|
|
56d591b903 | ||
|
|
0486cea35a | ||
|
|
2634c64aa3 | ||
|
|
af23dab833 | ||
|
|
9efeef2e24 | ||
|
|
67a8b75183 | ||
|
|
bdaa712357 | ||
|
|
b682e5bf59 | ||
|
|
17c348ed0a | ||
|
|
f05a38757a | ||
|
|
553a471cdf | ||
|
|
1a6a52314f | ||
|
|
47450ed12c | ||
|
|
eab7b4c3ea | ||
|
|
0329061f15 | ||
|
|
1e96cbfb02 | ||
|
|
78defe7a87 | ||
|
|
b94433ce86 | ||
|
|
ca26780b27 | ||
|
|
117cee5ca5 | ||
|
|
43ebffedcd | ||
|
|
ccb9daf7a4 | ||
|
|
c6d3b375b8 | ||
|
|
4731a460a2 | ||
|
|
99af827f25 | ||
|
|
a6ec7fe9b1 | ||
|
|
513a6d0df3 | ||
|
|
115704b27b | ||
|
|
85b81624dc | ||
|
|
cf12d80f5e | ||
|
|
58e74eadb5 | ||
|
|
be05b93f39 | ||
|
|
e25833d009 | ||
|
|
e65535cc05 | ||
|
|
74f9fabd01 | ||
|
|
8d207925d7 | ||
|
|
e0569ac47a | ||
|
|
40befd8770 | ||
|
|
6718950a39 | ||
|
|
68ac8c12c2 | ||
|
|
28d1676d50 | ||
|
|
883c4d8778 | ||
|
|
8717303a80 | ||
|
|
385d440694 | ||
|
|
b3019270c9 | ||
|
|
5d7c77aab0 | ||
|
|
2c70837ae7 | ||
|
|
bed7ff7dd0 | ||
|
|
450b066cb8 | ||
|
|
63f58f4026 | ||
|
|
009fe18fa2 | ||
|
|
40d00a961e | ||
|
|
1941dbbd71 | ||
|
|
320cdcf97e | ||
|
|
454407003e | ||
|
|
511f6bdf79 | ||
|
|
320ee6b6b7 | ||
|
|
38df5b146f | ||
|
|
25c91aecab | ||
|
|
5497accbdb | ||
|
|
182e600eb7 | ||
|
|
cb873cc081 | ||
|
|
330d19e344 | ||
|
|
2fe6a965af | ||
|
|
a32097aa9f | ||
|
|
1239c9460a | ||
|
|
f2f227720b | ||
|
|
0b183beb51 | ||
|
|
5ce393aab8 | ||
|
|
00a907c7de | ||
|
|
0665733b03 | ||
|
|
70c3371b14 | ||
|
|
0b2be88bac | ||
|
|
8297fb0d2f | ||
|
|
04925bb94c | ||
|
|
3185ca2d95 | ||
|
|
b93f304c06 |
2
.github/ISSUE_TEMPLATE/2-bug-report.yml
vendored
2
.github/ISSUE_TEMPLATE/2-bug-report.yml
vendored
@@ -14,7 +14,7 @@ body:
|
||||
- type: input
|
||||
attributes:
|
||||
label: What version of Bun is running?
|
||||
description: Copy the output of `bun -v`
|
||||
description: Copy the output of `bun --revision`
|
||||
- type: input
|
||||
attributes:
|
||||
label: What platform is your computer?
|
||||
|
||||
2
.github/workflows/bun-linux-aarch64.yml
vendored
2
.github/workflows/bun-linux-aarch64.yml
vendored
@@ -36,7 +36,7 @@ jobs:
|
||||
arch: aarch64
|
||||
build_arch: arm64
|
||||
runner: linux-arm64
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-2/bun-webkit-linux-arm64-lto.tar.gz"
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-linux-arm64-lto.tar.gz"
|
||||
webkit_basename: "bun-webkit-linux-arm64-lto"
|
||||
build_machine_arch: aarch64
|
||||
|
||||
|
||||
5
.github/workflows/bun-linux-build.yml
vendored
5
.github/workflows/bun-linux-build.yml
vendored
@@ -46,7 +46,7 @@ jobs:
|
||||
arch: x86_64
|
||||
build_arch: amd64
|
||||
runner: big-ubuntu
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-2/bun-webkit-linux-amd64-lto.tar.gz"
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-linux-amd64-lto.tar.gz"
|
||||
webkit_basename: "bun-webkit-linux-amd64-lto"
|
||||
build_machine_arch: x86_64
|
||||
- cpu: nehalem
|
||||
@@ -54,7 +54,7 @@ jobs:
|
||||
arch: x86_64
|
||||
build_arch: amd64
|
||||
runner: big-ubuntu
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-2/bun-webkit-linux-amd64-lto.tar.gz"
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-linux-amd64-lto.tar.gz"
|
||||
webkit_basename: "bun-webkit-linux-amd64-lto"
|
||||
build_machine_arch: x86_64
|
||||
|
||||
@@ -197,6 +197,7 @@ jobs:
|
||||
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
|
||||
# if: ${{github.event.inputs.use_bun == 'false'}}
|
||||
run: |
|
||||
sudo apt-get update && sudo apt-get install -y openssl
|
||||
bun install
|
||||
bun install --cwd test
|
||||
bun install --cwd packages/bun-internal-test
|
||||
|
||||
16
.github/workflows/bun-mac-aarch64.yml
vendored
16
.github/workflows/bun-mac-aarch64.yml
vendored
@@ -117,7 +117,7 @@ jobs:
|
||||
# obj: bun-obj-darwin-x64-baseline
|
||||
# runner: macos-11
|
||||
# artifact: bun-obj-darwin-x64-baseline
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-2/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# dependencies: true
|
||||
# compile_obj: false
|
||||
# - cpu: haswell
|
||||
@@ -126,7 +126,7 @@ jobs:
|
||||
# obj: bun-obj-darwin-x64
|
||||
# runner: macos-11
|
||||
# artifact: bun-obj-darwin-x64
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-2/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# dependencies: true
|
||||
# compile_obj: false
|
||||
# - cpu: nehalem
|
||||
@@ -135,7 +135,7 @@ jobs:
|
||||
# obj: bun-obj-darwin-x64-baseline
|
||||
# runner: macos-11
|
||||
# artifact: bun-obj-darwin-x64-baseline
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-2/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# dependencies: false
|
||||
# compile_obj: true
|
||||
# - cpu: haswell
|
||||
@@ -144,7 +144,7 @@ jobs:
|
||||
# obj: bun-obj-darwin-x64
|
||||
# runner: macos-11
|
||||
# artifact: bun-obj-darwin-x64
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-2/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# dependencies: false
|
||||
# compile_obj: true
|
||||
- cpu: native
|
||||
@@ -152,7 +152,7 @@ jobs:
|
||||
tag: bun-darwin-aarch64
|
||||
obj: bun-obj-darwin-aarch64
|
||||
artifact: bun-obj-darwin-aarch64
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-2/bun-webkit-macos-arm64-lto.tar.gz"
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-arm64-lto.tar.gz"
|
||||
runner: macos-arm64
|
||||
dependencies: true
|
||||
compile_obj: true
|
||||
@@ -257,7 +257,7 @@ jobs:
|
||||
# package: bun-darwin-x64
|
||||
# runner: macos-11
|
||||
# artifact: bun-obj-darwin-x64-baseline
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-2/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# - cpu: haswell
|
||||
# arch: x86_64
|
||||
# tag: bun-darwin-x64
|
||||
@@ -265,14 +265,14 @@ jobs:
|
||||
# package: bun-darwin-x64
|
||||
# runner: macos-11
|
||||
# artifact: bun-obj-darwin-x64
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-2/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
- cpu: native
|
||||
arch: aarch64
|
||||
tag: bun-darwin-aarch64
|
||||
obj: bun-obj-darwin-aarch64
|
||||
package: bun-darwin-aarch64
|
||||
artifact: bun-obj-darwin-aarch64
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-2/bun-webkit-macos-arm64-lto.tar.gz"
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-arm64-lto.tar.gz"
|
||||
runner: macos-arm64
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
16
.github/workflows/bun-mac-x64-baseline.yml
vendored
16
.github/workflows/bun-mac-x64-baseline.yml
vendored
@@ -117,7 +117,7 @@ jobs:
|
||||
obj: bun-obj-darwin-x64-baseline
|
||||
runner: macos-11
|
||||
artifact: bun-obj-darwin-x64-baseline
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-2/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
dependencies: true
|
||||
compile_obj: false
|
||||
# - cpu: haswell
|
||||
@@ -126,7 +126,7 @@ jobs:
|
||||
# obj: bun-obj-darwin-x64
|
||||
# runner: macos-11
|
||||
# artifact: bun-obj-darwin-x64
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-2/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# dependencies: true
|
||||
# compile_obj: false
|
||||
- cpu: nehalem
|
||||
@@ -135,7 +135,7 @@ jobs:
|
||||
obj: bun-obj-darwin-x64-baseline
|
||||
runner: macos-11
|
||||
artifact: bun-obj-darwin-x64-baseline
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-2/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
dependencies: false
|
||||
compile_obj: true
|
||||
# - cpu: haswell
|
||||
@@ -144,7 +144,7 @@ jobs:
|
||||
# obj: bun-obj-darwin-x64
|
||||
# runner: macos-11
|
||||
# artifact: bun-obj-darwin-x64
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-2/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# dependencies: false
|
||||
# compile_obj: true
|
||||
# - cpu: native
|
||||
@@ -152,7 +152,7 @@ jobs:
|
||||
# tag: bun-darwin-aarch64
|
||||
# obj: bun-obj-darwin-aarch64
|
||||
# artifact: bun-obj-darwin-aarch64
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-2/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# runner: macos-arm64
|
||||
# dependencies: true
|
||||
# compile_obj: true
|
||||
@@ -258,7 +258,7 @@ jobs:
|
||||
package: bun-darwin-x64
|
||||
runner: macos-11
|
||||
artifact: bun-obj-darwin-x64-baseline
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-2/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# - cpu: haswell
|
||||
# arch: x86_64
|
||||
# tag: bun-darwin-x64
|
||||
@@ -266,14 +266,14 @@ jobs:
|
||||
# package: bun-darwin-x64
|
||||
# runner: macos-11
|
||||
# artifact: bun-obj-darwin-x64
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-2/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# - cpu: native
|
||||
# arch: aarch64
|
||||
# tag: bun-darwin-aarch64
|
||||
# obj: bun-obj-darwin-aarch64
|
||||
# package: bun-darwin-aarch64
|
||||
# artifact: bun-obj-darwin-aarch64
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-2/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# runner: macos-arm64
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
16
.github/workflows/bun-mac-x64.yml
vendored
16
.github/workflows/bun-mac-x64.yml
vendored
@@ -117,7 +117,7 @@ jobs:
|
||||
# obj: bun-obj-darwin-x64-baseline
|
||||
# runner: macos-11
|
||||
# artifact: bun-obj-darwin-x64-baseline
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-2/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# dependencies: true
|
||||
# compile_obj: false
|
||||
- cpu: haswell
|
||||
@@ -126,7 +126,7 @@ jobs:
|
||||
obj: bun-obj-darwin-x64
|
||||
runner: macos-11
|
||||
artifact: bun-obj-darwin-x64
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-2/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
dependencies: true
|
||||
compile_obj: false
|
||||
# - cpu: nehalem
|
||||
@@ -135,7 +135,7 @@ jobs:
|
||||
# obj: bun-obj-darwin-x64-baseline
|
||||
# runner: macos-11
|
||||
# artifact: bun-obj-darwin-x64-baseline
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-2/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# dependencies: false
|
||||
# compile_obj: true
|
||||
- cpu: haswell
|
||||
@@ -144,7 +144,7 @@ jobs:
|
||||
obj: bun-obj-darwin-x64
|
||||
runner: macos-11
|
||||
artifact: bun-obj-darwin-x64
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-2/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
dependencies: false
|
||||
compile_obj: true
|
||||
# - cpu: native
|
||||
@@ -152,7 +152,7 @@ jobs:
|
||||
# tag: bun-darwin-aarch64
|
||||
# obj: bun-obj-darwin-aarch64
|
||||
# artifact: bun-obj-darwin-aarch64
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-2/bun-webkit-macos-arm64-lto.tar.gz"
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-arm64-lto.tar.gz"
|
||||
# runner: macos-arm64
|
||||
# dependencies: true
|
||||
# compile_obj: true
|
||||
@@ -260,7 +260,7 @@ jobs:
|
||||
# package: bun-darwin-x64
|
||||
# runner: macos-11
|
||||
# artifact: bun-obj-darwin-x64-baseline
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-2/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
- cpu: haswell
|
||||
arch: x86_64
|
||||
tag: bun-darwin-x64
|
||||
@@ -268,14 +268,14 @@ jobs:
|
||||
package: bun-darwin-x64
|
||||
runner: macos-11
|
||||
artifact: bun-obj-darwin-x64
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-2/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# - cpu: native
|
||||
# arch: aarch64
|
||||
# tag: bun-darwin-aarch64
|
||||
# obj: bun-obj-darwin-aarch64
|
||||
# package: bun-darwin-aarch64
|
||||
# artifact: bun-obj-darwin-aarch64
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-2/bun-webkit-macos-arm64-lto.tar.gz"
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-aug3-5/bun-webkit-macos-arm64-lto.tar.gz"
|
||||
# runner: macos-arm64
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
2
.gitignore
vendored
2
.gitignore
vendored
@@ -96,6 +96,8 @@ packages/bun-wasm/*.cjs
|
||||
packages/bun-wasm/*.map
|
||||
packages/bun-wasm/*.js
|
||||
packages/bun-wasm/*.d.ts
|
||||
packages/bun-wasm/*.d.cts
|
||||
packages/bun-wasm/*.d.mts
|
||||
*.bc
|
||||
|
||||
src/fallback.version
|
||||
|
||||
7
.vscode/c_cpp_properties.json
vendored
7
.vscode/c_cpp_properties.json
vendored
@@ -15,10 +15,12 @@
|
||||
"${workspaceFolder}/src/bun.js/bindings/webcore/",
|
||||
"${workspaceFolder}/src/bun.js/bindings/sqlite/",
|
||||
"${workspaceFolder}/src/bun.js/bindings/webcrypto/",
|
||||
"${workspaceFolder}/src/bun.js/modules/",
|
||||
"${workspaceFolder}/src/js/builtins/",
|
||||
"${workspaceFolder}/src/js/out",
|
||||
"${workspaceFolder}/src/deps/boringssl/include/",
|
||||
"${workspaceFolder}/src/deps",
|
||||
"${workspaceFolder}/src/napi/*",
|
||||
"${workspaceFolder}/src/deps/uws/uSockets/src"
|
||||
],
|
||||
"browse": {
|
||||
@@ -31,6 +33,8 @@
|
||||
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/WTF/Headers/**",
|
||||
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/bmalloc/Headers/**",
|
||||
"${workspaceFolder}/src/bun.js/bindings/*",
|
||||
"${workspaceFolder}/src/bun.js/bindings/*",
|
||||
"${workspaceFolder}/src/napi/*",
|
||||
"${workspaceFolder}/src/bun.js/bindings/sqlite/",
|
||||
"${workspaceFolder}/src/bun.js/bindings/webcrypto/",
|
||||
"${workspaceFolder}/src/bun.js/bindings/webcore/",
|
||||
@@ -39,7 +43,8 @@
|
||||
"${workspaceFolder}/src/bun.js/modules/*",
|
||||
"${workspaceFolder}/src/deps",
|
||||
"${workspaceFolder}/src/deps/boringssl/include/",
|
||||
"${workspaceFolder}/src/deps/uws/uSockets/src"
|
||||
"${workspaceFolder}/src/deps/uws/uSockets/src",
|
||||
"${workspaceFolder}/src/napi"
|
||||
],
|
||||
"limitSymbolsToIncludedHeaders": true,
|
||||
"databaseFilename": ".vscode/cppdb"
|
||||
|
||||
4
.vscode/launch.json
generated
vendored
4
.vscode/launch.json
generated
vendored
@@ -134,11 +134,11 @@
|
||||
"request": "launch",
|
||||
"name": "bun run [file]",
|
||||
"program": "bun-debug",
|
||||
"args": ["run", "${file}"],
|
||||
"args": ["run", "${file}", "${file}"],
|
||||
"cwd": "${fileDirname}",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1"
|
||||
"NODE_ENV": "development"
|
||||
},
|
||||
"initCommands": ["process handle -p false -s false -n false SIGHUP"],
|
||||
"console": "internalConsole"
|
||||
|
||||
@@ -47,34 +47,18 @@ TODO: document this (see [`bindings.zig`](src/bun.js/bindings/bindings.zig) and
|
||||
|
||||
Copy from examples like `Subprocess` or `Response`.
|
||||
|
||||
### ESM modules
|
||||
### ESM Modules and Builtins JS
|
||||
|
||||
Bun implements ESM modules in a mix of native code and JavaScript.
|
||||
|
||||
Several Node.js modules are implemented in JavaScript and loosely based on browserify polyfills.
|
||||
|
||||
The ESM modules in Bun are located in [`src/bun.js/*.exports.js`](src/bun.js/). Unlike other code in Bun, these files are NOT transpiled. They are loaded directly into the JavaScriptCore VM. That means `require` does not work in these files. Instead, you must use `import.meta.require`, or ideally, not use require/import other files at all.
|
||||
Builtin modules in Bun are located in [`src/js`](src/js/). These files are transpiled and support a JavaScriptCore-only syntax for internal slots, which is explained further in [`src/js/README.md`](src/js/README.md).
|
||||
|
||||
Native C++ modules are in `src/bun.js/modules/`.
|
||||
|
||||
The module loader is in [`src/bun.js/module_loader.zig`](src/bun.js/module_loader.zig).
|
||||
|
||||
### JavaScript Builtins
|
||||
|
||||
TODO: update this with the new build process that uses TypeScript and `$` instead of `@`.
|
||||
|
||||
JavaScript builtins are located in [`src/js/builtins/*.ts`](src/js/builtins).
|
||||
|
||||
These files support a JavaScriptCore-only syntax for internal slots. `@` is used to access an internal slot. For example: `new @Array(123)` will create a new `Array` similar to `new Array(123)`, except if a library modifies the `Array` global, it will not affect the internal slot (`@Array`). These names must be allow-listed in `BunBuiltinNames.h` (though JavaScriptCore allowlists some names by default).
|
||||
|
||||
They can not use or reference ESM-modules. The files that end with `*Internals.js` are automatically loaded globally. Most usage of internals right now are the stream implementations (which share a lot of code from Safari/WebKit) and ImportMetaObject (which is how `require` is implemented in the runtime)
|
||||
|
||||
To regenerate the builtins:
|
||||
|
||||
```sh
|
||||
make clean-bindings && make generate-builtins && make bindings -j10
|
||||
```
|
||||
|
||||
It is recommended that you have ccache installed or else you will spend a lot of time waiting for the bindings to compile.
|
||||
|
||||
### Memory management in Bun's JavaScript runtime
|
||||
|
||||
TODO: fill this out (for now, use `JSC.Strong` in most cases)
|
||||
|
||||
@@ -10,7 +10,7 @@ ARG ARCH=x86_64
|
||||
ARG BUILD_MACHINE_ARCH=x86_64
|
||||
ARG TRIPLET=${ARCH}-linux-gnu
|
||||
ARG BUILDARCH=amd64
|
||||
ARG WEBKIT_TAG=2023-aug3-2
|
||||
ARG WEBKIT_TAG=2023-aug3-5
|
||||
ARG ZIG_TAG=jul1
|
||||
ARG ZIG_VERSION="0.11.0-dev.4006+bf827d0b5"
|
||||
ARG WEBKIT_BASENAME="bun-webkit-linux-$BUILDARCH"
|
||||
@@ -20,7 +20,7 @@ ARG ZIG_FILENAME=${ZIG_FOLDERNAME}.tar.xz
|
||||
ARG WEBKIT_URL="https://github.com/oven-sh/WebKit/releases/download/$WEBKIT_TAG/${WEBKIT_BASENAME}.tar.gz"
|
||||
ARG ZIG_URL="https://ziglang.org/builds/${ZIG_FILENAME}"
|
||||
ARG GIT_SHA=""
|
||||
ARG BUN_BASE_VERSION=0.7
|
||||
ARG BUN_BASE_VERSION=0.8
|
||||
|
||||
FROM bitnami/minideb:bullseye as bun-base
|
||||
|
||||
|
||||
101
Makefile
101
Makefile
@@ -38,7 +38,7 @@ NATIVE_OR_OLD_MARCH = -march=nehalem
|
||||
endif
|
||||
|
||||
MIN_MACOS_VERSION ?= $(DEFAULT_MIN_MACOS_VERSION)
|
||||
BUN_BASE_VERSION = 0.7
|
||||
BUN_BASE_VERSION = 0.8
|
||||
|
||||
CI ?= false
|
||||
|
||||
@@ -352,7 +352,7 @@ LINUX_INCLUDE_DIRS := $(ALL_JSC_INCLUDE_DIRS) \
|
||||
UWS_INCLUDE_DIR := -I$(BUN_DEPS_DIR)/uws/uSockets/src -I$(BUN_DEPS_DIR)/uws/src -I$(BUN_DEPS_DIR)
|
||||
|
||||
|
||||
INCLUDE_DIRS := $(UWS_INCLUDE_DIR) -I$(BUN_DEPS_DIR)/mimalloc/include -I$(BUN_DEPS_DIR)/zstd/include -Isrc/napi -I$(BUN_DEPS_DIR)/boringssl/include -I$(BUN_DEPS_DIR)/c-ares/include
|
||||
INCLUDE_DIRS := $(UWS_INCLUDE_DIR) -I$(BUN_DEPS_DIR)/mimalloc/include -I$(BUN_DEPS_DIR)/zstd/include -Isrc/napi -I$(BUN_DEPS_DIR)/boringssl/include -I$(BUN_DEPS_DIR)/c-ares/include -Isrc/bun.js/modules
|
||||
|
||||
|
||||
ifeq ($(OS_NAME),linux)
|
||||
@@ -401,6 +401,7 @@ CLANG_FLAGS = $(INCLUDE_DIRS) \
|
||||
-DSTATICALLY_LINKED_WITH_BMALLOC=1 \
|
||||
-DBUILDING_WITH_CMAKE=1 \
|
||||
-DBUN_SINGLE_THREADED_PER_VM_ENTRY_SCOPE=1 \
|
||||
-DNAPI_EXPERIMENTAL=ON \
|
||||
-DNDEBUG=1 \
|
||||
-DNOMINMAX \
|
||||
-DIS_BUILD \
|
||||
@@ -555,7 +556,7 @@ tinycc:
|
||||
PYTHON=$(shell which python 2>/dev/null || which python3 2>/dev/null || which python2 2>/dev/null)
|
||||
|
||||
.PHONY: esm
|
||||
js:
|
||||
js: # to rebundle js (rebuilding binary not needed to reload js code)
|
||||
NODE_ENV=production bun src/js/_codegen/index.ts
|
||||
|
||||
esm-debug:
|
||||
@@ -660,8 +661,8 @@ else
|
||||
PKGNAME_NINJA := ninja-build
|
||||
endif
|
||||
|
||||
.PHONY: require
|
||||
require:
|
||||
.PHONY: assert-deps
|
||||
assert-deps:
|
||||
@echo "Checking if the required utilities are available..."
|
||||
@if [ $(CLANG_VERSION) -lt "15" ]; then echo -e "ERROR: clang version >=15 required, found: $(CLANG_VERSION). Install with:\n\n $(POSIX_PKG_MANAGER) install llvm@15"; exit 1; fi
|
||||
@cmake --version >/dev/null 2>&1 || (echo -e "ERROR: cmake is required."; exit 1)
|
||||
@@ -673,6 +674,9 @@ require:
|
||||
@which $(LIBTOOL) > /dev/null || (echo -e "ERROR: libtool is required. Install with:\n\n $(POSIX_PKG_MANAGER) install libtool"; exit 1)
|
||||
@which ninja > /dev/null || (echo -e "ERROR: Ninja is required. Install with:\n\n $(POSIX_PKG_MANAGER) install $(PKGNAME_NINJA)"; exit 1)
|
||||
@which pkg-config > /dev/null || (echo -e "ERROR: pkg-config is required. Install with:\n\n $(POSIX_PKG_MANAGER) install pkg-config"; exit 1)
|
||||
@which rustc > /dev/null || (echo -e "ERROR: rustc is required." exit 1)
|
||||
@which cargo > /dev/null || (echo -e "ERROR: cargo is required." exit 1)
|
||||
@test $(shell cargo --version | awk '{print $$2}' | cut -d. -f2) -gt 57 || (echo -e "ERROR: cargo version must be at least 1.57."; exit 1)
|
||||
@echo "You have the dependencies installed! Woo"
|
||||
|
||||
# the following allows you to run `make submodule` to update or init submodules. but we will exclude webkit
|
||||
@@ -703,44 +707,46 @@ dev-build-obj-wasm:
|
||||
|
||||
.PHONY: dev-wasm
|
||||
dev-wasm: dev-build-obj-wasm
|
||||
emcc -sEXPORTED_FUNCTIONS="['_bun_free', '_cycleStart', '_cycleEnd', '_bun_malloc', '_scan', '_transform', '_init']" \
|
||||
-g -s ERROR_ON_UNDEFINED_SYMBOLS=0 -DNDEBUG \
|
||||
$(BUN_DEPS_DIR)/libmimalloc.a.wasm \
|
||||
packages/debug-bun-freestanding-wasm32/bun-wasm.o $(OPTIMIZATION_LEVEL) --no-entry --allow-undefined -s ASSERTIONS=0 -s ALLOW_MEMORY_GROWTH=1 -s WASM_BIGINT=1 \
|
||||
emcc -sEXPORTED_FUNCTIONS="['_bun_free', '_cycleStart', '_cycleEnd', '_bun_malloc', '_scan', '_transform', '_init', '_getTests']" \
|
||||
-g2 -s ERROR_ON_UNDEFINED_SYMBOLS=0 -DNDEBUG \
|
||||
$(BUN_DEPS_DIR)/$(MIMALLOC_FILE).wasm \
|
||||
packages/debug-bun-freestanding-wasm32/bun-wasm.o --no-entry --allow-undefined -s ASSERTIONS=0 -s ALLOW_MEMORY_GROWTH=1 -s WASM_BIGINT=1 \
|
||||
-o packages/debug-bun-freestanding-wasm32/bun-wasm.wasm
|
||||
cp packages/debug-bun-freestanding-wasm32/bun-wasm.wasm src/api/demo/public/bun-wasm.wasm
|
||||
cp packages/debug-bun-freestanding-wasm32/bun-wasm.wasm packages/bun-wasm/bun.wasm
|
||||
|
||||
.PHONY: build-obj-wasm
|
||||
build-obj-wasm:
|
||||
$(ZIG) build bun-wasm -Doptimize=ReleaseFast -Dtarget=wasm32-freestanding
|
||||
emcc -sEXPORTED_FUNCTIONS="['_bun_free', '_cycleStart', '_cycleEnd', '_bun_malloc', '_scan', '_transform', '_init']" \
|
||||
-g -s ERROR_ON_UNDEFINED_SYMBOLS=0 -DNDEBUG \
|
||||
$(BUN_DEPS_DIR)/libmimalloc.a.wasm \
|
||||
emcc -sEXPORTED_FUNCTIONS="['_bun_free', '_cycleStart', '_cycleEnd', '_bun_malloc', '_scan', '_transform', '_init', '_getTests']" \
|
||||
-s ERROR_ON_UNDEFINED_SYMBOLS=0 -DNDEBUG \
|
||||
$(BUN_DEPS_DIR)/$(MIMALLOC_FILE).wasm \
|
||||
packages/bun-freestanding-wasm32/bun-wasm.o $(OPTIMIZATION_LEVEL) --no-entry --allow-undefined -s ASSERTIONS=0 -s ALLOW_MEMORY_GROWTH=1 -s WASM_BIGINT=1 \
|
||||
-o packages/bun-freestanding-wasm32/bun-wasm.wasm
|
||||
cp packages/bun-freestanding-wasm32/bun-wasm.wasm src/api/demo/public/bun-wasm.wasm
|
||||
cp packages/bun-freestanding-wasm32/bun-wasm.wasm packages/bun-wasm/bun.wasm
|
||||
|
||||
.PHONY: build-obj-wasm-small
|
||||
build-obj-wasm-small:
|
||||
$(ZIG) build bun-wasm -Doptimize=ReleaseSmall -Dtarget=wasm32-freestanding
|
||||
emcc -sEXPORTED_FUNCTIONS="['_bun_free', '_cycleStart', '_cycleEnd', '_bun_malloc', '_scan', '_transform', '_init']" \
|
||||
-g -s ERROR_ON_UNDEFINED_SYMBOLS=0 -DNDEBUG \
|
||||
$(BUN_DEPS_DIR)/libmimalloc.a.wasm \
|
||||
$(ZIG) build bun-wasm -Doptimize=ReleaseFast -Dtarget=wasm32-freestanding
|
||||
emcc -sEXPORTED_FUNCTIONS="['_bun_free', '_cycleStart', '_cycleEnd', '_bun_malloc', '_scan', '_transform', '_init', '_getTests']" \
|
||||
-Oz -s ERROR_ON_UNDEFINED_SYMBOLS=0 -DNDEBUG \
|
||||
$(BUN_DEPS_DIR)/$(MIMALLOC_FILE).wasm \
|
||||
packages/bun-freestanding-wasm32/bun-wasm.o -Oz --no-entry --allow-undefined -s ASSERTIONS=0 -s ALLOW_MEMORY_GROWTH=1 -s WASM_BIGINT=1 \
|
||||
-o packages/bun-freestanding-wasm32/bun-wasm.wasm
|
||||
cp packages/bun-freestanding-wasm32/bun-wasm.wasm src/api/demo/public/bun-wasm.wasm
|
||||
cp packages/bun-freestanding-wasm32/bun-wasm.wasm packages/bun-wasm/bun.wasm
|
||||
|
||||
.PHONY: wasm
|
||||
wasm: api build-obj-wasm-small
|
||||
@rm -rf packages/bun-wasm/*.{d.ts,js,wasm,cjs,mjs,tsbuildinfo}
|
||||
wasm: api mimalloc-wasm build-obj-wasm-small
|
||||
@rm -rf packages/bun-wasm/*.{d.ts,d.cts,d.mts,js,wasm,cjs,mjs,tsbuildinfo}
|
||||
@cp packages/bun-freestanding-wasm32/bun-wasm.wasm packages/bun-wasm/bun.wasm
|
||||
@cp src/api/schema.d.ts packages/bun-wasm/schema.d.ts
|
||||
@cp src/api/schema.js packages/bun-wasm/schema.js
|
||||
@cd packages/bun-wasm && $(NPM_CLIENT) run tsc -- -p .
|
||||
@$(ESBUILD) --sourcemap=external --external:fs --define:process.env.NODE_ENV='"production"' --outdir=packages/bun-wasm --target=esnext --bundle packages/bun-wasm/index.ts --format=esm --minify 2> /dev/null
|
||||
@cp packages/bun-wasm/index.d.ts packages/bun-wasm/index.d.cts
|
||||
@mv packages/bun-wasm/index.d.ts packages/bun-wasm/index.d.mts
|
||||
@bun build --sourcemap=external --external=fs --outdir=packages/bun-wasm --target=browser --minify ./packages/bun-wasm/index.ts
|
||||
@mv packages/bun-wasm/index.js packages/bun-wasm/index.mjs
|
||||
@mv packages/bun-wasm/index.js.map packages/bun-wasm/index.mjs.map
|
||||
@$(ESBUILD) --sourcemap=external --external:fs --define:process.env.NODE_ENV='"production"' --outdir=packages/bun-wasm --target=esnext --bundle packages/bun-wasm/index.ts --format=cjs --minify --platform=node 2> /dev/null
|
||||
@$(ESBUILD) --sourcemap=external --external:fs --outdir=packages/bun-wasm --target=esnext --bundle packages/bun-wasm/index.ts --format=cjs --minify --platform=node 2> /dev/null
|
||||
@mv packages/bun-wasm/index.js packages/bun-wasm/index.cjs
|
||||
@mv packages/bun-wasm/index.js.map packages/bun-wasm/index.cjs.map
|
||||
@rm -rf packages/bun-wasm/*.tsbuildinfo
|
||||
@@ -938,6 +944,7 @@ headers:
|
||||
$(ZIG) translate-c src/bun.js/bindings/headers.h > src/bun.js/bindings/headers.zig
|
||||
$(BUN_OR_NODE) misctools/headers-cleaner.js
|
||||
$(ZIG) fmt src/bun.js/bindings/headers.zig
|
||||
$(CLANG_FORMAT) -i src/bun.js/bindings/ZigGeneratedCode.cpp
|
||||
|
||||
.PHONY: jsc-bindings-headers
|
||||
jsc-bindings-headers: headers
|
||||
@@ -1106,9 +1113,6 @@ endif
|
||||
dev-obj-linux:
|
||||
$(ZIG) build obj -Dtarget=x86_64-linux-gnu -Dcpu="$(CPU_TARGET)"
|
||||
|
||||
.PHONY: dev
|
||||
dev: mkdir-dev dev-obj link ## compile zig changes + link bun
|
||||
|
||||
mkdir-dev:
|
||||
mkdir -p $(DEBUG_PACKAGE_DIR)
|
||||
|
||||
@@ -1197,6 +1201,7 @@ jsc-build-mac-compile:
|
||||
-DUSE_THIN_ARCHIVES=OFF \
|
||||
-DBUN_FAST_TLS=ON \
|
||||
-DENABLE_FTL_JIT=ON \
|
||||
-DUSE_BUN_JSC_ADDITIONS=ON \
|
||||
-G Ninja \
|
||||
$(CMAKE_FLAGS_WITHOUT_RELEASE) \
|
||||
-DPTHREAD_JIT_PERMISSIONS_API=1 \
|
||||
@@ -1219,6 +1224,7 @@ jsc-build-mac-compile-lto:
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DUSE_THIN_ARCHIVES=OFF \
|
||||
-DBUN_FAST_TLS=ON \
|
||||
-DUSE_BUN_JSC_ADDITIONS=ON \
|
||||
-DCMAKE_C_FLAGS="-flto=full" \
|
||||
-DCMAKE_CXX_FLAGS="-flto=full" \
|
||||
-DENABLE_FTL_JIT=ON \
|
||||
@@ -1243,6 +1249,7 @@ jsc-build-mac-compile-debug:
|
||||
-DUSE_THIN_ARCHIVES=OFF \
|
||||
-DENABLE_FTL_JIT=ON \
|
||||
-DCMAKE_EXPORT_COMPILE_COMMANDS=ON \
|
||||
-DUSE_BUN_JSC_ADDITIONS=ON \
|
||||
-DALLOW_LINE_AND_COLUMN_NUMBER_IN_BUILTINS=ON \
|
||||
-G Ninja \
|
||||
$(CMAKE_FLAGS_WITHOUT_RELEASE) \
|
||||
@@ -1264,6 +1271,7 @@ jsc-build-linux-compile-config:
|
||||
-DENABLE_STATIC_JSC=ON \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DUSE_THIN_ARCHIVES=OFF \
|
||||
-DUSE_BUN_JSC_ADDITIONS=ON \
|
||||
-DENABLE_FTL_JIT=ON \
|
||||
-DENABLE_REMOTE_INSPECTOR=ON \
|
||||
-DJSEXPORT_PRIVATE=WTF_EXPORT_DECLARATION \
|
||||
@@ -1282,7 +1290,7 @@ jsc-build-linux-compile-config:
|
||||
jsc-build-linux-compile-build:
|
||||
mkdir -p $(WEBKIT_RELEASE_DIR) && \
|
||||
cd $(WEBKIT_RELEASE_DIR) && \
|
||||
CFLAGS="$(CFLAGS) -Wl,--whole-archive -ffat-lto-objects" CXXFLAGS="$(CXXFLAGS) -Wl,--whole-archive -ffat-lto-objects" \
|
||||
CFLAGS="$(CFLAGS) -Wl,--whole-archive -ffat-lto-objects" CXXFLAGS="$(CXXFLAGS) -Wl,--whole-archive -ffat-lto-objects" -DUSE_BUN_JSC_ADDITIONS=ON \
|
||||
cmake --build $(WEBKIT_RELEASE_DIR) --config relwithdebuginfo --target jsc
|
||||
|
||||
|
||||
@@ -1379,7 +1387,8 @@ mimalloc:
|
||||
|
||||
|
||||
mimalloc-wasm:
|
||||
cd $(BUN_DEPS_DIR)/mimalloc; emcmake cmake -DMI_BUILD_SHARED=OFF -DMI_BUILD_STATIC=ON -DMI_BUILD_TESTS=OFF -DMI_BUILD_OBJECT=ON ${MIMALLOC_OVERRIDE_FLAG} -DMI_USE_CXX=ON .; emmake make;
|
||||
rm -rf $(BUN_DEPS_DIR)/mimalloc/CMakeCache* $(BUN_DEPS_DIR)/mimalloc/CMakeFiles
|
||||
cd $(BUN_DEPS_DIR)/mimalloc; emcmake cmake -DMI_BUILD_SHARED=OFF -DMI_BUILD_STATIC=ON -DMI_BUILD_TESTS=OFF -GNinja -DMI_BUILD_OBJECT=ON ${MIMALLOC_OVERRIDE_FLAG} -DMI_USE_CXX=OFF .; emmake cmake --build .;
|
||||
cp $(BUN_DEPS_DIR)/mimalloc/$(MIMALLOC_INPUT_PATH) $(BUN_DEPS_OUT_DIR)/$(MIMALLOC_FILE).wasm
|
||||
|
||||
# alias for link, incase anyone still types that
|
||||
@@ -1897,26 +1906,44 @@ vendor-without-npm: node-fallbacks runtime_js fallback_decoder bun_error mimallo
|
||||
vendor-without-check: npm-install vendor-without-npm
|
||||
|
||||
.PHONY: vendor
|
||||
vendor: require submodule vendor-without-check
|
||||
vendor: assert-deps submodule vendor-without-check
|
||||
|
||||
.PHONY: vendor-dev
|
||||
vendor-dev: require submodule npm-install-dev vendor-without-npm
|
||||
vendor-dev: assert-deps submodule npm-install-dev vendor-without-npm
|
||||
|
||||
.PHONY: bun
|
||||
bun: vendor identifier-cache build-obj bun-link-lld-release bun-codesign-release-local
|
||||
|
||||
.PHONY: regenerate-bindings
|
||||
regenerate-bindings: ## compile src/js/builtins + all c++ code, does not link
|
||||
.PHONY: cpp
|
||||
cpp: ## compile src/js/builtins + all c++ code then link
|
||||
@make clean-bindings js
|
||||
@make bindings -j$(CPU_COUNT)
|
||||
@make link
|
||||
|
||||
.PHONY: cpp
|
||||
cpp-no-link:
|
||||
@make clean-bindings js
|
||||
@make bindings -j$(CPU_COUNT)
|
||||
|
||||
.PHONY: zig
|
||||
zig: ## compile zig code then link
|
||||
@make mkdir-dev dev-obj link
|
||||
|
||||
.PHONY: zig-no-link
|
||||
zig-no-link:
|
||||
@make mkdir-dev dev-obj
|
||||
|
||||
.PHONY: dev
|
||||
dev: # combo of `make cpp` and `make zig`
|
||||
@make cpp-no-link zig-no-link -j2
|
||||
@make link
|
||||
|
||||
.PHONY: setup
|
||||
setup: vendor-dev identifier-cache clean-bindings js
|
||||
make jsc-check
|
||||
make bindings -j$(CPU_COUNT)
|
||||
setup: vendor-dev identifier-cache clean-bindings
|
||||
make jsc-check dev
|
||||
@echo ""
|
||||
@echo "Development environment setup complete"
|
||||
@echo "Run \`make dev\` to build \`bun-debug\`"
|
||||
@echo "First build complete!"
|
||||
@echo "\"bun-debug\" is available at $(DEBUG_BIN)/bun-debug"
|
||||
@echo ""
|
||||
|
||||
.PHONY: help
|
||||
|
||||
@@ -24,7 +24,7 @@
|
||||
|
||||
## What is Bun?
|
||||
|
||||
> **Bun is still under development.** Use it to speed up your development workflows or run simpler production code in resource-constrained environments like serverless functions. We're working on more complete Node.js compatibility and integration with existing frameworks. Join the [Discord](https://bun.sh/discord) and watch the [GitHub repository](https://github.com/oven-sh/bun) to keeps tabs on future releases.
|
||||
> **Bun is still under development.** Use it to speed up your development workflows or run simpler production code in resource-constrained environments like serverless functions. We're working on more complete Node.js compatibility and integration with existing frameworks. Join the [Discord](https://bun.sh/discord) and watch the [GitHub repository](https://github.com/oven-sh/bun) to keep tabs on future releases.
|
||||
|
||||
Bun is an all-in-one toolkit for JavaScript and TypeScript apps. It ships as a single executable called `bun`.
|
||||
|
||||
@@ -123,7 +123,6 @@ bun upgrade --canary
|
||||
- [HTMLRewriter](https://bun.sh/docs/api/html-rewriter)
|
||||
- [Testing](https://bun.sh/docs/api/test)
|
||||
- [Utils](https://bun.sh/docs/api/utils)
|
||||
- [DNS](https://bun.sh/docs/api/dns)
|
||||
- [Node-API](https://bun.sh/docs/api/node-api)
|
||||
|
||||
## Contributing
|
||||
|
||||
@@ -1,14 +0,0 @@
|
||||
import { bench, run } from "./runner.mjs";
|
||||
import { Buffer } from "node:buffer";
|
||||
|
||||
const bigBuffer = Buffer.from("hello world".repeat(10000));
|
||||
const converted = bigBuffer.toString("base64");
|
||||
bench("Buffer.toString('base64')", () => {
|
||||
return bigBuffer.toString("base64");
|
||||
});
|
||||
|
||||
// bench("Buffer.from(str, 'base64')", () => {
|
||||
// return Buffer.from(converted, "base64");
|
||||
// });
|
||||
|
||||
await run();
|
||||
29
bench/snippets/buffer-to-string.mjs
Normal file
29
bench/snippets/buffer-to-string.mjs
Normal file
@@ -0,0 +1,29 @@
|
||||
import { bench, run } from "./runner.mjs";
|
||||
import { Buffer } from "node:buffer";
|
||||
import crypto from "node:crypto";
|
||||
|
||||
const bigBuffer = Buffer.from("hello world".repeat(10000));
|
||||
const converted = bigBuffer.toString("base64");
|
||||
const uuid = crypto.randomBytes(16);
|
||||
|
||||
bench(`Buffer(${bigBuffer.byteLength}).toString('base64')`, () => {
|
||||
return bigBuffer.toString("base64");
|
||||
});
|
||||
|
||||
bench(`Buffer(${uuid.byteLength}).toString('base64')`, () => {
|
||||
return uuid.toString("base64");
|
||||
});
|
||||
|
||||
bench(`Buffer(${bigBuffer.byteLength}).toString('hex')`, () => {
|
||||
return bigBuffer.toString("hex");
|
||||
});
|
||||
|
||||
bench(`Buffer(${uuid.byteLength}).toString('hex')`, () => {
|
||||
return uuid.toString("hex");
|
||||
});
|
||||
|
||||
bench(`Buffer(${bigBuffer.byteLength}).toString('ascii')`, () => {
|
||||
return bigBuffer.toString("ascii");
|
||||
});
|
||||
|
||||
await run();
|
||||
37
bench/snippets/rewriter.mjs
Normal file
37
bench/snippets/rewriter.mjs
Normal file
@@ -0,0 +1,37 @@
|
||||
import { bench, run } from "./runner.mjs";
|
||||
|
||||
const blob = new Blob(["<p id='foo'>Hello</p>"]);
|
||||
bench("prepend", async () => {
|
||||
await new HTMLRewriter()
|
||||
.on("p", {
|
||||
element(element) {
|
||||
element.prepend("Hello");
|
||||
},
|
||||
})
|
||||
.transform(new Response(blob))
|
||||
.text();
|
||||
});
|
||||
|
||||
bench("append", async () => {
|
||||
await new HTMLRewriter()
|
||||
.on("p", {
|
||||
element(element) {
|
||||
element.append("Hello");
|
||||
},
|
||||
})
|
||||
.transform(new Response(blob))
|
||||
.text();
|
||||
});
|
||||
|
||||
bench("getAttribute", async () => {
|
||||
await new HTMLRewriter()
|
||||
.on("p", {
|
||||
element(element) {
|
||||
element.getAttribute("id");
|
||||
},
|
||||
})
|
||||
.transform(new Response(blob))
|
||||
.text();
|
||||
});
|
||||
|
||||
await run();
|
||||
11
build.zig
11
build.zig
@@ -182,16 +182,16 @@ pub fn build(b: *Build) !void {
|
||||
is_debug_build = optimize == OptimizeMode.Debug;
|
||||
const bun_executable_name = if (optimize == std.builtin.OptimizeMode.Debug) "bun-debug" else "bun";
|
||||
const root_src = if (target.getOsTag() == std.Target.Os.Tag.freestanding)
|
||||
"src/main_wasm.zig"
|
||||
"root_wasm.zig"
|
||||
else
|
||||
"root.zig";
|
||||
|
||||
const min_version: std.SemanticVersion = if (target.getOsTag() != .freestanding)
|
||||
const min_version: std.SemanticVersion = if (target.getOsTag() != .freestanding and !target.isWindows())
|
||||
target.getOsVersionMin().semver
|
||||
else
|
||||
.{ .major = 0, .minor = 0, .patch = 0 };
|
||||
|
||||
const max_version: std.SemanticVersion = if (target.getOsTag() != .freestanding)
|
||||
const max_version: std.SemanticVersion = if (target.getOsTag() != .freestanding and !target.isWindows())
|
||||
target.getOsVersionMax().semver
|
||||
else
|
||||
.{ .major = 0, .minor = 0, .patch = 0 };
|
||||
@@ -218,7 +218,6 @@ pub fn build(b: *Build) !void {
|
||||
.argv = &.{
|
||||
"git",
|
||||
"rev-parse",
|
||||
"--short",
|
||||
"HEAD",
|
||||
},
|
||||
.cwd = b.pathFromRoot("."),
|
||||
@@ -322,7 +321,7 @@ pub fn build(b: *Build) !void {
|
||||
const wasm = b.step("bun-wasm", "Build WASM");
|
||||
var wasm_step = b.addStaticLibrary(.{
|
||||
.name = "bun-wasm",
|
||||
.root_source_file = FileSource.relative("src/main_wasm.zig"),
|
||||
.root_source_file = FileSource.relative("root_wasm.zig"),
|
||||
.target = target,
|
||||
.optimize = optimize,
|
||||
});
|
||||
@@ -332,6 +331,8 @@ pub fn build(b: *Build) !void {
|
||||
// wasm_step.link_emit_relocs = true;
|
||||
// wasm_step.single_threaded = true;
|
||||
try configureObjectStep(b, wasm_step, @TypeOf(target), target, obj.main_pkg_path.?);
|
||||
var build_opts = default_build_options;
|
||||
wasm_step.addOptions("build_options", build_opts.step(b));
|
||||
}
|
||||
|
||||
{
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
FROM debian:bullseye-slim AS build
|
||||
# Not officially supported (yet)
|
||||
|
||||
# https://github.com/oven-sh/bun/releases
|
||||
ARG BUN_VERSION=latest
|
||||
|
||||
@@ -77,7 +77,7 @@ The standard `Bun.hash` functions uses [Wyhash](https://github.com/wangyi-fudan/
|
||||
|
||||
```ts
|
||||
Bun.hash("some data here");
|
||||
// 976213160445840
|
||||
// 11562320457524636935n
|
||||
```
|
||||
|
||||
The input can be a string, `TypedArray`, `DataView`, `ArrayBuffer`, or `SharedArrayBuffer`.
|
||||
@@ -91,14 +91,14 @@ Bun.hash(arr.buffer);
|
||||
Bun.hash(new DataView(arr.buffer));
|
||||
```
|
||||
|
||||
Optionally, an integer seed can be specified as the second parameter.
|
||||
Optionally, an integer seed can be specified as the second parameter. For 64-bit hashes seeds above `Number.MAX_SAFE_INTEGER` should be given as BigInt to avoid loss of precision.
|
||||
|
||||
```ts
|
||||
Bun.hash("some data here", 1234);
|
||||
// 1173484059023252
|
||||
// 15724820720172937558n
|
||||
```
|
||||
|
||||
Additional hashing algorithms are available as properties on `Bun.hash`. The API is the same for each.
|
||||
Additional hashing algorithms are available as properties on `Bun.hash`. The API is the same for each, only changing the return type from number for 32-bit hashes to bigint for 64-bit hashes.
|
||||
|
||||
```ts
|
||||
Bun.hash.wyhash("data", 1234); // equivalent to Bun.hash()
|
||||
@@ -107,6 +107,7 @@ Bun.hash.adler32("data", 1234);
|
||||
Bun.hash.cityHash32("data", 1234);
|
||||
Bun.hash.cityHash64("data", 1234);
|
||||
Bun.hash.murmur32v3("data", 1234);
|
||||
Bun.hash.murmur32v2("data", 1234);
|
||||
Bun.hash.murmur64v2("data", 1234);
|
||||
```
|
||||
|
||||
|
||||
@@ -428,6 +428,21 @@ const str = Bun.inspect(arr);
|
||||
// => "Uint8Array(3) [ 1, 2, 3 ]"
|
||||
```
|
||||
|
||||
## `Bun.inspect.custom`
|
||||
|
||||
This is the symbol that Bun uses to implement `Bun.inspect`. You can override this to customize how your objects are printed. It is identical to `util.inspect.custom` in Node.js.
|
||||
|
||||
```ts
|
||||
class Foo {
|
||||
[Bun.inspect.custom]() {
|
||||
return "foo";
|
||||
}
|
||||
}
|
||||
|
||||
const foo = new Foo();
|
||||
console.log(foo); // => "foo"
|
||||
```
|
||||
|
||||
## `Bun.nanoseconds()`
|
||||
|
||||
Returns the number of nanoseconds since the current `bun` process started, as a `number`. Useful for high-precision timing and benchmarking.
|
||||
|
||||
@@ -88,7 +88,7 @@ worker.addEventListener("message", event => {
|
||||
|
||||
## Terminating a worker
|
||||
|
||||
A `Worker` instance terminate automatically when Bun's process exits. To terminate a `Worker` sooner, call `worker.terminate()`.
|
||||
A `Worker` instance terminates automatically once its event loop has no work left to do. Attaching a `"message"` listener on the global or any `MessagePort`s will keep the event loop alive. To forcefully terminate a `Worker`, call `worker.terminate()`.
|
||||
|
||||
```ts
|
||||
const worker = new Worker(new URL("worker.ts", import.meta.url).href);
|
||||
@@ -97,18 +97,20 @@ const worker = new Worker(new URL("worker.ts", import.meta.url).href);
|
||||
worker.terminate();
|
||||
```
|
||||
|
||||
This will cause the worker to exit as soon as possible.
|
||||
|
||||
### `process.exit()`
|
||||
|
||||
A worker can terminate itself with `process.exit()`. This does not terminate the main process. Like in Node.js, `process.on('beforeExit', callback)` and `process.on('exit', callback)` are emitted on the worker thread (and not on the main thread).
|
||||
A worker can terminate itself with `process.exit()`. This does not terminate the main process. Like in Node.js, `process.on('beforeExit', callback)` and `process.on('exit', callback)` are emitted on the worker thread (and not on the main thread), and the exit code is passed to the `"close"` event.
|
||||
|
||||
### `"close"`
|
||||
|
||||
The `"close"` event is emitted when a worker has been terminated. It can take some time for the worker to actually terminate, so this event is emitted when the worker has been marked as terminated.
|
||||
The `"close"` event is emitted when a worker has been terminated. It can take some time for the worker to actually terminate, so this event is emitted when the worker has been marked as terminated. The `CloseEvent` will contain the exit code passed to `process.exit()`, or 0 if closed for other reasons.
|
||||
|
||||
```ts
|
||||
const worker = new Worker(new URL("worker.ts", import.meta.url).href);
|
||||
|
||||
worker.addEventListener("close", () => {
|
||||
worker.addEventListener("close", event => {
|
||||
console.log("worker is being closed");
|
||||
});
|
||||
```
|
||||
@@ -117,14 +119,27 @@ This event does not exist in browsers.
|
||||
|
||||
## Managing lifetime
|
||||
|
||||
By default, an active `Worker` will _not_ keep the main (spawning) process alive. Once the main script finishes, the main thread will terminate, shutting down any workers it created.
|
||||
By default, an active `Worker` will keep the main (spawning) process alive, so async tasks like `setTimeout` and promises will keep the process alive. Attaching `message` listeners will also keep the `Worker` alive.
|
||||
|
||||
### `worker.ref`
|
||||
### `worker.unref()`
|
||||
|
||||
To keep the process alive until the `Worker` terminates, call `worker.ref()`. This couples the lifetime of the worker to the lifetime of the main process.
|
||||
To stop a running worker from keeping the process alive, call `worker.unref()`. This decouples the lifetime of the worker from the lifetime of the main process, and is equivalent to what Node.js' `worker_threads` does.
|
||||
|
||||
```ts
|
||||
const worker = new Worker(new URL("worker.ts", import.meta.url).href);
|
||||
worker.unref();
|
||||
```
|
||||
|
||||
Note: `worker.unref()` is not available in browsers.
|
||||
|
||||
### `worker.ref()`
|
||||
|
||||
To keep the process alive until the `Worker` terminates, call `worker.ref()`. A ref'd worker is the default behavior, and still needs something going on in the event loop (such as a `"message"` listener) for the worker to continue running.
|
||||
|
||||
```ts
|
||||
const worker = new Worker(new URL("worker.ts", import.meta.url).href);
|
||||
worker.unref();
|
||||
// later...
|
||||
worker.ref();
|
||||
```
|
||||
|
||||
@@ -132,22 +147,11 @@ Alternatively, you can also pass an `options` object to `Worker`:
|
||||
|
||||
```ts
|
||||
const worker = new Worker(new URL("worker.ts", import.meta.url).href, {
|
||||
ref: true,
|
||||
ref: false,
|
||||
});
|
||||
```
|
||||
|
||||
### `worker.unref`
|
||||
|
||||
To stop keeping the process alive, call `worker.unref()`.
|
||||
|
||||
```ts
|
||||
const worker = new Worker(new URL("worker.ts", import.meta.url).href);
|
||||
worker.ref();
|
||||
// ...later on
|
||||
worker.unref();
|
||||
```
|
||||
|
||||
Note: `worker.ref()` and `worker.unref()` do not exist in browsers.
|
||||
Note: `worker.ref()` is not available in browsers.
|
||||
|
||||
## Memory usage with `smol`
|
||||
|
||||
|
||||
@@ -6,15 +6,17 @@ Bun provides a universal plugin API that can be used to extend both the _runtime
|
||||
|
||||
Plugins intercept imports and perform custom loading logic: reading files, transpiling code, etc. They can be used to add support for additional file types, like `.scss` or `.yaml`. In the context of Bun's bundler, plugins can be used to implement framework-level features like CSS extraction, macros, and client-server code co-location.
|
||||
|
||||
For more complete documentation of the Plugin API, see [Runtime > Plugins](/docs/runtime/plugins).
|
||||
|
||||
## Usage
|
||||
|
||||
A plugin is defined as simple JavaScript object containing a `name` property and a `setup` function. Register a plugin with Bun using the `plugin` function.
|
||||
|
||||
```tsx#yamlPlugin.ts
|
||||
```tsx#myPlugin.ts
|
||||
import type { BunPlugin } from "bun";
|
||||
|
||||
const myPlugin: BunPlugin = {
|
||||
name: "YAML loader",
|
||||
name: "Custom loader",
|
||||
setup(build) {
|
||||
// implementation
|
||||
},
|
||||
@@ -30,307 +32,3 @@ Bun.build({
|
||||
plugins: [myPlugin],
|
||||
});
|
||||
```
|
||||
|
||||
<!-- It can also be "registered" with the Bun runtime using the `Bun.plugin()` function. Once registered, the currently executing `bun` process will incorporate the plugin into its module resolution algorithm.
|
||||
|
||||
```ts
|
||||
import {plugin} from "bun";
|
||||
|
||||
plugin(myPlugin);
|
||||
``` -->
|
||||
|
||||
## `--preload`
|
||||
|
||||
To consume this plugin, add this file to the `preload` option in your [`bunfig.toml`](/docs/runtime/configuration). Bun automatically loads the files/modules specified in `preload` before running a file.
|
||||
|
||||
```toml
|
||||
preload = ["./yamlPlugin.ts"]
|
||||
```
|
||||
|
||||
To preload files during `bun test`:
|
||||
|
||||
```toml
|
||||
[test]
|
||||
preload = ["./loader.ts"]
|
||||
```
|
||||
|
||||
{% details summary="Usage without preload" %}
|
||||
|
||||
Alternatively, you can import this file manually at the top of your project's entrypoint, before any application code is imported.
|
||||
|
||||
```ts#app.ts
|
||||
import "./yamlPlugin.ts";
|
||||
import { config } from "./config.yml";
|
||||
|
||||
console.log(config);
|
||||
```
|
||||
|
||||
{% /details %}
|
||||
|
||||
## Third-party plugins
|
||||
|
||||
By convention, third-party plugins intended for consumption should export a factory function that accepts some configuration and returns a plugin object.
|
||||
|
||||
```ts
|
||||
import { plugin } from "bun";
|
||||
import fooPlugin from "bun-plugin-foo";
|
||||
|
||||
plugin(
|
||||
fooPlugin({
|
||||
// configuration
|
||||
}),
|
||||
);
|
||||
|
||||
// application code
|
||||
```
|
||||
|
||||
Bun's plugin API is based on [esbuild](https://esbuild.github.io/plugins). Only [a subset](/docs/bundler/vs-esbuild#plugin-api) of the esbuild API is implemented, but some esbuild plugins "just work" in Bun, like the official [MDX loader](https://mdxjs.com/packages/esbuild/):
|
||||
|
||||
```jsx
|
||||
import { plugin } from "bun";
|
||||
import mdx from "@mdx-js/esbuild";
|
||||
|
||||
plugin(mdx());
|
||||
|
||||
import { renderToStaticMarkup } from "react-dom/server";
|
||||
import Foo from "./bar.mdx";
|
||||
console.log(renderToStaticMarkup(<Foo />));
|
||||
```
|
||||
|
||||
## Loaders
|
||||
|
||||
<!-- The plugin logic is implemented in the `setup` function using the builder provided as the first argument (`build` in the example above). The `build` variable provides two methods: `onResolve` and `onLoad`. -->
|
||||
|
||||
<!-- ## `onResolve` -->
|
||||
|
||||
<!-- The `onResolve` method lets you intercept imports that match a particular regex and modify the resolution behavior, such as re-mapping the import to another file. In the simplest case, you can simply remap the matched import to a new path.
|
||||
|
||||
```ts
|
||||
import { plugin } from "bun";
|
||||
|
||||
plugin({
|
||||
name: "YAML loader",
|
||||
setup(build) {
|
||||
build.onResolve();
|
||||
// implementation
|
||||
},
|
||||
});
|
||||
``` -->
|
||||
|
||||
<!--
|
||||
Internally, Bun's transpiler automatically turns `plugin()` calls into separate files (at most 1 per file). This lets loaders activate before the rest of your application runs with zero configuration. -->
|
||||
|
||||
Plugins are primarily used to extend Bun with loaders for additional file types. Let's look at a simple plugin that implements a loader for `.yaml` files.
|
||||
|
||||
```ts#yamlPlugin.ts
|
||||
import { plugin } from "bun";
|
||||
|
||||
plugin({
|
||||
name: "YAML",
|
||||
async setup(build) {
|
||||
const { load } = await import("js-yaml");
|
||||
const { readFileSync } = await import("fs");
|
||||
|
||||
// when a .yaml file is imported...
|
||||
build.onLoad({ filter: /\.(yaml|yml)$/ }, (args) => {
|
||||
|
||||
// read and parse the file
|
||||
const text = readFileSync(args.path, "utf8");
|
||||
const exports = load(text) as Record<string, any>;
|
||||
|
||||
// and returns it as a module
|
||||
return {
|
||||
exports,
|
||||
loader: "object", // special loader for JS objects
|
||||
};
|
||||
});
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
With this plugin, data can be directly imported from `.yaml` files.
|
||||
|
||||
{% codetabs %}
|
||||
|
||||
```ts#index.ts
|
||||
import "./yamlPlugin.ts"
|
||||
import {name, releaseYear} from "./data.yml"
|
||||
|
||||
console.log(name, releaseYear);
|
||||
```
|
||||
|
||||
```yaml#data.yml
|
||||
name: Fast X
|
||||
releaseYear: 2023
|
||||
```
|
||||
|
||||
{% /codetabs %}
|
||||
|
||||
Note that the returned object has a `loader` property. This tells Bun which of its internal loaders should be used to handle the result. Even though we're implementing a loader for `.yaml`, the result must still be understandable by one of Bun's built-in loaders. It's loaders all the way down.
|
||||
|
||||
In this case we're using `"object"`—a built-in loader (intended for use by plugins) that converts a plain JavaScript object to an equivalent ES module. Any of Bun's built-in loaders are supported; these same loaders are used by Bun internally for handling files of various kinds. The table below is a quick reference; refer to [Bundler > Loaders](/docs/bundler/loaders) for complete documentation.
|
||||
|
||||
{% table %}
|
||||
|
||||
- Loader
|
||||
- Extensions
|
||||
- Output
|
||||
|
||||
---
|
||||
|
||||
- `js`
|
||||
- `.mjs` `.cjs`
|
||||
- Transpile to JavaScript files
|
||||
|
||||
---
|
||||
|
||||
- `jsx`
|
||||
- `.js` `.jsx`
|
||||
- Transform JSX then transpile
|
||||
|
||||
---
|
||||
|
||||
- `ts`
|
||||
- `.ts` `.mts` `cts`
|
||||
- Transform TypeScript then transpile
|
||||
|
||||
---
|
||||
|
||||
- `tsx`
|
||||
- `.tsx`
|
||||
- Transform TypeScript, JSX, then transpile
|
||||
|
||||
---
|
||||
|
||||
- `toml`
|
||||
- `.toml`
|
||||
- Parse using Bun's built-in TOML parser
|
||||
|
||||
---
|
||||
|
||||
- `json`
|
||||
- `.json`
|
||||
- Parse using Bun's built-in JSON parser
|
||||
|
||||
---
|
||||
|
||||
- `napi`
|
||||
- `.node`
|
||||
- Import a native Node.js addon
|
||||
|
||||
---
|
||||
|
||||
- `wasm`
|
||||
- `.wasm`
|
||||
- Import a WebAssembly module
|
||||
|
||||
---
|
||||
|
||||
- `object`
|
||||
- _none_
|
||||
- A special loader intended for plugins that converts a plain JavaScript object to an equivalent ES module. Each key in the object corresponds to a named export.
|
||||
|
||||
{% /table %}
|
||||
|
||||
Loading a YAML file is useful, but plugins support more than just data loading. Let's look at a plugin that lets Bun import `*.svelte` files.
|
||||
|
||||
```ts#sveltePlugin.ts
|
||||
import { plugin } from "bun";
|
||||
|
||||
await plugin({
|
||||
name: "svelte loader",
|
||||
async setup(build) {
|
||||
const { compile } = await import("svelte/compiler");
|
||||
const { readFileSync } = await import("fs");
|
||||
|
||||
// when a .svelte file is imported...
|
||||
build.onLoad({ filter: /\.svelte$/ }, ({ path }) => {
|
||||
|
||||
// read and compile it with the Svelte compiler
|
||||
const file = readFileSync(path, "utf8");
|
||||
const contents = compile(file, {
|
||||
filename: path,
|
||||
generate: "ssr",
|
||||
}).js.code;
|
||||
|
||||
// and return the compiled source code as "js"
|
||||
return {
|
||||
contents,
|
||||
loader: "js",
|
||||
};
|
||||
});
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
> Note: in a production implementation, you'd want to cache the compiled output and include additional error handling.
|
||||
|
||||
The object returned from `build.onLoad` contains the compiled source code in `contents` and specifies `"js"` as its loader. That tells Bun to consider the returned `contents` to be a JavaScript module and transpile it using Bun's built-in `js` loader.
|
||||
|
||||
With this plugin, Svelte components can now be directly imported and consumed.
|
||||
|
||||
```js
|
||||
import "./sveltePlugin.ts";
|
||||
import MySvelteComponent from "./component.svelte";
|
||||
|
||||
console.log(MySvelteComponent.render());
|
||||
```
|
||||
|
||||
## Reading the config
|
||||
|
||||
Plugins can read and write to the [build config](/docs/bundler#api) with `build.config`.
|
||||
|
||||
```ts
|
||||
Bun.build({
|
||||
entrypoints: ["./app.ts"],
|
||||
outdir: "./dist",
|
||||
sourcemap: "external",
|
||||
plugins: [
|
||||
{
|
||||
name: "demo",
|
||||
setup(build) {
|
||||
console.log(build.config.sourcemap); // "external"
|
||||
|
||||
build.config.minify = true; // enable minification
|
||||
|
||||
// `plugins` is readonly
|
||||
console.log(`Number of plugins: ${build.config.plugins.length}`);
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
```ts
|
||||
namespace Bun {
|
||||
function plugin(plugin: {
|
||||
name: string;
|
||||
setup: (build: PluginBuilder) => void;
|
||||
}): void;
|
||||
}
|
||||
|
||||
type PluginBuilder = {
|
||||
onResolve: (
|
||||
args: { filter: RegExp; namespace?: string },
|
||||
callback: (args: { path: string; importer: string }) => {
|
||||
path: string;
|
||||
namespace?: string;
|
||||
} | void,
|
||||
) => void;
|
||||
onLoad: (
|
||||
args: { filter: RegExp; namespace?: string },
|
||||
callback: (args: { path: string }) => {
|
||||
loader?: Loader;
|
||||
contents?: string;
|
||||
exports?: Record<string, any>;
|
||||
},
|
||||
) => void;
|
||||
config: BuildConfig;
|
||||
};
|
||||
|
||||
type Loader = "js" | "jsx" | "ts" | "tsx" | "json" | "toml" | "object";
|
||||
```
|
||||
|
||||
The `onLoad` method optionally accepts a `namespace` in addition to the `filter` regex. This namespace will be used to prefix the import in transpiled code; for instance, a loader with a `filter: /\.yaml$/` and `namespace: "yaml:"` will transform an import from `./myfile.yaml` into `yaml:./myfile.yaml`.
|
||||
|
||||
@@ -897,7 +897,7 @@ const myPlugin: BunPlugin = {
|
||||
};
|
||||
```
|
||||
|
||||
The `builder` object provides some methods for hooking into parts of the bundling process. Bun implements `onResolve` and `onLoad`; it does not yet implement the esbuild hooks `onStart`, `onEnd`, and `onDispose`, and `resolve` utilities. `initialOptions` is partially implemented, being read-only and only having a subset of esbuild's options; use [`config`](/docs/bundler/plugins#reading-the-config) (same thing but with Bun's `BuildConfig` format) instead.
|
||||
The `builder` object provides some methods for hooking into parts of the bundling process. Bun implements `onResolve` and `onLoad`; it does not yet implement the esbuild hooks `onStart`, `onEnd`, and `onDispose`, and `resolve` utilities. `initialOptions` is partially implemented, being read-only and only having a subset of esbuild's options; use [`config`](/docs/bundler/plugins) (same thing but with Bun's `BuildConfig` format) instead.
|
||||
|
||||
```ts
|
||||
import type { BunPlugin } from "bun";
|
||||
|
||||
@@ -30,14 +30,50 @@ The runner recursively searches the working directory for files that match the f
|
||||
- `*.spec.{js|jsx|ts|tsx}`
|
||||
- `*_spec.{js|jsx|ts|tsx}`
|
||||
|
||||
You can filter the set of tests to run by passing additional positional arguments to `bun test`. Any file in the directory with an _absolute path_ that contains one of the filters will run. Commonly, these filters will be file or directory names; glob patterns are not yet supported.
|
||||
You can filter the set of _test files_ to run by passing additional positional arguments to `bun test`. Any test file with a path that matches one of the filters will run. Commonly, these filters will be file or directory names; glob patterns are not yet supported.
|
||||
|
||||
```bash
|
||||
$ bun test <filter> <filter> ...
|
||||
```
|
||||
|
||||
To filter by _test name_, use the `-t`/`--test-name-pattern` flag.
|
||||
|
||||
```sh
|
||||
# run all tests or test suites with "addition" in the name
|
||||
$ bun test --test-name-pattern addition
|
||||
```
|
||||
|
||||
The test runner runs all tests in a single process. It loads all `--preload` scripts (see [Lifecycle](/docs/test/lifecycle) for details), then runs all tests. If a test fails, the test runner will exit with a non-zero exit code.
|
||||
|
||||
## Timeouts
|
||||
|
||||
Use the `--timeout` flag to specify a _per-test_ timeout in milliseconds. If a test times out, it will be marked as failed. The default value is `5000`.
|
||||
|
||||
```bash
|
||||
# default value is 5000
|
||||
$ bun test --timeout 20
|
||||
```
|
||||
|
||||
## Rerun tests
|
||||
|
||||
Use the `--rerun-each` flag to run each test multiple times. This is useful for detecting flaky or non-deterministic test failures.
|
||||
|
||||
```sh
|
||||
$ bun test --rerun-each 100
|
||||
```
|
||||
|
||||
## Bail out with `--bail`
|
||||
|
||||
Use the `--bail` flag to abort the test run early after a pre-determined number of test failures. By default Bun will run all tests and report all failures, but sometimes in CI environments it's preferable to terminate earlier to reduce CPU usage.
|
||||
|
||||
```sh
|
||||
# bail after 1 failure
|
||||
$ bun test --bail
|
||||
|
||||
# bail after 10 failures
|
||||
$ bun test --bail 10
|
||||
```
|
||||
|
||||
## Watch mode
|
||||
|
||||
Similar to `bun run`, you can pass the `--watch` flag to `bun test` to watch for changes and re-run tests.
|
||||
|
||||
76
docs/guides/ecosystem/astro.md
Normal file
76
docs/guides/ecosystem/astro.md
Normal file
@@ -0,0 +1,76 @@
|
||||
---
|
||||
name: Build an app with Astro and Bun
|
||||
---
|
||||
|
||||
Initialize a fresh Astro app with `bunx create-astro`. The `create-astro` package detects when you are using `bunx` and will automatically install dependencies using `bun`.
|
||||
|
||||
```sh
|
||||
$ bunx create-astro
|
||||
╭─────╮ Houston:
|
||||
│ ◠ ◡ ◠ We're glad to have you on board.
|
||||
╰─────╯
|
||||
|
||||
astro v2.10.5 Launch sequence initiated.
|
||||
|
||||
dir Where should we create your new project?
|
||||
./fumbling-field
|
||||
|
||||
tmpl How would you like to start your new project?
|
||||
Use blog template
|
||||
✔ Template copied
|
||||
|
||||
deps Install dependencies?
|
||||
Yes
|
||||
✔ Dependencies installed
|
||||
|
||||
ts Do you plan to write TypeScript?
|
||||
Yes
|
||||
|
||||
use How strict should TypeScript be?
|
||||
Strict
|
||||
✔ TypeScript customized
|
||||
|
||||
git Initialize a new git repository?
|
||||
Yes
|
||||
✔ Git initialized
|
||||
|
||||
next Liftoff confirmed. Explore your project!
|
||||
|
||||
Enter your project directory using cd ./fumbling-field
|
||||
Run `bun run dev` to start the dev server. CTRL+C to stop.
|
||||
Add frameworks like react or tailwind using astro add.
|
||||
|
||||
Stuck? Join us at https://astro.build/chat
|
||||
|
||||
╭─────╮ Houston:
|
||||
│ ◠ ◡ ◠ Good luck out there, astronaut! 🚀
|
||||
╰─────╯
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
Start the dev server with `bunx`.
|
||||
|
||||
By default, Bun will run the dev server with Node.js. To use the Bun runtime instead, use the `--bun` flag.
|
||||
|
||||
```sh
|
||||
$ bunx --bun astro dev
|
||||
🚀 astro v2.10.5 started in 200ms
|
||||
|
||||
┃ Local http://localhost:3000/
|
||||
┃ Network use --host to expose
|
||||
|
||||
01:48:34 PM [content] Watching src/content/ for changes
|
||||
01:48:34 PM [content] Types generated
|
||||
01:48:34 PM [astro] update /.astro/types.d.ts
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
Open [http://localhost:3000](http://localhost:3000) with your browser to see the result. Astro will hot-reload your app as you edit your source files.
|
||||
|
||||
{% image src="https://github.com/vitejs/vite/assets/3084745/bb1d5063-32f4-4598-b33e-50b44a1c4e8a" caption="An Astro starter app running on Bun" %}
|
||||
|
||||
---
|
||||
|
||||
Refer to the [Astro docs](https://docs.astro.build/en/getting-started/) for complete documentation.
|
||||
@@ -29,3 +29,7 @@ To start the dev server, run `bun run dev` from the project root.
|
||||
$ cd my-app
|
||||
$ bun run dev
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
Open [http://localhost:3000](http://localhost:3000) with your browser to see the result. Any changes you make to `pages/index.tsx` will be hot-reloaded in the browser.
|
||||
|
||||
65
docs/guides/ecosystem/nuxt.md
Normal file
65
docs/guides/ecosystem/nuxt.md
Normal file
@@ -0,0 +1,65 @@
|
||||
---
|
||||
name: Build an app with Nuxt and Bun
|
||||
---
|
||||
|
||||
Bun supports [Nuxt](https://nuxt.com) out of the box. Initialize a Nuxt app with the official `nuxi` CLI.
|
||||
|
||||
```sh
|
||||
$ bunx nuxi init my-nuxt-app
|
||||
Nuxi 3.6.5
|
||||
✨ Nuxt project is created with v3 template. Next steps:
|
||||
› cd my-nuxt-app
|
||||
› Install dependencies with npm install or yarn install or pnpm install
|
||||
› Start development server with npm run dev or yarn dev or pnpm run dev
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
Then move into the project directory and install dependencies.
|
||||
|
||||
```sh
|
||||
$ cd my-app
|
||||
$ bun install
|
||||
bun install v0.8.0
|
||||
+ @nuxt/devtools@0.8.0
|
||||
+ @types/node@18.17.6
|
||||
+ nuxt@3.6.5
|
||||
Nuxi 3.6.5
|
||||
✔ Types generated in .nuxt
|
||||
|
||||
776 packages installed [1.72s]
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
To start the dev server, run `bun run dev` from the project root. This will execute the `nuxt dev` command (as defined in the `"dev"` script in `package.json`).
|
||||
|
||||
{% callout %}
|
||||
The `nuxt` CLI uses Node.js by default; passing the `--bun` flag forces the dev server to use the Bun runtime instead.
|
||||
{% /callout %}
|
||||
|
||||
```
|
||||
$ bun --bun run dev
|
||||
$ nuxt dev
|
||||
Nuxi 3.6.5
|
||||
Nuxt 3.6.5 with Nitro 2.5.2
|
||||
> Local: http://localhost:3000/
|
||||
> Network: http://192.168.0.21:3000/
|
||||
> Network: http://[fd8a:d31d:481c:4883:1c64:3d90:9f83:d8a2]:3000/
|
||||
|
||||
✔ Nuxt DevTools is enabled v0.8.0 (experimental)
|
||||
ℹ Vite client warmed up in 547ms
|
||||
✔ Nitro built in 244 ms
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
Once the dev server spins up, open [http://localhost:3000](http://localhost:3000) to see the app. The app will render Nuxt's built-in `WelcomePage` template component.
|
||||
|
||||
To start developing your app, replace `<WelcomePage />` in `app.vue` with your own UI.
|
||||
|
||||
{% image src="https://github.com/oven-sh/bun/assets/3084745/2c683ecc-3298-4bb0-b8c0-cf4cfaea1daa" caption="Demo Nuxt app running on localhost" /%}
|
||||
|
||||
---
|
||||
|
||||
Refer to the [Nuxt website](https://nuxt.com/docs) for complete documentation.
|
||||
60
docs/guides/ecosystem/remix.md
Normal file
60
docs/guides/ecosystem/remix.md
Normal file
@@ -0,0 +1,60 @@
|
||||
---
|
||||
name: Build an app with Remix and Bun
|
||||
---
|
||||
|
||||
{% callout %}
|
||||
Remix currently relies on Node.js APIs that Bun does not yet implement. The guide below uses Bun to initialize a project and install dependencies, but it uses Node.js to run the dev server.
|
||||
{% /callout %}
|
||||
|
||||
---
|
||||
|
||||
Initialize a Remix app with `create-remix`.
|
||||
|
||||
```sh
|
||||
$ bunx create-remix
|
||||
|
||||
remix v1.19.3 💿 Let's build a better website...
|
||||
|
||||
dir Where should we create your new project?
|
||||
./my-app
|
||||
|
||||
◼ Using basic template See https://remix.run/docs/pages/templates for more
|
||||
✔ Template copied
|
||||
|
||||
git Initialize a new git repository?
|
||||
Yes
|
||||
|
||||
deps Install dependencies with bun?
|
||||
Yes
|
||||
|
||||
✔ Dependencies installed
|
||||
|
||||
✔ Git initialized
|
||||
|
||||
done That's it!
|
||||
|
||||
Enter your project directory using cd ./my-app
|
||||
Check out README.md for development and deploy instructions.
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
To start the dev server, run `bun run dev` from the project root. This will start the dev server using the `remix dev` command. Note that Node.js will be used to run the dev server.
|
||||
|
||||
```sh
|
||||
$ cd my-app
|
||||
$ bun run dev
|
||||
$ remix dev
|
||||
|
||||
💿 remix dev
|
||||
|
||||
info building...
|
||||
info built (263ms)
|
||||
Remix App Server started at http://localhost:3000 (http://172.20.0.143:3000)
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
Open [http://localhost:3000](http://localhost:3000) to see the app. Any changes you make to `app/routes/_index.tsx` will be hot-reloaded in the browser.
|
||||
|
||||
{% image src="https://github.com/oven-sh/bun/assets/3084745/c26f1059-a5d4-4c0b-9a88-d9902472fd77" caption="Remix app running on localhost" /%}
|
||||
58
docs/guides/ecosystem/solidstart.md
Normal file
58
docs/guides/ecosystem/solidstart.md
Normal file
@@ -0,0 +1,58 @@
|
||||
---
|
||||
name: Build an app with SolidStart and Bun
|
||||
---
|
||||
|
||||
{% callout %}
|
||||
SolidStart currently relies on Node.js APIs that Bun does not yet implement. The guide below uses Bun to initialize a project and install dependencies, but it uses Node.js to run the dev server.
|
||||
{% /callout %}
|
||||
|
||||
---
|
||||
|
||||
Initialize a SolidStart app with `create-solid`.
|
||||
|
||||
```sh
|
||||
$ bunx create-solid my-app
|
||||
create-solid version 0.2.31
|
||||
|
||||
Welcome to the SolidStart setup wizard!
|
||||
|
||||
There are definitely bugs and some feature might not work yet.
|
||||
If you encounter an issue, have a look at
|
||||
https://github.com/solidjs/solid-start/issues and open a new one,
|
||||
if it is not already tracked.
|
||||
|
||||
✔ Which template do you want to use? › todomvc
|
||||
✔ Server Side Rendering? … yes
|
||||
✔ Use TypeScript? … yes
|
||||
cloned solidjs/solid-start#main to /path/to/my-app/.solid-start
|
||||
✔ Copied project files
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
As instructed by the `create-solid` CLI, let's install our dependencies.
|
||||
|
||||
```sh
|
||||
$ cd my-app
|
||||
$ bun install
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
Then run the development server.
|
||||
|
||||
```sh
|
||||
$ bun run dev
|
||||
# or, equivalently
|
||||
$ bunx solid-start dev
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
Open [localhost:3000](http://localhost:3000). Any changes you make to `src/routes/index.tsx` will be hot-reloaded automatically.
|
||||
|
||||
{% image src="https://github.com/oven-sh/bun/assets/3084745/1e8043c4-49d1-498c-9add-c1eaab6c7167" alt="SolidStart demo app" /%}
|
||||
|
||||
---
|
||||
|
||||
Refer to the [SolidStart website](https://start.solidjs.com/getting-started/what-is-solidstart) for complete framework documentation.
|
||||
@@ -16,7 +16,7 @@ Use `bun init` to create an empty project.
|
||||
$ mkdir myapp
|
||||
$ cd myapp
|
||||
$ bun init
|
||||
$ bun add @stricjs/router
|
||||
$ bun add @stricjs/router @stricjs/utils
|
||||
```
|
||||
|
||||
---
|
||||
@@ -32,12 +32,14 @@ export default new Router()
|
||||
|
||||
---
|
||||
|
||||
To serve static files from `/public/*`:
|
||||
To serve static files from `/public`:
|
||||
|
||||
```ts#index.ts
|
||||
import { dir } from '@stricjs/utils';
|
||||
|
||||
export default new Router()
|
||||
.get('/', () => new Response('Hi'))
|
||||
.get('/public/*', stream('.'));
|
||||
.get('/*', dir('./public'));
|
||||
```
|
||||
|
||||
---
|
||||
@@ -50,4 +52,4 @@ $ bun --watch run index.ts
|
||||
|
||||
---
|
||||
|
||||
For more info, see Stric's [documentation](https://stricjs.gitbook.io/docs).
|
||||
For more info, see Stric's [documentation](https://stricjs.netlify.app).
|
||||
|
||||
65
docs/guides/ecosystem/sveltekit.md
Normal file
65
docs/guides/ecosystem/sveltekit.md
Normal file
@@ -0,0 +1,65 @@
|
||||
---
|
||||
name: Build an app with SvelteKit and Bun
|
||||
---
|
||||
|
||||
Use `bunx` to scaffold your app with the `create-svelte` CLI. Answer the prompts to select a template and set up your development environment.
|
||||
|
||||
```sh
|
||||
$ bunx create-svelte my-app
|
||||
┌ Welcome to SvelteKit!
|
||||
│
|
||||
◇ Which Svelte app template?
|
||||
│ SvelteKit demo app
|
||||
│
|
||||
◇ Add type checking with TypeScript?
|
||||
│ Yes, using TypeScript syntax
|
||||
│
|
||||
◇ Select additional options (use arrow keys/space bar)
|
||||
│ None
|
||||
│
|
||||
└ Your project is ready!
|
||||
|
||||
✔ Typescript
|
||||
Inside Svelte components, use <script lang="ts">
|
||||
|
||||
Install community-maintained integrations:
|
||||
https://github.com/svelte-add/svelte-add
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
Once the project is initialized, `cd` into the new project and install dependencies.
|
||||
|
||||
```sh
|
||||
$ cd my-app
|
||||
$ bun install
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
Then start the development server with `bun --bun run dev`.
|
||||
|
||||
To run the dev server with Node.js instead of Bun, you can omit the `--bun` flag.
|
||||
|
||||
```sh
|
||||
$ bun --bun run dev
|
||||
$ vite dev
|
||||
|
||||
Forced re-optimization of dependencies
|
||||
|
||||
VITE v4.4.9 ready in 895 ms
|
||||
|
||||
➜ Local: http://localhost:5173/
|
||||
➜ Network: use --host to expose
|
||||
➜ press h to show help
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
Visit [http://localhost:5173](http://localhost:5173/) in a browser to see the template app.
|
||||
|
||||
{% image src="https://github.com/oven-sh/bun/assets/3084745/7c76eae8-78f9-44fa-9f15-1bd3ca1a47c0" /%}
|
||||
|
||||
---
|
||||
|
||||
If you edit and save `src/routes/+page.svelte`, you should see your changes hot-reloaded in the browser.
|
||||
@@ -17,7 +17,7 @@ $ bun install --yarn
|
||||
To set this as the default behavior, add the following to your `bunfig.toml` file.
|
||||
|
||||
```toml#bunfig.toml
|
||||
[install]
|
||||
[install.lockfile]
|
||||
print = "yarn"
|
||||
```
|
||||
|
||||
|
||||
82
docs/guides/runtime/web-debugger.md
Normal file
82
docs/guides/runtime/web-debugger.md
Normal file
@@ -0,0 +1,82 @@
|
||||
---
|
||||
name: Debugging Bun with the web debugger
|
||||
---
|
||||
|
||||
Bun speaks the [WebKit Inspector Protocol](https://github.com/oven-sh/bun/blob/main/packages/bun-vscode/types/jsc.d.ts). To enable debugging when running code with Bun, use the `--inspect` flag. For demonstration purposes, consider the following simple web server.
|
||||
|
||||
```ts#server.ts
|
||||
Bun.serve({
|
||||
fetch(req){
|
||||
console.log(req.url);
|
||||
return new Response("Hello, world!");
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
Let's run this file with the `--inspect` flag.
|
||||
|
||||
This automatically starts a WebSocket server on an available port that can be used to introspect the running Bun process. Various debugging tools can connect to this server to provide an interactive debugging experience.
|
||||
|
||||
Bun hosts a web-based debugger at [debug.bun.sh](https://debug.bun.sh). It is a modified version of WebKit's [Web Inspector Interface](https://webkit.org/web-inspector/web-inspector-interface/), which will look familiar to Safari users.
|
||||
|
||||
```sh
|
||||
$ bun --inspect server.ts
|
||||
------------------ Bun Inspector ------------------
|
||||
Listening at:
|
||||
ws://localhost:6499/0tqxs9exrgrm
|
||||
|
||||
Inspect in browser:
|
||||
https://debug.bun.sh/#localhost:6499/0tqxs9exrgrm
|
||||
------------------ Bun Inspector ------------------
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
Open the provided `debug.bun.sh` URL in your browser to start a debugging session. From this interface, you'll be able to view the source code of the running file, view and set breakpoints, and execute code with the built-in console.
|
||||
|
||||
{% image src="https://github.com/oven-sh/bun/assets/3084745/e6a976a8-80cc-4394-8925-539025cc025d" alt="Screenshot of Bun debugger, Console tab" /%}
|
||||
|
||||
---
|
||||
|
||||
Let's set a breakpoint. Navigate to the Sources tab; you should see the code from earlier. Click on the line number `3` to set a breakpoint on our `console.log(req.url)` statement.
|
||||
|
||||
{% image src="https://github.com/oven-sh/bun/assets/3084745/3b69c7e9-25ff-4f9d-acc4-caa736862935" alt="screenshot of Bun debugger" /%}
|
||||
|
||||
---
|
||||
|
||||
Then visit [`http://localhost:3000`](http://localhost:3000) in your web browser. This will send an HTTP request to our `localhost` web server. It will seem like the page isn't loading. Why? Because the program has paused execution at the breakpoint we set earlier.
|
||||
|
||||
Note how the UI has changed.
|
||||
|
||||
{% image src="https://github.com/oven-sh/bun/assets/3084745/8b565e58-5445-4061-9bc4-f41090dfe769" alt="screenshot of Bun debugger" /%}
|
||||
|
||||
---
|
||||
|
||||
At this point there's a lot we can do to introspect the current execution environment. We can use the console at the bottom to run arbitrary code in the context of the program, with full access to the variables in scope at our breakpoint.
|
||||
|
||||
{% image src="https://github.com/oven-sh/bun/assets/3084745/f4312b76-48ba-4a7d-b3b6-6205968ac681" /%}
|
||||
|
||||
---
|
||||
|
||||
On the right side of the Sources pane, we can see all local variables currently in scope, and drill down to see their properties and methods. Here, we're inspecting the `req` variable.
|
||||
|
||||
{% image src="https://github.com/oven-sh/bun/assets/3084745/63d7f843-5180-489c-aa94-87c486e68646" /%}
|
||||
|
||||
---
|
||||
|
||||
In the upper left of the Sources pane, we can control the execution of the program.
|
||||
|
||||
{% image src="https://github.com/oven-sh/bun/assets/3084745/41b76deb-7371-4461-9d5d-81b5a6d2f7a4" /%}
|
||||
|
||||
---
|
||||
|
||||
Here's a cheat sheet explaining the functions of the control flow buttons.
|
||||
|
||||
- _Continue script execution_ — continue running the program until the next breakpoint or exception.
|
||||
- _Step over_ — The program will continue to the next line.
|
||||
- _Step into_ — If the current statement contains a function call, the debugger will "step into" the called function.
|
||||
- _Step out_ — If the current statement is a function call, the debugger will finish executing the call, then "step out" of the function to the location where it was called.
|
||||
|
||||
{% image src="https://github-production-user-asset-6210df.s3.amazonaws.com/3084745/261510346-6a94441c-75d3-413a-99a7-efa62365f83d.png" /%}
|
||||
@@ -4,19 +4,54 @@ Running `bun install` will create a binary lockfile called `bun.lockb`.
|
||||
|
||||
In a word: Performance. Bun’s lockfile saves & loads incredibly quickly, and saves a lot more data than what is typically inside lockfiles.
|
||||
|
||||
#### How do I inspect it?
|
||||
#### How do I inspect Bun's lockfile?
|
||||
|
||||
Run `bun install -y` to generate a Yarn-compatible `yarn.lock` (v1) that can be inspected more easily.
|
||||
|
||||
#### How do I `git diff` Bun's lockfile?
|
||||
|
||||
To add to the global gitattributes file:
|
||||
|
||||
- First try `$XDG_CONFIG_HOME/git/attributes`
|
||||
- If `$XDG_CONFIG_HOME` is not set, try `~/.config/git/attributes`
|
||||
|
||||
For example, on macOS, add the following to `~/.config/git/attributes`:
|
||||
|
||||
```
|
||||
*.lockb diff=lockb
|
||||
```
|
||||
|
||||
Then add the following to `~/.gitconfig`:
|
||||
|
||||
```
|
||||
[diff "lockb"]
|
||||
textconv = bun
|
||||
binary = true
|
||||
```
|
||||
|
||||
To only add to the local gitattributes file:
|
||||
|
||||
```sh
|
||||
$ git config diff.lockb.textconv bun
|
||||
$ git config diff.lockb.binary true
|
||||
```
|
||||
|
||||
**Why this works:**
|
||||
|
||||
- `textconv` tells git to run `bun` on the file before diffing
|
||||
- `binary` tells git to treat the file as binary (so it doesn't try to diff it line-by-line)
|
||||
|
||||
Running `bun` on a lockfile will print a human-readable diff. So we just need to tell `git` to run `bun` on the lockfile before diffing it.
|
||||
|
||||
#### Platform-specific dependencies?
|
||||
|
||||
Bun stores normalized `cpu` and `os` values from npm in the lockfile, along with the resolved packages. It skips downloading, extracting, and installing packages disabled for the current target at runtime. This means the lockfile won’t change between platforms/architectures even if the packages ultimately installed do change.
|
||||
|
||||
#### What does the lockfile store?
|
||||
#### What does Bun's lockfile store?
|
||||
|
||||
Packages, metadata for those packages, the hoisted install order, dependencies for each package, what packages those dependencies resolved to, an integrity hash (if available), what each package was resolved to, and which version (or equivalent).
|
||||
|
||||
#### Why is it fast?
|
||||
#### Why is Bun's lockfile fast?
|
||||
|
||||
It uses linear arrays for all data. [Packages](https://github.com/oven-sh/bun/blob/be03fc273a487ac402f19ad897778d74b6d72963/src/install/install.zig#L1825) are referenced by an auto-incrementing integer ID or a hash of the package name. Strings longer than 8 characters are de-duplicated. Prior to saving on disk, the lockfile is garbage-collected & made deterministic by walking the package tree and cloning the packages in dependency order.
|
||||
|
||||
|
||||
11
docs/nav.ts
11
docs/nav.ts
@@ -110,6 +110,9 @@ export default {
|
||||
page("runtime/nodejs-apis", "Node.js compatibility", {
|
||||
description: `Bun aims for full Node.js compatibility. This page tracks the current compatibility status.`,
|
||||
}),
|
||||
page("runtime/plugins", "Plugins", {
|
||||
description: `Implement custom loaders and module resolution logic with Bun's plugin system.`,
|
||||
}),
|
||||
|
||||
// page("runtime/nodejs", "Node.js compatibility", {
|
||||
// description: `Track the status of Bun's API compatibility with Node.js.`,
|
||||
@@ -172,7 +175,7 @@ export default {
|
||||
description: `Implement custom loaders and module resolution logic with Bun's plugin system.`,
|
||||
}),
|
||||
page("bundler/executables", "Executables", {
|
||||
description: "Compile a TypeScript or JavaScript file to a standalone cross-platform executable",
|
||||
description: "Compile a TypeScript or JavaScript file to a standalone executable",
|
||||
}),
|
||||
page("bundler/macros", "Macros", {
|
||||
description: `Run JavaScript functions at bundle-time and inline the results into your bundle`,
|
||||
@@ -185,13 +188,13 @@ export default {
|
||||
page("cli/test", "`bun test`", {
|
||||
description: "Bun's test runner uses Jest-compatible syntax but runs 100x faster.",
|
||||
}),
|
||||
page("test/hot", "Watch mode", {
|
||||
description: "Reload your tests automatically on change.",
|
||||
}),
|
||||
page("test/writing", "Writing tests", {
|
||||
description:
|
||||
"Write your tests using Jest-like expect matchers, plus setup/teardown hooks, snapshot testing, and more",
|
||||
}),
|
||||
page("test/hot", "Watch mode", {
|
||||
description: "Reload your tests automatically on change.",
|
||||
}),
|
||||
page("test/lifecycle", "Lifecycle hooks", {
|
||||
description: "Add lifecycle hooks to your tests that run before/after each test or test run",
|
||||
}),
|
||||
|
||||
@@ -115,20 +115,18 @@ $ bun install -g @oven/zig
|
||||
$ zigup 0.11.0-dev.4006+bf827d0b5
|
||||
```
|
||||
|
||||
## Building
|
||||
{% callout %}
|
||||
We last updated Zig on **July 18th, 2023**
|
||||
{% /callout %}
|
||||
|
||||
After cloning the repository, run the following command.
|
||||
## First Build
|
||||
|
||||
After cloning the repository, run the following command to run the first build. This may take a while as it will clone submodules and build dependencies.
|
||||
|
||||
```bash
|
||||
$ make setup
|
||||
```
|
||||
|
||||
Then to build Bun:
|
||||
|
||||
```bash
|
||||
$ make dev
|
||||
```
|
||||
|
||||
The binary will be located at `packages/debug-bun-{platform}-{arch}/bun-debug`. It is recommended to add this to your `$PATH`. To verify the build worked, let's print the version number on the development build of Bun.
|
||||
|
||||
```bash
|
||||
@@ -136,16 +134,78 @@ $ packages/debug-bun-*/bun-debug --version
|
||||
bun 0.x.y__dev
|
||||
```
|
||||
|
||||
Note: `make setup` is just an alias for the following:
|
||||
|
||||
```bash
|
||||
$ make assert-deps submodule npm-install-dev node-fallbacks runtime_js fallback_decoder bun_error mimalloc picohttp zlib boringssl libarchive lolhtml sqlite usockets uws tinycc c-ares zstd base64 cpp zig link
|
||||
```
|
||||
|
||||
## Rebuilding
|
||||
|
||||
Bun uses a series of make commands to rebuild parts of the codebase. The general rule for rebuilding is there is `make link` to rerun the linker, and then different make targets for different parts of the codebase. Do not pass `-j` to make as these scripts will break if run out of order, and multiple cores will be used when possible during the builds.
|
||||
|
||||
| What changed | Run this command |
|
||||
| ------------------------------------ | --------------------------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| Zig Code | `make zig` |
|
||||
| C++ Code | `make cpp` |
|
||||
| Zig + C++ Code | `make dev` (combination of the above two) |
|
||||
| JS/TS Code in `src/js` | `make js` (in bun-debug, js is loaded from disk without a recompile). If you change the names of any file or add/remove anything, you must also run `make dev`. |
|
||||
| `*.classes.ts` | `make generate-classes dev` |
|
||||
| JSSink | `make generate-sink cpp` |
|
||||
| `src/node_fallbacks/*` | `make node-fallbacks zig` |
|
||||
| `identifier_data.zig` | `make identifier-cache zig` |
|
||||
| Code using `cppFn`/`JSC.markBinding` | `make headers` (TODO: explain what this is used for and why it's useful) |
|
||||
|
||||
`make setup` cloned a bunch of submodules and built the subprojects. When a submodule is out of date, run `make submodule` to quickly reset/update all your submodules, then you can rebuild individual submodules with their respective command.
|
||||
|
||||
| Dependency | Run this command |
|
||||
| -------------- | ---------------------------------------- |
|
||||
| WebKit | `bun install` (it is a prebuilt package) |
|
||||
| uWebSockets | `make uws` |
|
||||
| Mimalloc | `make mimalloc` |
|
||||
| PicoHTTPParser | `make picohttp` |
|
||||
| zlib | `make zlib` |
|
||||
| BoringSSL | `make boringssl` |
|
||||
| libarchive | `make libarchive` |
|
||||
| lolhtml | `make lolhtml` |
|
||||
| sqlite | `make sqlite` |
|
||||
| TinyCC | `make tinycc` |
|
||||
| c-ares | `make c-ares` |
|
||||
| zstd | `make zstd` |
|
||||
| Base64 | `make base64` |
|
||||
|
||||
The above will probably also need Zig and/or C++ code rebuilt.
|
||||
|
||||
## VSCode
|
||||
|
||||
VSCode is the recommended IDE for working on Bun, as it has been configured. After opening the project, you can run `Extensions: Show Recommended Extensions` to install the recommended extensions for Zig and C++. ZLS is automatically configured.
|
||||
|
||||
### ZLS
|
||||
|
||||
ZLS is the language server for Zig. The latest binary that the extension auto-updates may not function with the version of Zig that Bun uses. It may be more reliable to build ZLS from source:
|
||||
|
||||
```bash
|
||||
$ git clone https://github.com/zigtools/zls
|
||||
$ cd zls
|
||||
$ git checkout f91ff831f4959efcb7e648dba4f0132c296d26c0
|
||||
$ zig build
|
||||
```
|
||||
|
||||
Then add absolute paths to Zig and ZLS in your vscode config:
|
||||
|
||||
```json
|
||||
{
|
||||
"zig.zigPath": "/path/to/zig/install/zig",
|
||||
"zig.zls.path": "/path/to/zls/zig-out/bin/zls"
|
||||
}
|
||||
```
|
||||
|
||||
## JavaScript builtins
|
||||
|
||||
When you change anything in `src/js/builtins/*` or switch branches, run this:
|
||||
|
||||
```bash
|
||||
$ make regenerate-bindings
|
||||
$ make js cpp
|
||||
```
|
||||
|
||||
That inlines the TypeScript code into C++ headers.
|
||||
@@ -154,6 +214,8 @@ That inlines the TypeScript code into C++ headers.
|
||||
Make sure you have `ccache` installed, otherwise regeneration will take much longer than it should.
|
||||
{% /callout %}
|
||||
|
||||
For more information on how `src/js` works, see `src/js/README.md` in the codebase.
|
||||
|
||||
## Code generation scripts
|
||||
|
||||
Bun leverages a lot of code generation scripts.
|
||||
@@ -193,7 +255,7 @@ Certain modules like `node:fs`, `node:stream`, `bun:sqlite`, and `ws` are implem
|
||||
When these are changed, run:
|
||||
|
||||
```
|
||||
$ make esm
|
||||
$ make js
|
||||
```
|
||||
|
||||
In debug builds, Bun automatically loads these from the filesystem, wherever it was compiled, so no need to re-run `make dev`. In release builds, this same behavior can be done via the environment variable `BUN_OVERRIDE_MODULE_PATH`. When set to the repository root, Bun will read from the bundled modules in the repository instead of the ones baked into the binary.
|
||||
@@ -244,7 +306,7 @@ For performance reasons, `make submodule` does not automatically update the WebK
|
||||
|
||||
```bash
|
||||
$ bun install
|
||||
$ make regenerate-bindings
|
||||
$ make cpp
|
||||
```
|
||||
|
||||
<!-- Check the [Bun repo](https://github.com/oven-sh/bun/tree/main/src/bun.js) to get the hash of the commit of WebKit is currently being used.
|
||||
|
||||
@@ -14,7 +14,7 @@ console.log(<Component message="Hello world!" />);
|
||||
|
||||
## Configuration
|
||||
|
||||
Bun reads your `tsconfig.json` or `jsconfig.json` configuration files to determines how to perform the JSX transform internally. To avoid using either of these, the following options can also be defined in [`bunfig.json`](/docs/runtime/configuration).
|
||||
Bun reads your `tsconfig.json` or `jsconfig.json` configuration files to determine how to perform the JSX transform internally. To avoid using either of these, the following options can also be defined in [`bunfig.toml`](/docs/runtime/configuration).
|
||||
|
||||
The following compiler options are respected.
|
||||
|
||||
|
||||
@@ -10,7 +10,7 @@ This page is updated regularly to reflect compatibility status of the latest ver
|
||||
|
||||
### [`node:async_hooks`](https://nodejs.org/api/async_hooks.html)
|
||||
|
||||
🔴 Not implemented.
|
||||
🟡 Only `AsyncLocalStorage`, and `AsyncResource` are implemented.
|
||||
|
||||
### [`node:buffer`](https://nodejs.org/api/buffer.html)
|
||||
|
||||
@@ -50,7 +50,7 @@ This page is updated regularly to reflect compatibility status of the latest ver
|
||||
|
||||
### [`node:events`](https://nodejs.org/api/events.html)
|
||||
|
||||
🟡 Missing `EventEmitterAsyncResource` `events.on`.
|
||||
🟡 Missing `on`
|
||||
|
||||
### [`node:fs`](https://nodejs.org/api/fs.html)
|
||||
|
||||
@@ -138,7 +138,7 @@ This page is updated regularly to reflect compatibility status of the latest ver
|
||||
|
||||
### [`node:tty`](https://nodejs.org/api/tty.html)
|
||||
|
||||
🟡 Missing `tty.ReadStream` and `tty.WriteStream`.
|
||||
🟢 Fully implemented.
|
||||
|
||||
### [`node:url`](https://nodejs.org/api/url.html)
|
||||
|
||||
@@ -162,11 +162,11 @@ This page is updated regularly to reflect compatibility status of the latest ver
|
||||
|
||||
### [`node:worker_threads`](https://nodejs.org/api/worker_threads.html)
|
||||
|
||||
🟢 Fully implemented.
|
||||
🟡 `Worker` doesn't support the following options: `eval`, `argv`, `execArgv`, `stdin`, `stdout`, `stderr`, `trackedUnmanagedFds`, `resourceLimits`. Missing `markAsUntransferable`, `moveMessagePortToContext`, `getHeapSnapshot`.
|
||||
|
||||
### [`node:zlib`](https://nodejs.org/api/zlib.html)
|
||||
|
||||
🟡 Missing `zlib.brotli*`
|
||||
🟡 Missing `zlib.brotli*`. Some methods are not optimized.
|
||||
|
||||
<!-- {% block className="ScrollFrame" %}
|
||||
{% table %}
|
||||
@@ -485,7 +485,7 @@ The table below lists all globals implemented by Node.js and Bun's current compa
|
||||
|
||||
### [`console`](https://developer.mozilla.org/en-US/docs/Web/API/console)
|
||||
|
||||
🟢 Fully implemented.
|
||||
🟡 Missing `Console` constructor.
|
||||
|
||||
### [`CountQueuingStrategy`](https://developer.mozilla.org/en-US/docs/Web/API/CountQueuingStrategy)
|
||||
|
||||
|
||||
276
docs/runtime/plugins.md
Normal file
276
docs/runtime/plugins.md
Normal file
@@ -0,0 +1,276 @@
|
||||
{% callout %}
|
||||
**Note** — Introduced in Bun v0.1.11.
|
||||
{% /callout %}
|
||||
|
||||
Bun provides a universal plugin API that can be used to extend both the _runtime_ and [_bundler_](/docs/bundler).
|
||||
|
||||
Plugins intercept imports and perform custom loading logic: reading files, transpiling code, etc. They can be used to add support for additional file types, like `.scss` or `.yaml`. In the context of Bun's bundler, plugins can be used to implement framework-level features like CSS extraction, macros, and client-server code co-location.
|
||||
|
||||
## Usage
|
||||
|
||||
A plugin is defined as a simple JavaScript object containing a `name` property and a `setup` function. Register a plugin with Bun using the `plugin` function.
|
||||
|
||||
```tsx#myPlugin.ts
|
||||
import { plugin, type BunPlugin } from "bun";
|
||||
|
||||
const myPlugin: BunPlugin = {
|
||||
name: "Custom loader",
|
||||
setup(build) {
|
||||
// implementation
|
||||
},
|
||||
};
|
||||
```
|
||||
|
||||
Plugins have to be registered before any other code runs! To achieve this, use the `preload` option in your [`bunfig.toml`](/docs/runtime/configuration). Bun automatically loads the files/modules specified in `preload` before running a file.
|
||||
|
||||
```toml
|
||||
preload = ["./myPlugin.ts"]
|
||||
```
|
||||
|
||||
To preload files before `bun test`:
|
||||
|
||||
```toml
|
||||
[test]
|
||||
preload = ["./myPlugin.ts"]
|
||||
```
|
||||
|
||||
## Third-party plugins
|
||||
|
||||
By convention, third-party plugins intended for consumption should export a factory function that accepts some configuration and returns a plugin object.
|
||||
|
||||
```ts
|
||||
import { plugin } from "bun";
|
||||
import fooPlugin from "bun-plugin-foo";
|
||||
|
||||
plugin(
|
||||
fooPlugin({
|
||||
// configuration
|
||||
}),
|
||||
);
|
||||
```
|
||||
|
||||
Bun's plugin API is based on [esbuild](https://esbuild.github.io/plugins). Only [a subset](/docs/bundler/vs-esbuild#plugin-api) of the esbuild API is implemented, but some esbuild plugins "just work" in Bun, like the official [MDX loader](https://mdxjs.com/packages/esbuild/):
|
||||
|
||||
```jsx
|
||||
import { plugin } from "bun";
|
||||
import mdx from "@mdx-js/esbuild";
|
||||
|
||||
plugin(mdx());
|
||||
```
|
||||
|
||||
## Loaders
|
||||
|
||||
Plugins are primarily used to extend Bun with loaders for additional file types. Let's look at a simple plugin that implements a loader for `.yaml` files.
|
||||
|
||||
```ts#yamlPlugin.ts
|
||||
import { plugin } from "bun";
|
||||
|
||||
plugin({
|
||||
name: "YAML",
|
||||
async setup(build) {
|
||||
const { load } = await import("js-yaml");
|
||||
const { readFileSync } = await import("fs");
|
||||
|
||||
// when a .yaml file is imported...
|
||||
build.onLoad({ filter: /\.(yaml|yml)$/ }, (args) => {
|
||||
|
||||
// read and parse the file
|
||||
const text = readFileSync(args.path, "utf8");
|
||||
const exports = load(text) as Record<string, any>;
|
||||
|
||||
      // and return it as a module
|
||||
return {
|
||||
exports,
|
||||
loader: "object", // special loader for JS objects
|
||||
};
|
||||
});
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
With this plugin, data can be directly imported from `.yaml` files.
|
||||
|
||||
{% codetabs %}
|
||||
|
||||
```ts#index.ts
|
||||
import "./yamlPlugin.ts"
|
||||
import {name, releaseYear} from "./data.yml"
|
||||
|
||||
console.log(name, releaseYear);
|
||||
```
|
||||
|
||||
```yaml#data.yml
|
||||
name: Fast X
|
||||
releaseYear: 2023
|
||||
```
|
||||
|
||||
{% /codetabs %}
|
||||
|
||||
Note that the returned object has a `loader` property. This tells Bun which of its internal loaders should be used to handle the result. Even though we're implementing a loader for `.yaml`, the result must still be understandable by one of Bun's built-in loaders. It's loaders all the way down.
|
||||
|
||||
In this case we're using `"object"`—a built-in loader (intended for use by plugins) that converts a plain JavaScript object to an equivalent ES module. Any of Bun's built-in loaders are supported; these same loaders are used by Bun internally for handling files of various kinds. The table below is a quick reference; refer to [Bundler > Loaders](/docs/bundler/loaders) for complete documentation.
|
||||
|
||||
{% table %}
|
||||
|
||||
- Loader
|
||||
- Extensions
|
||||
- Output
|
||||
|
||||
---
|
||||
|
||||
- `js`
|
||||
- `.mjs` `.cjs`
|
||||
- Transpile to JavaScript files
|
||||
|
||||
---
|
||||
|
||||
- `jsx`
|
||||
- `.js` `.jsx`
|
||||
- Transform JSX then transpile
|
||||
|
||||
---
|
||||
|
||||
- `ts`
|
||||
- `.ts` `.mts` `.cts`
|
||||
- Transform TypeScript then transpile
|
||||
|
||||
---
|
||||
|
||||
- `tsx`
|
||||
- `.tsx`
|
||||
- Transform TypeScript, JSX, then transpile
|
||||
|
||||
---
|
||||
|
||||
- `toml`
|
||||
- `.toml`
|
||||
- Parse using Bun's built-in TOML parser
|
||||
|
||||
---
|
||||
|
||||
- `json`
|
||||
- `.json`
|
||||
- Parse using Bun's built-in JSON parser
|
||||
|
||||
---
|
||||
|
||||
- `napi`
|
||||
- `.node`
|
||||
- Import a native Node.js addon
|
||||
|
||||
---
|
||||
|
||||
- `wasm`
|
||||
- `.wasm`
|
||||
- Import a WebAssembly module
|
||||
|
||||
---
|
||||
|
||||
- `object`
|
||||
- _none_
|
||||
- A special loader intended for plugins that converts a plain JavaScript object to an equivalent ES module. Each key in the object corresponds to a named export.
|
||||
|
||||
{% /table %}
|
||||
|
||||
Loading a YAML file is useful, but plugins support more than just data loading. Let's look at a plugin that lets Bun import `*.svelte` files.
|
||||
|
||||
```ts#sveltePlugin.ts
|
||||
import { plugin } from "bun";
|
||||
|
||||
await plugin({
|
||||
name: "svelte loader",
|
||||
async setup(build) {
|
||||
const { compile } = await import("svelte/compiler");
|
||||
const { readFileSync } = await import("fs");
|
||||
|
||||
// when a .svelte file is imported...
|
||||
build.onLoad({ filter: /\.svelte$/ }, ({ path }) => {
|
||||
|
||||
// read and compile it with the Svelte compiler
|
||||
const file = readFileSync(path, "utf8");
|
||||
const contents = compile(file, {
|
||||
filename: path,
|
||||
generate: "ssr",
|
||||
}).js.code;
|
||||
|
||||
// and return the compiled source code as "js"
|
||||
return {
|
||||
contents,
|
||||
loader: "js",
|
||||
};
|
||||
});
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
> Note: in a production implementation, you'd want to cache the compiled output and include additional error handling.
|
||||
|
||||
The object returned from `build.onLoad` contains the compiled source code in `contents` and specifies `"js"` as its loader. That tells Bun to consider the returned `contents` to be a JavaScript module and transpile it using Bun's built-in `js` loader.
|
||||
|
||||
With this plugin, Svelte components can now be directly imported and consumed.
|
||||
|
||||
```js
|
||||
import "./sveltePlugin.ts";
|
||||
import MySvelteComponent from "./component.svelte";
|
||||
|
||||
console.log(MySvelteComponent.render());
|
||||
```
|
||||
|
||||
## Reading the config
|
||||
|
||||
Plugins can read and write to the [build config](/docs/bundler#api) with `build.config`.
|
||||
|
||||
```ts
|
||||
Bun.build({
|
||||
entrypoints: ["./app.ts"],
|
||||
outdir: "./dist",
|
||||
sourcemap: "external",
|
||||
plugins: [
|
||||
{
|
||||
name: "demo",
|
||||
setup(build) {
|
||||
console.log(build.config.sourcemap); // "external"
|
||||
|
||||
build.config.minify = true; // enable minification
|
||||
|
||||
// `plugins` is readonly
|
||||
console.log(`Number of plugins: ${build.config.plugins.length}`);
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
```ts
|
||||
namespace Bun {
|
||||
function plugin(plugin: {
|
||||
name: string;
|
||||
setup: (build: PluginBuilder) => void;
|
||||
}): void;
|
||||
}
|
||||
|
||||
type PluginBuilder = {
|
||||
onResolve: (
|
||||
args: { filter: RegExp; namespace?: string },
|
||||
callback: (args: { path: string; importer: string }) => {
|
||||
path: string;
|
||||
namespace?: string;
|
||||
} | void,
|
||||
) => void;
|
||||
onLoad: (
|
||||
args: { filter: RegExp; namespace?: string },
|
||||
callback: (args: { path: string }) => {
|
||||
loader?: Loader;
|
||||
contents?: string;
|
||||
exports?: Record<string, any>;
|
||||
},
|
||||
) => void;
|
||||
config: BuildConfig;
|
||||
};
|
||||
|
||||
type Loader = "js" | "jsx" | "ts" | "tsx" | "json" | "toml" | "object";
|
||||
```
|
||||
|
||||
The `onLoad` method optionally accepts a `namespace` in addition to the `filter` regex. This namespace will be used to prefix the import in transpiled code; for instance, a loader with a `filter: /\.yaml$/` and `namespace: "yaml:"` will transform an import from `./myfile.yaml` into `yaml:./myfile.yaml`.
|
||||
@@ -78,6 +78,8 @@ These are the recommended `compilerOptions` for a Bun project.
|
||||
"moduleResolution": "bundler",
|
||||
"noEmit": true,
|
||||
"allowImportingTsExtensions": true,
|
||||
"emitDeclarationOnly": true,
|
||||
"composite": true,
|
||||
"moduleDetection": "force",
|
||||
// if TS 4.x or earlier
|
||||
"moduleResolution": "nodenext",
|
||||
|
||||
@@ -1,12 +1,11 @@
|
||||
`bun:test` supports seeing which lines of code are covered by tests. To use this feature, pass `--coverage` to the CLI:
|
||||
Bun's test runner now supports built-in _code coverage reporting_. This makes it easy to see how much of the codebase is covered by tests, and find areas that are not currently well-tested.
|
||||
|
||||
```sh
|
||||
bun test --coverage
|
||||
```
|
||||
## Enabling coverage
|
||||
|
||||
It will print out a coverage report to the console:
|
||||
`bun:test` supports seeing which lines of code are covered by tests. To use this feature, pass `--coverage` to the CLI. It will print out a coverage report to the console:
|
||||
|
||||
```js
|
||||
$ bun test --coverage
|
||||
-------------|---------|---------|-------------------
|
||||
File | % Funcs | % Lines | Uncovered Line #s
|
||||
-------------|---------|---------|-------------------
|
||||
@@ -26,32 +25,45 @@ All files | 38.89 | 42.11 |
|
||||
-------------|---------|---------|-------------------
|
||||
```
|
||||
|
||||
If coverage is below a threshold, `bun:test` will exit with a non-zero exit code to indicate the failure.
|
||||
|
||||
### Configuring coverage
|
||||
|
||||
`bunfig.toml` supports configuring coverage:
|
||||
To always enable coverage reporting by default, add the following line to your `bunfig.toml`:
|
||||
|
||||
```toml
|
||||
[test]
|
||||
|
||||
# Always enable coverage
|
||||
# always enable coverage
|
||||
coverage = true
|
||||
|
||||
# Anything less than 90% coverage will fail the test
|
||||
# coverageThreshold = 0.9
|
||||
coverageThreshold = { line = 0.9, function = 0.9 }
|
||||
|
||||
|
||||
# Don't include .test.* files in coverage reports
|
||||
coverageSkipTestFiles = true
|
||||
|
||||
# Disable sourcemap support in coverage reports
|
||||
# By default, coverage reports will automatically use Bun's internal sourcemap.
|
||||
# You probably don't want to configure this
|
||||
# coverageIgnoreSourcemaps = false
|
||||
```
|
||||
|
||||
`coverageThreshold` can be either a number or an object with `line` and `function` keys. When a number, it is treated as both the line and function threshold.
|
||||
By default coverage reports will _include_ test files and _exclude_ sourcemaps. This is usually what you want, but it can be configured otherwise in `bunfig.toml`.
|
||||
|
||||
Coverage support was added in Bun v0.7.3.
|
||||
```toml
|
||||
[test]
|
||||
coverageSkipTestFiles = true # default false
|
||||
```
|
||||
|
||||
### Coverage thresholds
|
||||
|
||||
{% callout %}
|
||||
**Note** — Support for coverage reporting was added in Bun v0.7.3.
|
||||
{% /callout %}
|
||||
|
||||
It is possible to specify a coverage threshold in `bunfig.toml`. If your test suite does not meet or exceed this threshold, `bun test` will exit with a non-zero exit code to indicate the failure.
|
||||
|
||||
```toml
|
||||
[test]
|
||||
|
||||
# to require 90% line-level and function-level coverage
|
||||
coverageThreshold = 0.9
|
||||
|
||||
# to set different thresholds for lines and functions
|
||||
coverageThreshold = { line = 0.9, function = 0.9 }
|
||||
```
|
||||
|
||||
### Sourcemaps
|
||||
|
||||
Internally, Bun transpiles all files by default, so Bun automatically generates an internal [source map](https://web.dev/source-maps/) that maps lines of your original source code onto Bun's internal representation. If for any reason you want to disable this, set `test.coverageIgnoreSourcemaps` to `true`; this will rarely be desirable outside of advanced use cases.
|
||||
|
||||
```toml
|
||||
[test]
|
||||
coverageIgnoreSourcemaps = true # default false
|
||||
```
|
||||
|
||||
@@ -1,18 +1,19 @@
|
||||
// Accepts a string, TypedArray, or Blob (file blob supported is not implemented but planned)
|
||||
// Accepts a string, TypedArray, or Blob (file blob support is not implemented but planned)
|
||||
const input = "hello world".repeat(400);
|
||||
|
||||
// Bun.hash() defaults to Wyhash because it's fast
|
||||
console.log(Bun.hash(input));
|
||||
|
||||
console.log(Bun.hash.wyhash(input));
|
||||
// and returns a number
|
||||
// all of these hashing functions return numbers, not typed arrays.
|
||||
console.log(Bun.hash.adler32(input));
|
||||
console.log(Bun.hash.crc32(input));
|
||||
console.log(Bun.hash.cityHash32(input));
|
||||
console.log(Bun.hash.cityHash64(input));
|
||||
console.log(Bun.hash.murmur32v3(input));
|
||||
console.log(Bun.hash.murmur64v2(input));
|
||||
// and returns a bigint
|
||||
// all of these hashing functions return number if 32-bit or bigint if 64-bit, not typed arrays.
|
||||
console.log(Bun.hash.adler32(input)); // number
|
||||
console.log(Bun.hash.crc32(input)); // number
|
||||
console.log(Bun.hash.cityHash32(input)); // number
|
||||
console.log(Bun.hash.cityHash64(input)); // bigint
|
||||
console.log(Bun.hash.murmur32v3(input)); // number
|
||||
console.log(Bun.hash.murmur32v2(input)); // number
|
||||
console.log(Bun.hash.murmur64v2(input)); // bigint
|
||||
|
||||
// Second argument accepts a seed where relevant
|
||||
console.log(Bun.hash(input, 12345));
|
||||
|
||||
@@ -187,7 +187,7 @@ pub fn main() anyerror!void {
|
||||
var ctx = try default_allocator.create(HTTP.HTTPChannelContext);
|
||||
ctx.* = .{
|
||||
.channel = channel,
|
||||
.http = try HTTP.AsyncHTTP.init(default_allocator, args.method, args.url, args.headers, args.headers_buf, response_body_string, args.body, 0, HTTP.FetchRedirect.follow),
|
||||
.http = try HTTP.AsyncHTTP.init(default_allocator, args.method, args.url, args.headers, args.headers_buf, response_body_string, args.body, 0, HTTP.FetchRedirect.follow,),
|
||||
};
|
||||
ctx.http.callback = HTTP.HTTPChannelContext.callback;
|
||||
var batch = HTTPThread.Batch{};
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
{
|
||||
"dependencies": {
|
||||
"@vscode/debugadapter": "^1.61.0",
|
||||
"esbuild": "^0.17.15",
|
||||
"eslint": "^8.20.0",
|
||||
"eslint-config-prettier": "^8.5.0",
|
||||
@@ -24,7 +25,7 @@
|
||||
"@types/react": "^18.0.25",
|
||||
"@typescript-eslint/eslint-plugin": "^5.31.0",
|
||||
"@typescript-eslint/parser": "^5.31.0",
|
||||
"bun-webkit": "0.0.1-74609640b2a7c5a1588b824f870d1b0ff91bfd8e"
|
||||
"bun-webkit": "0.0.1-48c1316e907ca597e27e5a7624160dc18a4df8ec"
|
||||
},
|
||||
"version": "0.0.0",
|
||||
"prettier": "./.prettierrc.cjs"
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"lib": [
|
||||
"ESNext"
|
||||
],
|
||||
"lib": ["ESNext"],
|
||||
"module": "esnext",
|
||||
"target": "esnext",
|
||||
"moduleResolution": "bundler",
|
||||
"moduleDetection": "force",
|
||||
"allowImportingTsExtensions": true,
|
||||
"emitDeclarationOnly": true,
|
||||
"composite": true,
|
||||
"strict": true,
|
||||
"downlevelIteration": true,
|
||||
"skipLibCheck": true,
|
||||
@@ -19,8 +19,5 @@
|
||||
"bun-types" // add Bun global
|
||||
]
|
||||
},
|
||||
"include": [
|
||||
"**/*.ts",
|
||||
"modules.d.ts"
|
||||
]
|
||||
}
|
||||
"include": ["**/*.ts", "modules.d.ts"]
|
||||
}
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"lib": [
|
||||
"ESNext"
|
||||
],
|
||||
"lib": ["ESNext"],
|
||||
"module": "esnext",
|
||||
"target": "esnext",
|
||||
"moduleResolution": "bundler",
|
||||
"moduleDetection": "force",
|
||||
"allowImportingTsExtensions": true,
|
||||
"emitDeclarationOnly": true,
|
||||
"composite": true,
|
||||
"strict": true,
|
||||
"downlevelIteration": true,
|
||||
"skipLibCheck": true,
|
||||
@@ -19,8 +19,5 @@
|
||||
"bun-types" // add Bun global
|
||||
]
|
||||
},
|
||||
"include": [
|
||||
"**/*.ts",
|
||||
"modules.d.ts"
|
||||
]
|
||||
}
|
||||
"include": ["**/*.ts", "modules.d.ts"]
|
||||
}
|
||||
|
||||
172
packages/bun-polyfills/.gitignore
vendored
Normal file
172
packages/bun-polyfills/.gitignore
vendored
Normal file
@@ -0,0 +1,172 @@
|
||||
# Based on https://raw.githubusercontent.com/github/gitignore/main/Node.gitignore
|
||||
|
||||
# Logs
|
||||
|
||||
logs
|
||||
_.log
|
||||
npm-debug.log_
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
lerna-debug.log*
|
||||
.pnpm-debug.log*
|
||||
|
||||
# Diagnostic reports (https://nodejs.org/api/report.html)
|
||||
|
||||
report.[0-9]_.[0-9]_.[0-9]_.[0-9]_.json
|
||||
|
||||
# Runtime data
|
||||
|
||||
pids
|
||||
_.pid
|
||||
_.seed
|
||||
\*.pid.lock
|
||||
|
||||
# Directory for instrumented libs generated by jscoverage/JSCover
|
||||
|
||||
lib-cov
|
||||
|
||||
# Coverage directory used by tools like istanbul
|
||||
|
||||
coverage
|
||||
\*.lcov
|
||||
|
||||
# nyc test coverage
|
||||
|
||||
.nyc_output
|
||||
|
||||
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
|
||||
|
||||
.grunt
|
||||
|
||||
# Bower dependency directory (https://bower.io/)
|
||||
|
||||
bower_components
|
||||
|
||||
# node-waf configuration
|
||||
|
||||
.lock-wscript
|
||||
|
||||
# Dependency directories
|
||||
|
||||
node_modules/
|
||||
jspm_packages/
|
||||
|
||||
# Snowpack dependency directory (https://snowpack.dev/)
|
||||
|
||||
web_modules/
|
||||
|
||||
# TypeScript cache
|
||||
|
||||
\*.tsbuildinfo
|
||||
|
||||
# Optional npm cache directory
|
||||
|
||||
.npm
|
||||
|
||||
# Optional eslint cache
|
||||
|
||||
.eslintcache
|
||||
|
||||
# Optional stylelint cache
|
||||
|
||||
.stylelintcache
|
||||
|
||||
# Microbundle cache
|
||||
|
||||
.rpt2_cache/
|
||||
.rts2_cache_cjs/
|
||||
.rts2_cache_es/
|
||||
.rts2_cache_umd/
|
||||
|
||||
# Optional REPL history
|
||||
|
||||
.node_repl_history
|
||||
|
||||
# Output of 'npm pack'
|
||||
|
||||
\*.tgz
|
||||
|
||||
# Yarn Integrity file
|
||||
|
||||
.yarn-integrity
|
||||
|
||||
# dotenv environment variable files
|
||||
|
||||
.env
|
||||
.env.development.local
|
||||
.env.test.local
|
||||
.env.production.local
|
||||
.env.local
|
||||
|
||||
# parcel-bundler cache (https://parceljs.org/)
|
||||
|
||||
.cache
|
||||
.parcel-cache
|
||||
|
||||
# Next.js build output
|
||||
|
||||
.next
|
||||
out
|
||||
|
||||
# Nuxt.js build / generate output
|
||||
|
||||
.nuxt
|
||||
dist
|
||||
|
||||
# Gatsby files
|
||||
|
||||
.cache/
|
||||
|
||||
# Comment in the public line in if your project uses Gatsby and not Next.js
|
||||
|
||||
# https://nextjs.org/blog/next-9-1#public-directory-support
|
||||
|
||||
# public
|
||||
|
||||
# vuepress build output
|
||||
|
||||
.vuepress/dist
|
||||
|
||||
# vuepress v2.x temp and cache directory
|
||||
|
||||
.temp
|
||||
.cache
|
||||
|
||||
# Docusaurus cache and generated files
|
||||
|
||||
.docusaurus
|
||||
|
||||
# Serverless directories
|
||||
|
||||
.serverless/
|
||||
|
||||
# FuseBox cache
|
||||
|
||||
.fusebox/
|
||||
|
||||
# DynamoDB Local files
|
||||
|
||||
.dynamodb/
|
||||
|
||||
# TernJS port file
|
||||
|
||||
.tern-port
|
||||
|
||||
# Stores VSCode versions used for testing VSCode extensions
|
||||
|
||||
.vscode-test
|
||||
|
||||
# yarn v2
|
||||
|
||||
.yarn/cache
|
||||
.yarn/unplugged
|
||||
.yarn/build-state.yml
|
||||
.yarn/install-state.gz
|
||||
.pnp.\*
|
||||
|
||||
# Misc
|
||||
|
||||
_*
|
||||
.old
|
||||
.vscode
|
||||
!build
|
||||
9
packages/bun-polyfills/README.md
Normal file
9
packages/bun-polyfills/README.md
Normal file
@@ -0,0 +1,9 @@
|
||||
# Bun APIs Polyfills
|
||||
|
||||
Polyfills for Bun's JavaScript runtime APIs for use in environments outside of Bun, such as Node.js or the browser¹.
|
||||
|
||||
¹ **Note:** The current priority is Node.js, browser support will vary per polyfill.
|
||||
|
||||
## Usage
|
||||
|
||||
This is currently a work in progress and is not ready for general use.
|
||||
BIN
packages/bun-polyfills/bun.lockb
Executable file
BIN
packages/bun-polyfills/bun.lockb
Executable file
Binary file not shown.
95
packages/bun-polyfills/lib/zighash/index.mjs
Normal file
95
packages/bun-polyfills/lib/zighash/index.mjs
Normal file
@@ -0,0 +1,95 @@
|
||||
// @ts-check
|
||||
import fs from 'node:fs';
|
||||
import path from 'node:path';
|
||||
import { fileURLToPath } from 'node:url';
|
||||
|
||||
const { instance } = /** @type {ZighashInstance} */(
|
||||
await WebAssembly.instantiate(
|
||||
fs.readFileSync(path.join(path.dirname(fileURLToPath(import.meta.url)), 'zighash.wasm')),
|
||||
{
|
||||
env: {
|
||||
/** @param {any} x */
|
||||
print(x) { console.log(x); },
|
||||
},
|
||||
}
|
||||
)
|
||||
);
|
||||
const exports = instance.exports;
|
||||
const mem = exports.memory;
|
||||
const memview = {
|
||||
get u8() { return new Uint8Array(mem.buffer); },
|
||||
get u16() { return new Uint16Array(mem.buffer); },
|
||||
get u32() { return new Uint32Array(mem.buffer); },
|
||||
get u64() { return new BigUint64Array(mem.buffer); },
|
||||
get i8() { return new Int8Array(mem.buffer); },
|
||||
get i16() { return new Int16Array(mem.buffer); },
|
||||
get i32() { return new Int32Array(mem.buffer); },
|
||||
get i64() { return new BigInt64Array(mem.buffer); },
|
||||
get f32() { return new Float32Array(mem.buffer); },
|
||||
get f64() { return new Float64Array(mem.buffer); },
|
||||
};
|
||||
|
||||
const nullptr = { ptr: -1, size: 0 };
|
||||
const encoder = new TextEncoder();
|
||||
|
||||
const allocBuffer = (
|
||||
/** @type {ArrayBufferView | ArrayBuffer | SharedArrayBuffer} */ buf,
|
||||
/** @type {boolean=} */ nullTerminate = false,
|
||||
) => {
|
||||
const size = buf.byteLength + +nullTerminate;
|
||||
if (size === 0) return nullptr;
|
||||
const ptr = exports.alloc(size);
|
||||
if (ptr === -1) throw new Error('WASM memory allocation failed');
|
||||
const u8heap = memview.u8;
|
||||
u8heap.set(new Uint8Array(ArrayBuffer.isView(buf) ? buf.buffer : buf), ptr);
|
||||
if (nullTerminate) u8heap[ptr + buf.byteLength] = 0;
|
||||
return { ptr, size };
|
||||
};
|
||||
const allocString = (
|
||||
/** @type {string} */ str,
|
||||
/** @type {boolean=} */ nullTerminate = true,
|
||||
) => {
|
||||
const strbuf = encoder.encode(str);
|
||||
return allocBuffer(strbuf, nullTerminate);
|
||||
};
|
||||
|
||||
/** @type {JSSeededHash64Function} */
|
||||
export function wyhash(input = '', seed = 0n) {
|
||||
const { ptr, size } = typeof input === 'string' ? allocString(input, false) : allocBuffer(input);
|
||||
return BigInt.asUintN(64, exports.wyhash(ptr, size, seed));
|
||||
}
|
||||
/** @type {JSHash32Function} */
|
||||
export function adler32(input = '') {
|
||||
const { ptr, size } = typeof input === 'string' ? allocString(input, false) : allocBuffer(input);
|
||||
return exports.adler32(ptr, size) >>> 0;
|
||||
}
|
||||
/** @type {JSHash32Function} */
|
||||
export function crc32(input = '') {
|
||||
const { ptr, size } = typeof input === 'string' ? allocString(input, false) : allocBuffer(input);
|
||||
return exports.crc32(ptr, size) >>> 0;
|
||||
}
|
||||
/** @type {JSHash32Function} */
|
||||
export function cityhash32(input = '') {
|
||||
const { ptr, size } = typeof input === 'string' ? allocString(input, false) : allocBuffer(input);
|
||||
return exports.cityhash32(ptr, size) >>> 0;
|
||||
}
|
||||
/** @type {JSSeededHash64Function} */
|
||||
export function cityhash64(input = '', seed = 0n) {
|
||||
const { ptr, size } = typeof input === 'string' ? allocString(input, false) : allocBuffer(input);
|
||||
return BigInt.asUintN(64, exports.cityhash64(ptr, size, seed));
|
||||
}
|
||||
/** @type {JSSeededHash32Function} */
|
||||
export function murmur32v3(input = '', seed = 0) {
|
||||
const { ptr, size } = typeof input === 'string' ? allocString(input, false) : allocBuffer(input);
|
||||
return exports.murmur32v3(ptr, size, seed); //! Bun doesn't unsigned-cast this one, likely unintended but for now we'll do the same
|
||||
}
|
||||
/** @type {JSSeededHash32Function} */
|
||||
export function murmur32v2(input = '', seed = 0) {
|
||||
const { ptr, size } = typeof input === 'string' ? allocString(input, false) : allocBuffer(input);
|
||||
return exports.murmur32v2(ptr, size, seed); //! Bun doesn't unsigned-cast this one, likely unintended but for now we'll do the same
|
||||
}
|
||||
/** @type {JSSeededHash64Function} */
|
||||
export function murmur64v2(input = '', seed = 0n) {
|
||||
const { ptr, size } = typeof input === 'string' ? allocString(input, false) : allocBuffer(input);
|
||||
return BigInt.asUintN(64, exports.murmur64v2(ptr, size, seed));
|
||||
}
|
||||
10
packages/bun-polyfills/lib/zighash/package.json
Normal file
10
packages/bun-polyfills/lib/zighash/package.json
Normal file
@@ -0,0 +1,10 @@
|
||||
{
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"name": "zighash-wasm",
|
||||
"module": "index.mjs",
|
||||
"scripts": {
|
||||
"build": "bun run clean && zig build-lib src/main.zig --name zighash -target wasm32-freestanding -dynamic -rdynamic -OReleaseSmall",
|
||||
"clean": "rm -f *.wasm *.o"
|
||||
}
|
||||
}
|
||||
58
packages/bun-polyfills/lib/zighash/src/main.zig
Normal file
58
packages/bun-polyfills/lib/zighash/src/main.zig
Normal file
@@ -0,0 +1,58 @@
|
||||
const std = @import("std");
|
||||
|
||||
extern fn print(*const u8) void;
|
||||
|
||||
comptime {
|
||||
std.debug.assert(@alignOf(u16) >= 2);
|
||||
std.debug.assert(@alignOf(u32) >= 4);
|
||||
std.debug.assert(@alignOf(u64) >= 8);
|
||||
std.debug.assert(@alignOf(i16) >= 2);
|
||||
std.debug.assert(@alignOf(i32) >= 4);
|
||||
std.debug.assert(@alignOf(i64) >= 8);
|
||||
}
|
||||
|
||||
export fn alloc(size: u32) [*]const u8 {
|
||||
const slice = std.heap.wasm_allocator.alloc(u8, size) catch @panic("wasm failed to allocate memory");
|
||||
return slice.ptr;
|
||||
}
|
||||
|
||||
export fn wyhash(input_ptr: [*]const u8, input_size: u32, seed: u64) u64 {
|
||||
const input: []const u8 = input_ptr[0..input_size];
|
||||
defer std.heap.wasm_allocator.free(input);
|
||||
return std.hash.Wyhash.hash(seed, input);
|
||||
}
|
||||
export fn adler32(input_ptr: [*]const u8, input_size: u32) u32 {
|
||||
const input: []const u8 = input_ptr[0..input_size];
|
||||
defer std.heap.wasm_allocator.free(input);
|
||||
return std.hash.Adler32.hash(input);
|
||||
}
|
||||
export fn crc32(input_ptr: [*]const u8, input_size: u32) u32 {
|
||||
const input: []const u8 = input_ptr[0..input_size];
|
||||
defer std.heap.wasm_allocator.free(input);
|
||||
return std.hash.Crc32.hash(input);
|
||||
}
|
||||
export fn cityhash32(input_ptr: [*]const u8, input_size: u32) u32 {
|
||||
const input: []const u8 = input_ptr[0..input_size];
|
||||
defer std.heap.wasm_allocator.free(input);
|
||||
return std.hash.CityHash32.hash(input);
|
||||
}
|
||||
export fn cityhash64(input_ptr: [*]const u8, input_size: u32, seed: u64) u64 {
|
||||
const input: []const u8 = input_ptr[0..input_size];
|
||||
defer std.heap.wasm_allocator.free(input);
|
||||
return std.hash.CityHash64.hashWithSeed(input, seed);
|
||||
}
|
||||
export fn murmur32v3(input_ptr: [*]const u8, input_size: u32, seed: u32) u32 {
|
||||
const input: []const u8 = input_ptr[0..input_size];
|
||||
defer std.heap.wasm_allocator.free(input);
|
||||
return std.hash.Murmur3_32.hashWithSeed(input, seed);
|
||||
}
|
||||
export fn murmur32v2(input_ptr: [*]const u8, input_size: u32, seed: u32) u32 {
|
||||
const input: []const u8 = input_ptr[0..input_size];
|
||||
defer std.heap.wasm_allocator.free(input);
|
||||
return std.hash.Murmur2_32.hashWithSeed(input, seed);
|
||||
}
|
||||
export fn murmur64v2(input_ptr: [*]const u8, input_size: u32, seed: u64) u64 {
|
||||
const input: []const u8 = input_ptr[0..input_size];
|
||||
defer std.heap.wasm_allocator.free(input);
|
||||
return std.hash.Murmur2_64.hashWithSeed(input, seed);
|
||||
}
|
||||
25
packages/bun-polyfills/lib/zighash/types.d.ts
vendored
Normal file
25
packages/bun-polyfills/lib/zighash/types.d.ts
vendored
Normal file
@@ -0,0 +1,25 @@
|
||||
type WasmHash32Function = (input_ptr: number, input_size: number) => number;
|
||||
type WasmHash64Function = (input_ptr: number, input_size: number) => bigint;
|
||||
type WasmSeededHash32Function = (input_ptr: number, input_size: number, seed: number) => number;
|
||||
type WasmSeededHash64Function = (input_ptr: number, input_size: number, seed: bigint) => bigint;
|
||||
type JSHash32Function = (input: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer) => number;
|
||||
type JSHash64Function = (input: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer) => bigint;
|
||||
type JSSeededHash32Function = (input: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer, seed?: number) => number;
|
||||
type JSSeededHash64Function = (input: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer, seed?: bigint) => bigint;
|
||||
|
||||
type ZighashInstance = WebAssembly.WebAssemblyInstantiatedSource & {
|
||||
instance: {
|
||||
exports: {
|
||||
memory: WebAssembly.Memory,
|
||||
alloc(size: number): number,
|
||||
wyhash: WasmSeededHash64Function,
|
||||
adler32: WasmHash32Function,
|
||||
crc32: WasmHash32Function,
|
||||
cityhash32: WasmHash32Function,
|
||||
cityhash64: WasmSeededHash64Function,
|
||||
murmur32v3: WasmSeededHash32Function,
|
||||
murmur32v2: WasmSeededHash32Function,
|
||||
murmur64v2: WasmSeededHash64Function,
|
||||
};
|
||||
};
|
||||
}
|
||||
BIN
packages/bun-polyfills/lib/zighash/zighash.wasm
Executable file
BIN
packages/bun-polyfills/lib/zighash/zighash.wasm
Executable file
Binary file not shown.
28
packages/bun-polyfills/package.json
Normal file
28
packages/bun-polyfills/package.json
Normal file
@@ -0,0 +1,28 @@
|
||||
{
|
||||
"type": "module",
|
||||
"name": "bun-polyfills",
|
||||
"module": "src/index.ts",
|
||||
"devDependencies": {
|
||||
"@types/node": "^20.4.5",
|
||||
"@types/which": "^3.0.0",
|
||||
"bun-types": "^0.7.0",
|
||||
"copyfiles": "^2.4.1"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"typescript": "^5.0.0"
|
||||
},
|
||||
"scripts": {
|
||||
"node": "node --enable-source-maps --import ./dist/src/repl.js",
|
||||
"clean": "rm -rf dist",
|
||||
"preprocess": "bun tools/updateversions.ts",
|
||||
"build": "bun run clean && bun run preprocess && bunx tsc && bunx copyfiles \"./**/*.wasm\" dist",
|
||||
"build/wasm": "bun run build/zighash",
|
||||
"build/zighash": "cd lib/zighash && bun run build && cd ../.."
|
||||
},
|
||||
"dependencies": {
|
||||
"js-md4": "^0.3.2",
|
||||
"open-editor": "^4.0.0",
|
||||
"supports-color": "^9.4.0",
|
||||
"which": "^3.0.1"
|
||||
}
|
||||
}
|
||||
31
packages/bun-polyfills/src/global/console.ts
Normal file
31
packages/bun-polyfills/src/global/console.ts
Normal file
@@ -0,0 +1,31 @@
|
||||
//? Implements: Red colored console.error from Bun
|
||||
//if (Bun.enableANSIColors) {
|
||||
// const RED = '\x1B[31m' as const;
|
||||
// const RESET = '\x1B[0m' as const;
|
||||
// const consoleError = console.error;
|
||||
// console.error = (...args) => {
|
||||
// if (typeof args[0] === 'string') args[0] = RED + args[0];
|
||||
// consoleError(...args, RESET);
|
||||
// };
|
||||
//}
|
||||
|
||||
//? Implements: for await (const line of console) { ... }
|
||||
console[Symbol.asyncIterator] = async function* () {
|
||||
while (true) yield await new Promise(resolve => {
|
||||
process.stdin.on('data', (data: Buffer | string) => {
|
||||
const str = data.toString('utf-8').replaceAll(/[\r\n]+/g, '');
|
||||
resolve(str);
|
||||
});
|
||||
});
|
||||
} satisfies Console[typeof Symbol.asyncIterator];
|
||||
|
||||
//? Implements: Bun-exclusive console function
|
||||
console.write = ((...data) => {
|
||||
const str = data.map(val => {
|
||||
if (val instanceof ArrayBuffer) val = new TextDecoder('utf-8').decode(val);
|
||||
else if (typeof val === 'object') val = new TextDecoder('utf-8').decode(val.buffer);
|
||||
return val;
|
||||
}).join('');
|
||||
process.stdout.write(str);
|
||||
return new TextEncoder('utf-8').encode(str).byteLength;
|
||||
}) satisfies Console['write'];
|
||||
34
packages/bun-polyfills/src/global/importmeta.ts
Normal file
34
packages/bun-polyfills/src/global/importmeta.ts
Normal file
@@ -0,0 +1,34 @@
|
||||
import path from 'node:path';
|
||||
import { fileURLToPath } from 'node:url';
|
||||
import { createRequire } from 'node:module';
|
||||
|
||||
// Without an ESM loader, this polyfill is impossible to apply automatically,
|
||||
// due to the per-module nature of import.meta. In order to use this polyfill,
|
||||
// you must import it in every module that uses import.meta, and call it with
|
||||
// the import.meta object as the argument. When the polyfills are integrated
|
||||
// with bun build, this could be done automatically by the build process at
|
||||
// the top of every module file bundled.
|
||||
|
||||
export default function polyfillImportMeta(metaIn: ImportMeta) {
|
||||
const require2 = createRequire(metaIn.url);
|
||||
const metapath = fileURLToPath(metaIn.url);
|
||||
const meta: ImportMeta = {
|
||||
url: metaIn.url,
|
||||
main: metapath === process.argv[1],
|
||||
path: metapath,
|
||||
dir: path.dirname(metapath),
|
||||
file: path.basename(metapath),
|
||||
require: require2,
|
||||
async resolve(id: string, parent?: string) {
|
||||
return this.resolveSync(id, parent);
|
||||
},
|
||||
resolveSync(id: string, parent?: string) {
|
||||
return require2.resolve(id, {
|
||||
paths: typeof parent === 'string' ? [
|
||||
path.resolve(parent.startsWith('file://') ? fileURLToPath(parent) : parent, '..')
|
||||
] : undefined,
|
||||
});
|
||||
},
|
||||
};
|
||||
Object.assign(metaIn, meta);
|
||||
}
|
||||
45
packages/bun-polyfills/src/global/index.ts
Normal file
45
packages/bun-polyfills/src/global/index.ts
Normal file
@@ -0,0 +1,45 @@
|
||||
import { version } from '../modules/bun.js';
|
||||
import './console.js';
|
||||
import './process.js';
|
||||
import os from 'node:os';
|
||||
|
||||
//? NodeJS Blob doesn't implement Blob.json(), so we need to polyfill it.
|
||||
Blob.prototype.json = async function json<T>(this: Blob): Promise<T> {
|
||||
try {
|
||||
return JSON.parse(await this.text()) as T;
|
||||
} catch (err) {
|
||||
Error.captureStackTrace(err as Error, json);
|
||||
throw err;
|
||||
}
|
||||
};
|
||||
|
||||
//? navigator global object polyfill
|
||||
Reflect.set(globalThis, 'navigator', {
|
||||
userAgent: `Bun/${version}`,
|
||||
hardwareConcurrency: os.cpus().length,
|
||||
});
|
||||
|
||||
//? method only available in Bun
|
||||
// this isn't quite accurate, but it shouldn't break anything and is currently here just for matching bun and node types
|
||||
const ReadableStreamDefaultReaderPrototype = Object.getPrototypeOf(new ReadableStream().getReader());
|
||||
Reflect.set(
|
||||
ReadableStreamDefaultReaderPrototype, 'readMany',
|
||||
function readMany(this: ReadableStreamDefaultReader): Promise<ReadableStreamDefaultReadManyResult<any>> {
|
||||
return new Promise((resolve, reject) => {
|
||||
const result: ReadableStreamDefaultReadManyResult<any> = {
|
||||
value: [],
|
||||
size: 0,
|
||||
done: true
|
||||
};
|
||||
this.read().then(({ done, value }) => {
|
||||
if (done) resolve(result);
|
||||
else {
|
||||
result.value.push(value);
|
||||
result.size = value.length;
|
||||
result.done = false;
|
||||
resolve(result);
|
||||
}
|
||||
}, reject);
|
||||
});
|
||||
}
|
||||
);
|
||||
19
packages/bun-polyfills/src/global/process.ts
Normal file
19
packages/bun-polyfills/src/global/process.ts
Normal file
@@ -0,0 +1,19 @@
|
||||
|
||||
if (typeof process === 'object' && process !== null) {
|
||||
// process polyfills (node-only)
|
||||
Reflect.set(process, 'isBun', 1 satisfies Process['isBun']);
|
||||
Reflect.set(process, 'browser', false satisfies Process['browser']);
|
||||
|
||||
const NULL_VERSION = '0'.repeat(39) + '1';
|
||||
process.versions.bun = '0.7.1' satisfies Process['versions'][string]; // TODO: This can probably be fetched from somewhere in the repo
|
||||
process.versions.webkit = NULL_VERSION satisfies Process['versions'][string];
|
||||
process.versions.mimalloc = NULL_VERSION satisfies Process['versions'][string];
|
||||
process.versions.libarchive = NULL_VERSION satisfies Process['versions'][string];
|
||||
process.versions.picohttpparser = NULL_VERSION satisfies Process['versions'][string];
|
||||
process.versions.boringssl = NULL_VERSION satisfies Process['versions'][string];
|
||||
process.versions.zig = '0.10.0' satisfies Process['versions'][string];
|
||||
Reflect.set(process, 'revision', NULL_VERSION satisfies Process['revision']);
|
||||
|
||||
// Doesn't work on Windows sadly
|
||||
//Object.defineProperty(process, 'execPath', { value: path.resolve(root, 'cli.js') });
|
||||
}
|
||||
3
packages/bun-polyfills/src/index.ts
Normal file
3
packages/bun-polyfills/src/index.ts
Normal file
@@ -0,0 +1,3 @@
|
||||
export * from './modules/bun.js';
|
||||
export * as default from './modules/bun.js';
|
||||
import './global/index.js';
|
||||
497
packages/bun-polyfills/src/modules/bun.ts
Normal file
497
packages/bun-polyfills/src/modules/bun.ts
Normal file
@@ -0,0 +1,497 @@
|
||||
import type {
|
||||
BunPlugin, PluginConstraints, PluginBuilder, OnLoadCallback, OnResolveCallback, HeapSnapshot,
|
||||
EditorOptions, SpawnOptions, Subprocess, SyncSubprocess, FileBlob as BunFileBlob, ArrayBufferView, Hash
|
||||
} from 'bun';
|
||||
import { TextDecoderStream } from 'node:stream/web';
|
||||
import { NotImplementedError, type SystemError } from '../utils/errors.js';
|
||||
import { streamToBuffer, isArrayBufferView, isFileBlob, isOptions } from '../utils/misc.js';
|
||||
import dnsPolyfill from './bun/dns.js';
|
||||
import { FileSink } from './bun/filesink.js';
|
||||
import {
|
||||
bunHash, bunHashProto,
|
||||
MD4 as MD4Polyfill, MD5 as MD5Polyfill,
|
||||
SHA1 as SHA1Polyfill, SHA224 as SHA224Polyfill,
|
||||
SHA256 as SHA256Polyfill, SHA384 as SHA384Polyfill,
|
||||
SHA512 as SHA512Polyfill, SHA512_256 as SHA512_256Polyfill
|
||||
} from './bun/hashes.js';
|
||||
import { ArrayBufferSink as ArrayBufferSinkPolyfill } from './bun/arraybuffersink.js';
|
||||
import { FileBlob, NodeJSStreamFileBlob } from './bun/fileblob.js';
|
||||
import fs from 'node:fs';
|
||||
import v8 from 'node:v8';
|
||||
import path from 'node:path';
|
||||
import util from 'node:util';
|
||||
import zlib from 'node:zlib';
|
||||
import streams from 'node:stream';
|
||||
import workers from 'node:worker_threads';
|
||||
import chp, { type ChildProcess, type StdioOptions, type SpawnSyncReturns } from 'node:child_process';
|
||||
import { fileURLToPath as fileURLToPathNode, pathToFileURL as pathToFileURLNode } from 'node:url';
|
||||
import npm_which from 'which';
|
||||
import openEditor from 'open-editor';
|
||||
|
||||
export const main = path.resolve(process.cwd(), process.argv[1] ?? 'repl') satisfies typeof Bun.main;
|
||||
|
||||
//? These are automatically updated on build by tools/updateversions.ts, do not edit manually.
|
||||
export const version = '0.7.4' satisfies typeof Bun.version;
|
||||
export const revision = '7088d7e182635a58a50860302da0b1abc42c7ce7' satisfies typeof Bun.revision;
|
||||
|
||||
export const gc = (globalThis.gc ? (() => (globalThis.gc!(), process.memoryUsage().heapUsed)) : (() => {
|
||||
const err = new Error('[bun-polyfills] Garbage collection polyfills are only available when Node.js is ran with the --expose-gc flag.');
|
||||
Error.captureStackTrace(err, gc);
|
||||
throw err;
|
||||
})) satisfies typeof Bun.gc;
|
||||
|
||||
//getter(bun, 'cwd', proc.cwd); //! Can't named export a getter
|
||||
export const origin = '' satisfies typeof Bun.origin;
|
||||
// @ts-expect-error ---
|
||||
export const stdin = new NodeJSStreamFileBlob(process.stdin) satisfies typeof Bun.stdin;
|
||||
// @ts-expect-error ---
|
||||
export const stdout = new NodeJSStreamFileBlob(process.stdout) satisfies typeof Bun.stdout;
|
||||
// @ts-expect-error ---
|
||||
export const stderr = new NodeJSStreamFileBlob(process.stderr) satisfies typeof Bun.stderr;
|
||||
export const argv = [process.argv0, ...process.execArgv, ...process.argv.slice(1)] satisfies typeof Bun.argv;
|
||||
export const env = process.env satisfies typeof Bun.env;
|
||||
Object.setPrototypeOf(env, {
|
||||
toJSON(this: typeof env) { return { ...this }; }
|
||||
});
|
||||
// @ts-expect-error supports-color types are unbelievably bad
|
||||
export const enableANSIColors = (await import('supports-color')).createSupportsColor().hasBasic satisfies typeof Bun.enableANSIColors;
|
||||
|
||||
export const hash = bunHash satisfies typeof Bun.hash;
|
||||
Object.setPrototypeOf(hash, bunHashProto satisfies Hash);
|
||||
|
||||
export const unsafe = {
|
||||
gcAggressionLevel: () => 0, //! no-op
|
||||
arrayBufferToString: (buf) => new TextDecoder().decode(buf),
|
||||
segfault: () => {
|
||||
const segfault = new Error();
|
||||
segfault.name = 'SegfaultTest';
|
||||
segfault.message = '';
|
||||
console.error(segfault);
|
||||
process.exit(1);
|
||||
}
|
||||
} satisfies typeof Bun['unsafe'];
|
||||
|
||||
export const SHA1 = SHA1Polyfill satisfies typeof Bun.SHA1;
|
||||
export const MD5 = MD5Polyfill satisfies typeof Bun.MD5;
|
||||
export const MD4 = MD4Polyfill satisfies typeof Bun.MD4;
|
||||
export const SHA224 = SHA224Polyfill satisfies typeof Bun.SHA224;
|
||||
export const SHA512 = SHA512Polyfill satisfies typeof Bun.SHA512;
|
||||
export const SHA384 = SHA384Polyfill satisfies typeof Bun.SHA384;
|
||||
export const SHA256 = SHA256Polyfill satisfies typeof Bun.SHA256;
|
||||
export const SHA512_256 = SHA512_256Polyfill satisfies typeof Bun.SHA512_256;
|
||||
|
||||
export const indexOfLine = ((data, offset) => {
|
||||
if (data instanceof ArrayBuffer || data instanceof SharedArrayBuffer) data = new Uint8Array(data);
|
||||
if (data instanceof DataView || !(data instanceof Uint8Array)) data = new Uint8Array(data.buffer);
|
||||
return data.indexOf(10, offset);
|
||||
}) satisfies typeof Bun.indexOfLine;
|
||||
|
||||
const peek_ = function peek(promise: Parameters<typeof Bun.peek>[0]) {
|
||||
throw new NotImplementedError('Bun.peek', peek);
|
||||
};
|
||||
peek_.status = (promise => {
|
||||
return util.inspect(promise).includes('<pending>') ? 'pending'
|
||||
: util.inspect(promise).includes('<rejected>') ? 'rejected' : 'fulfilled';
|
||||
}) satisfies typeof Bun.peek.status;
|
||||
export const peek = peek_ satisfies typeof Bun.peek;
|
||||
|
||||
export const sleep = (ms => {
|
||||
return new Promise(r => setTimeout(r, ms instanceof Date ? ms.valueOf() - Date.now() : ms));
|
||||
}) satisfies typeof Bun.sleep;
|
||||
export const sleepSync = (ms => {
|
||||
if (ms < 0) throw new TypeError('argument to sleepSync must not be negative');
|
||||
Atomics.wait(new Int32Array(new SharedArrayBuffer(4)), 0, 0, ms);
|
||||
}) satisfies typeof Bun.sleepSync;
|
||||
|
||||
//? This is not 1:1 matching, but no one should be relying on the exact output of this function anyway.
|
||||
//? To quote Node's inspect itself: "The output of util.inspect() may change at any time and should not be depended upon programmatically."
|
||||
//? Of course in Node's case some didn't listen and relied on the output of util.inspect() anyway, but hopefully this won't happen with this one.
|
||||
export const inspect = ((arg: any): string => util.inspect(arg, {
|
||||
breakLength: Infinity,
|
||||
colors: false,
|
||||
compact: true,
|
||||
customInspect: false,
|
||||
depth: Infinity,
|
||||
getters: true,
|
||||
maxArrayLength: Infinity,
|
||||
maxStringLength: Infinity,
|
||||
showHidden: false,
|
||||
showProxy: false,
|
||||
sorted: false
|
||||
})) satisfies typeof Bun.inspect;
|
||||
|
||||
export const resolveSync = ((id: string, parent: string) => import.meta.resolveSync(id, parent)) satisfies typeof Bun.resolveSync;
|
||||
export const resolve = (async (id: string, parent: string) => import.meta.resolve!(id, parent)) satisfies typeof Bun.resolve;
|
||||
|
||||
//? Yes, this is faster than new Uint8Array(Buffer.allocUnsafe(size).buffer) by about 2.5x in Node.js
|
||||
export const allocUnsafe = ((size: number) => new Uint8Array(size)) satisfies typeof Bun.allocUnsafe;
|
||||
|
||||
export const generateHeapSnapshot = (async (): Promise<HeapSnapshot> => {
|
||||
process.emitWarning('The polyfill for Bun.generateHeapShot is asynchronous, unlike the original which is synchronous.', {
|
||||
type: 'BunPolyfillWarning',
|
||||
code: 'BUN_POLYFILLS_ASYNC_GENERATE_HEAP_SNAPSHOT',
|
||||
detail: 'This is due to v8.getHeapSnapshot() returning a stream in Node.js. This is not a bug, but a limitation of the polyfill.'
|
||||
});
|
||||
const raw = (await streamToBuffer(v8.getHeapSnapshot())).toString('utf8');
|
||||
const json = JSON.parse(raw) as V8HeapSnapshot;
|
||||
return {
|
||||
version: 2,
|
||||
type: 'Inspector',
|
||||
nodes: json.nodes,
|
||||
edges: json.edges,
|
||||
edgeTypes: json.snapshot.meta.edge_types.flat(),
|
||||
edgeNames: json.snapshot.meta.edge_fields.flat(),
|
||||
nodeClassNames: json.snapshot.meta.node_types.flat(),
|
||||
};
|
||||
// @ts-expect-error Refer to the above emitWarning call
|
||||
}) satisfies typeof Bun.generateHeapSnapshot;
|
||||
|
||||
//! This is a no-op in Node.js, as there is no way to shrink the V8 heap from JS as far as I know.
|
||||
export const shrink = (() => void 0) satisfies typeof Bun.shrink;
|
||||
|
||||
export const openInEditor = ((file: string, opts?: EditorOptions) => {
|
||||
const target = [{ file: path.resolve(process.cwd(), file), line: opts?.line, column: opts?.column }] as const;
|
||||
if (opts?.editor) openEditor(target, opts);
|
||||
else openEditor(target, { editor: process.env.TERM_PROGRAM ?? process.env.VISUAL ?? process.env.EDITOR ?? 'vscode' });
|
||||
}) satisfies typeof Bun.openInEditor;
|
||||
|
||||
export const serve = (() => { throw new NotImplementedError('Bun.serve', serve); }) satisfies typeof Bun.serve;
|
||||
|
||||
export const file = ((path: string | URL | Uint8Array | ArrayBufferLike | number, options?: BlobPropertyBag): BunFileBlob => {
|
||||
if (typeof path === 'object') throw new NotImplementedError('Bun.file with typed array', file);
|
||||
return new FileBlob(path, options);
|
||||
}) satisfies typeof Bun.file;
|
||||
|
||||
export const write = (async (dest: BunFileBlob | PathLike, input: string | Blob | TypedArray | ArrayBufferLike | BlobPart[] | Response | BunFileBlob): ReturnType<typeof Bun.write> => {
|
||||
if (!isFileBlob(dest)) {
|
||||
let fd: number;
|
||||
if (dest instanceof ArrayBuffer || dest instanceof SharedArrayBuffer) fd = fs.openSync(Buffer.from(dest), 'w');
|
||||
// bun-types thought it'd be funny to make their own URL definition which doesnt match with the correct URL definition...
|
||||
else if (typeof dest === 'string' || dest instanceof URL) fd = fs.openSync(dest as import('url').URL, 'w');
|
||||
else fd = fs.openSync(Buffer.from(dest.buffer), 'w');
|
||||
|
||||
if (input instanceof Response || input instanceof Blob) {
|
||||
const data = await input.text();
|
||||
return new Promise((resolve, reject) => {
|
||||
fs.write(fd, data, (err, written) => err ? reject(err) : resolve(written));
|
||||
});
|
||||
}
|
||||
if (Array.isArray(input)) {
|
||||
const data = await new Blob(input).text();
|
||||
return new Promise((resolve, reject) => {
|
||||
fs.write(fd, data, (err, written) => err ? reject(err) : resolve(written));
|
||||
});
|
||||
}
|
||||
return new Promise((resolve, reject) => {
|
||||
if (typeof input === 'string') return fs.write(fd, input, (err, written) => err ? reject(err) : resolve(written));
|
||||
if (input instanceof Uint8Array) return fs.write(fd, input, (err, written) => err ? reject(err) : resolve(written));
|
||||
if (input instanceof ArrayBuffer) return fs.write(fd, new Uint8Array(input), (err, written) => err ? reject(err) : resolve(written));
|
||||
if (input instanceof SharedArrayBuffer) return fs.write(fd, new Uint8Array(input), (err, written) => err ? reject(err) : resolve(written));
|
||||
return write(dest, String(input)); // if all else fails, it seems Bun tries to convert to string and write that.
|
||||
});
|
||||
} else {
|
||||
const writer = dest.writer();
|
||||
if (Array.isArray(input)) input = new Blob(input);
|
||||
if (input instanceof Blob || input instanceof Response) return writer.write(await input.arrayBuffer());
|
||||
if (input instanceof ArrayBuffer || input instanceof SharedArrayBuffer || ArrayBuffer.isView(input)) return writer.write(input);
|
||||
if (typeof input === 'string') return writer.write(input);
|
||||
else return write(dest, String(input)); // if all else fails, it seems Bun tries to convert to string and write that.
|
||||
}
|
||||
}) satisfies typeof Bun.write;
|
||||
|
||||
export const sha = SHA512_256.hash satisfies typeof Bun.sha;
|
||||
|
||||
export const nanoseconds = (() => Math.trunc(performance.now() * 1000000)) satisfies typeof Bun.nanoseconds;
|
||||
|
||||
//? This just prints out some debug stuff in console, and as the name implies no one should be using it.
|
||||
//? But, just in case someone does, we'll make it a no-op function so at least the program doesn't crash trying to run the function.
|
||||
export const DO_NOT_USE_OR_YOU_WILL_BE_FIRED_mimalloc_dump = (() => {
|
||||
console.warn('DO_NOT_USE_OR_YOU_WILL_BE_FIRED_mimalloc_dump called.');
|
||||
}) satisfies unknown; /* undocumented */
|
||||
|
||||
export const gzipSync = zlib.gzipSync satisfies typeof Bun.gzipSync;
|
||||
export const deflateSync = zlib.deflateSync satisfies typeof Bun.deflateSync;
|
||||
export const gunzipSync = zlib.gunzipSync satisfies typeof Bun.gunzipSync;
|
||||
export const inflateSync = zlib.inflateSync satisfies typeof Bun.inflateSync;
|
||||
|
||||
export const which = ((cmd: string, options) => {
|
||||
const opts: npm_which.Options = { all: false, nothrow: true };
|
||||
if (options?.PATH) opts.path = options.PATH;
|
||||
const result = npm_which.sync(cmd, opts) as string | null;
|
||||
if (!result || !options?.cwd) return result;
|
||||
if (path.normalize(result).includes(path.normalize(options.cwd))) return result;
|
||||
else return null;
|
||||
}) satisfies typeof Bun.which;
|
||||
|
||||
export const spawn = ((...args) => {
|
||||
let cmd: string;
|
||||
let argv: string[];
|
||||
let opts: SpawnOptions.OptionsObject;
|
||||
|
||||
if (args[0] instanceof Array) {
|
||||
cmd = args[0][0];
|
||||
argv = args[0].slice(1);
|
||||
opts = isOptions(args[1]) ? args[1] : {};
|
||||
} else {
|
||||
cmd = args[0].cmd[0];
|
||||
argv = args[0].cmd.slice(1);
|
||||
opts = args[0];
|
||||
Reflect.deleteProperty(opts, 'cmd');
|
||||
}
|
||||
|
||||
let stdio: StdioOptions = [];
|
||||
opts.stdio ??= [undefined, undefined, undefined];
|
||||
if (opts.stdin) opts.stdio[0] = opts.stdin;
|
||||
if (opts.stdout) opts.stdio[1] = opts.stdout;
|
||||
if (opts.stderr) opts.stdio[2] = opts.stderr;
|
||||
for (let i = 1; i < 3; i++) { // this intentionally skips stdin
|
||||
let std = opts.stdio[i];
|
||||
if (isArrayBufferView(std)) stdio[i] = streams.Readable.fromWeb(new Blob([std]).stream());
|
||||
else if (std instanceof Blob || isFileBlob(std)) stdio[i] = streams.Readable.fromWeb(std.stream());
|
||||
else if (std instanceof ReadableStream) stdio[i] = streams.Readable.fromWeb(std);
|
||||
else if (std instanceof Response || std instanceof Request) stdio[i] = streams.Readable.fromWeb(std.body!);
|
||||
else stdio[i] = std;
|
||||
}
|
||||
let stdinSrc: typeof opts.stdio[0] = null;
|
||||
if (opts.stdio[0] && typeof opts.stdio[0] !== 'string') {
|
||||
stdinSrc = opts.stdio[0];
|
||||
stdio[0] = 'pipe';
|
||||
}
|
||||
|
||||
const subp = chp.spawn(cmd, argv, {
|
||||
cwd: opts.cwd ?? process.cwd(),
|
||||
// why is this set to (string | number) on env values...
|
||||
env: { ...(opts.env as Record<string, string> ?? process.env) },
|
||||
stdio
|
||||
}) as unknown as Subprocess;
|
||||
const subpAsNode = subp as unknown as ChildProcess;
|
||||
const stdstreams = [subpAsNode.stdin, subpAsNode.stdout, subpAsNode.stderr] as const;
|
||||
if (subpAsNode.stdout) {
|
||||
const rstream = streams.Readable.toWeb(subpAsNode.stdout) as ReadableStream;
|
||||
Reflect.set(rstream, 'destroy', function (this: ReadableStream, err?: Error) {
|
||||
void (err ? this.cancel(String(err)) : this.cancel()).catch(() => { /* if it fails its already closed */ });
|
||||
return this;
|
||||
});
|
||||
(<Mutable<Subprocess>>subp).stdout = rstream;
|
||||
}
|
||||
if (subpAsNode.stderr) {
|
||||
const rstream = streams.Readable.toWeb(subpAsNode.stderr) as ReadableStream;
|
||||
Reflect.set(rstream, 'destroy', function (this: ReadableStream, err?: Error) {
|
||||
void (err ? this.cancel(String(err)) : this.cancel()).catch(() => { /* if it fails its already closed */ });
|
||||
return this;
|
||||
});
|
||||
(<Mutable<Subprocess>>subp).stderr = rstream;
|
||||
}
|
||||
let internalStdinStream: streams.Writable;
|
||||
if (subpAsNode.stdin) {
|
||||
const wstream = subpAsNode.stdin;
|
||||
Reflect.set(wstream, 'destroy', function (this: NodeJS.WritableStream, err?: Error) {
|
||||
void this.end(); /* if it fails its already closed */
|
||||
return this;
|
||||
});
|
||||
internalStdinStream = wstream;
|
||||
(<Mutable<Subprocess>>subp).stdin = new FileSink(wstream);
|
||||
|
||||
}
|
||||
Object.defineProperty(subp, 'readable', { get(this: Subprocess) { return this.stdout; } });
|
||||
Object.defineProperty(subp, 'exited', {
|
||||
value: new Promise((resolve, reject) => {
|
||||
subpAsNode.once('exit', (code) => {
|
||||
stdstreams[0]?.destroy();
|
||||
stdstreams[1]?.destroy();
|
||||
stdstreams[2]?.destroy();
|
||||
subp.kill();
|
||||
subp.unref();
|
||||
subpAsNode.disconnect?.();
|
||||
subpAsNode.removeAllListeners();
|
||||
resolve(code);
|
||||
});
|
||||
})
|
||||
});
|
||||
if (stdinSrc) subpAsNode.once('spawn', () => {
|
||||
const stdinWeb = streams.Writable.toWeb(internalStdinStream);
|
||||
if (isArrayBufferView(stdinSrc)) stdinSrc = new Blob([stdinSrc]);
|
||||
if (stdinSrc instanceof Blob) void stdinSrc.stream().pipeTo(stdinWeb);
|
||||
else if (stdinSrc instanceof Response || stdinSrc instanceof Request) void stdinSrc.body!.pipeTo(stdinWeb);
|
||||
else if (typeof stdinSrc === 'number') void fs.createReadStream('', { fd: stdinSrc }).pipe(internalStdinStream);
|
||||
else void stdinSrc;
|
||||
});
|
||||
// change the error stack to point to the spawn() call instead of internal Node.js callback stuff
|
||||
const here = new Error('§__PLACEHOLDER__§');
|
||||
Error.captureStackTrace(here, spawn);
|
||||
if (!subpAsNode.pid) return subpAsNode.once('error', (err: SystemError) => {
|
||||
err.message = (err.syscall ?? `spawn ${err.path ?? ''}`) + ' ' + (err.code ?? String(err.errno ?? ''));
|
||||
err.stack = here.stack!.replace('§__PLACEHOLDER__§', err.message);
|
||||
throw err;
|
||||
}) as unknown as Subprocess;
|
||||
return subp;
|
||||
}) satisfies typeof Bun.spawn;
|
||||
export const spawnSync = ((...args): SyncSubprocess => {
|
||||
let cmd: string;
|
||||
let argv: string[];
|
||||
let opts: SpawnOptions.OptionsObject;
|
||||
if (args[0] instanceof Array) {
|
||||
cmd = args[0][0];
|
||||
argv = args[0].slice(1);
|
||||
opts = isOptions(args[1]) ? args[1] : {};
|
||||
} else {
|
||||
cmd = args[0].cmd[0];
|
||||
argv = args[0].cmd.slice(1);
|
||||
opts = args[0];
|
||||
Reflect.deleteProperty(opts, 'cmd');
|
||||
}
|
||||
|
||||
let stdio: StdioOptions = [];
|
||||
opts.stdio ??= [undefined, undefined, undefined];
|
||||
if (opts.stdin) opts.stdio[0] = opts.stdin;
|
||||
if (opts.stdout) opts.stdio[1] = opts.stdout;
|
||||
if (opts.stderr) opts.stdio[2] = opts.stderr;
|
||||
for (let i = 1; i < 3; i++) { // this intentionally skips stdin
|
||||
let std = opts.stdio[i];
|
||||
if (isArrayBufferView(std)) stdio[i] = streams.Readable.fromWeb(new Blob([std]).stream());
|
||||
else if (std instanceof Blob || isFileBlob(std)) stdio[i] = streams.Readable.fromWeb(std.stream());
|
||||
else if (std instanceof ReadableStream) stdio[i] = streams.Readable.fromWeb(std);
|
||||
else if (std instanceof Response || std instanceof Request) stdio[i] = streams.Readable.fromWeb(std.body!);
|
||||
else stdio[i] = std;
|
||||
}
|
||||
let input: ArrayBufferView | string | undefined;
|
||||
if (opts.stdio[0] && typeof opts.stdio[0] !== 'string') {
|
||||
stdio[0] = null; // will be overriden by chp.spawnSync "input" option
|
||||
//! Due to the fully async nature of Blobs, Responses and Requests,
|
||||
//! we can't synchronously get the data out of them here in userland.
|
||||
if (opts.stdio[0] instanceof Blob) throw new NotImplementedError('Bun.spawnSync({ stdin: <Blob> })', spawnSync);
|
||||
else if (opts.stdio[0] instanceof Response || opts.stdio[0] instanceof Request) throw new NotImplementedError('Bun.spawnSync({ stdin: <Response|Request> })', spawnSync);
|
||||
else if (typeof opts.stdio[0] === 'number') input = fs.readFileSync(opts.stdio[0]);
|
||||
else input = opts.stdio[0] as ArrayBufferView;
|
||||
}
|
||||
|
||||
const subp = chp.spawnSync(cmd, argv, {
|
||||
cwd: opts.cwd ?? process.cwd(),
|
||||
env: { ...(opts.env as Record<string, string> ?? process.env) },
|
||||
stdio, input
|
||||
}) as unknown as SyncSubprocess;
|
||||
const subpAsNode = subp as unknown as SpawnSyncReturns<Buffer>;
|
||||
if (subpAsNode.error) throw subpAsNode.error;
|
||||
|
||||
subp.exitCode = subpAsNode.status ?? NaN; //! not sure what Bun would return here (child killed by signal)
|
||||
subp.success = subp.exitCode === 0;
|
||||
return subp;
|
||||
}) satisfies typeof Bun.spawnSync;
|
||||
|
||||
export const escapeHTML = ((input) => {
|
||||
const str = String(input);
|
||||
let out = '';
|
||||
for (let i = 0; i < str.length; i++) {
|
||||
const char = str[i];
|
||||
switch (char) {
|
||||
case '"': out += '"'; break;
|
||||
case "'": out += '''; break;
|
||||
case '&': out += '&'; break;
|
||||
case '<': out += '<'; break;
|
||||
case '>': out += '>'; break;
|
||||
default: out += char;
|
||||
}
|
||||
}
|
||||
return out;
|
||||
}) satisfies typeof Bun.escapeHTML;
|
||||
|
||||
export const readableStreamToArrayBuffer = ((stream: ReadableStream<ArrayBufferView | ArrayBufferLike>): ArrayBuffer | Promise<ArrayBuffer> => {
|
||||
return (async () => {
|
||||
const sink = new ArrayBufferSink();
|
||||
const reader = stream.getReader();
|
||||
while (true) {
|
||||
const { done, value } = await reader.read();
|
||||
if (done) break;
|
||||
sink.write(value);
|
||||
}
|
||||
return sink.end() as ArrayBuffer;
|
||||
})();
|
||||
}) satisfies typeof Bun.readableStreamToArrayBuffer;
|
||||
export const readableStreamToText = (async (stream: ReadableStream<ArrayBufferView | ArrayBuffer>) => {
|
||||
let result = '';
|
||||
const reader = stream.pipeThrough(new TextDecoderStream()).getReader(); ReadableStreamDefaultReader
|
||||
while (true) {
|
||||
const { done, value } = await reader.read();
|
||||
//! for some reason "done" isnt being set to true so this is just infinitely looping at the moment... sigh
|
||||
if (done || !value || !value?.length) break;
|
||||
result += value;
|
||||
}
|
||||
return result;
|
||||
}) satisfies typeof Bun.readableStreamToText;
|
||||
export const readableStreamToBlob = (async (stream: ReadableStream<any>) => {
|
||||
const parts = await readableStreamToArray(stream);
|
||||
return new Blob(parts as BlobPart[]);
|
||||
}) satisfies typeof Bun.readableStreamToBlob;
|
||||
export const readableStreamToArray = (async <T = unknown>(stream: ReadableStream<T>) => {
|
||||
const array = new Array<T>();
|
||||
const reader = stream.getReader();
|
||||
while (true) {
|
||||
const { done, value } = await reader.read();
|
||||
if (done || !value || !(<any>value)?.length) break;
|
||||
array.push(value as unknown as T);
|
||||
}
|
||||
return array;
|
||||
}) satisfies typeof Bun.readableStreamToArray;
|
||||
export const readableStreamToJSON = (async <T = unknown>(stream: ReadableStream<Uint8Array>) => {
|
||||
const text = await readableStreamToText(stream);
|
||||
try {
|
||||
return JSON.parse(text) as T;
|
||||
} catch (err) {
|
||||
Error.captureStackTrace(err as Error, readableStreamToJSON);
|
||||
throw err;
|
||||
}
|
||||
}) satisfies typeof Bun.readableStreamToJSON;
|
||||
|
||||
export const concatArrayBuffers = ((buffers) => {
|
||||
let size = 0;
|
||||
for (const chunk of buffers) size += chunk.byteLength;
|
||||
const buffer = new ArrayBuffer(size);
|
||||
const view = new Uint8Array(buffer);
|
||||
let offset = 0;
|
||||
for (const chunk of buffers) {
|
||||
view.set(new Uint8Array(chunk instanceof ArrayBuffer || chunk instanceof SharedArrayBuffer ? chunk : chunk.buffer), offset);
|
||||
offset += chunk.byteLength;
|
||||
}
|
||||
return buffer;
|
||||
}) satisfies typeof Bun.concatArrayBuffers;
|
||||
|
||||
export const ArrayBufferSink = ArrayBufferSinkPolyfill satisfies typeof Bun.ArrayBufferSink;
|
||||
|
||||
export const pathToFileURL = pathToFileURLNode satisfies typeof Bun.pathToFileURL;
|
||||
export const fileURLToPath = fileURLToPathNode satisfies typeof Bun.fileURLToPath;
|
||||
|
||||
export const dns = dnsPolyfill satisfies typeof Bun.dns;
|
||||
|
||||
export const isMainThread = workers.isMainThread satisfies typeof Bun.isMainThread;
|
||||
|
||||
//! It may be possible to implement plugins with Node ESM loaders, but it would take some effort and have some caveats.
|
||||
//! For now, we'll simply make all calls to Bun.plugin no-op, such that manual implementation of an external ESM loader is possible,
|
||||
//! but without needing to strip out all Bun.plugin calls from the source code for running on Node.
|
||||
const dummyPluginBuilder: PluginBuilder = ({
|
||||
onLoad(constraints: PluginConstraints, callback: OnLoadCallback): void {
|
||||
return; // stubbed
|
||||
},
|
||||
onResolve(constraints: PluginConstraints, callback: OnResolveCallback): void {
|
||||
return; // stubbed
|
||||
},
|
||||
config: { plugins: [], entrypoints: [] },
|
||||
}) satisfies PluginBuilder;
|
||||
const bunPlugin = <T extends BunPlugin>(options: T) => options?.setup?.(dummyPluginBuilder) as ReturnType<T['setup']>;
|
||||
bunPlugin.clearAll = () => void 0;
|
||||
export const plugin = bunPlugin satisfies typeof Bun.plugin;
|
||||
/*void plugin({
|
||||
name: 'test',
|
||||
target: 'bun',
|
||||
setup(builder) {
|
||||
if (builder.target !== 'bun') return;
|
||||
builder.onResolve({ namespace: 'sample', filter: /.+/ }, args => {
|
||||
args.importer;
|
||||
if (args.path === 'foo') return { namespace: 'redirect', path: 'bar' };
|
||||
else return;
|
||||
});
|
||||
builder.onLoad({ namespace: 'sample', filter: /.+/ }, args => {
|
||||
args.path;
|
||||
return { loader: 'object', exports: { foo: 'bar' }, contents: 'void 0;' };
|
||||
});
|
||||
}
|
||||
});*/
|
||||
67
packages/bun-polyfills/src/modules/bun/arraybuffersink.ts
Normal file
67
packages/bun-polyfills/src/modules/bun/arraybuffersink.ts
Normal file
@@ -0,0 +1,67 @@
|
||||
type BunArrayBufferSink = InstanceType<typeof Bun.ArrayBufferSink>;
|
||||
|
||||
export class ArrayBufferSink implements BunArrayBufferSink {
|
||||
#started: boolean = true;
|
||||
#closed: boolean = false;
|
||||
#offset: number = 0;
|
||||
#stream: boolean = false;
|
||||
#asUint8: boolean = false;
|
||||
#buffer: Buffer = Buffer.allocUnsafe(8192);
|
||||
|
||||
get sinkId(): number { return 0; } //? undocumented, seems to always return 0
|
||||
|
||||
#ASSERT_NOT_CLOSED(caller: AnyFunction): void {
|
||||
if (!this.#closed) return;
|
||||
const err = new TypeError('Expected Sink');
|
||||
Error.captureStackTrace(err, caller);
|
||||
throw err;
|
||||
}
|
||||
|
||||
start({ asUint8Array = false, highWaterMark = 8192, stream = false }: Parameters<BunArrayBufferSink['start']>[0] = {}): void {
|
||||
this.#ASSERT_NOT_CLOSED(this.start);
|
||||
this.#started = true;
|
||||
this.#offset = 0;
|
||||
this.#stream = stream;
|
||||
this.#asUint8 = asUint8Array;
|
||||
if (highWaterMark !== this.#buffer.byteLength) this.#buffer = Buffer.allocUnsafe(highWaterMark);
|
||||
}
|
||||
|
||||
write(data: string | ArrayBufferView | SharedArrayBuffer | ArrayBuffer): number {
|
||||
this.#ASSERT_NOT_CLOSED(this.write);
|
||||
if (typeof data === 'string') data = new TextEncoder().encode(data);
|
||||
const writedata = (data instanceof ArrayBuffer || data instanceof SharedArrayBuffer) ? new Uint8Array(data) : new Uint8Array(data.buffer, data.byteOffset, data.byteLength);
|
||||
// this is very bad API design to not throw an error here, but it's what Bun does
|
||||
if (!this.#started) return writedata.byteLength;
|
||||
|
||||
if (this.#offset + writedata.byteLength > this.#buffer.byteLength) {
|
||||
const newLength = Math.ceil((this.#offset + writedata.byteLength) / 1024) * 1024;
|
||||
const newBuffer = Buffer.allocUnsafe(newLength);
|
||||
newBuffer.set(this.#buffer);
|
||||
this.#buffer = newBuffer;
|
||||
}
|
||||
this.#buffer.set(writedata, this.#offset);
|
||||
this.#offset += writedata.byteLength;
|
||||
return writedata.byteLength;
|
||||
}
|
||||
|
||||
// Returns a copy of the buffered bytes and resets the write offset.
// When not in streaming mode this is (per Bun) a broken no-op that returns 0.
flush(): number | Uint8Array | ArrayBuffer {
    this.#ASSERT_NOT_CLOSED(this.flush);
    if (!this.#stream) return 0; //! brokenly seems to always return 0 and do nothing
    const copied = new Uint8Array(this.#offset);
    copied.set(this.#buffer.subarray(0, this.#offset)); // faster than Buffer.copy or Uint8Array.slice
    this.#offset = 0;
    if (this.#asUint8) return copied;
    return copied.buffer as ArrayBuffer;
}
|
||||
|
||||
// Finalizes the sink: drains all buffered data (temporarily forcing streaming
// mode so flush() actually returns it) and clears the started flag.
end(): Uint8Array | ArrayBuffer {
    this.#ASSERT_NOT_CLOSED(this.end);
    const wasStreaming = this.#stream;
    this.#stream = true; // force flush() to return the data
    const result = this.flush() as Uint8Array | ArrayBuffer;
    this.#stream = wasStreaming;
    this.#started = false;
    return result;
}
|
||||
|
||||
//? undocumented: marks the sink closed so subsequent calls fail the closed assertion
close(): void {
    this.#closed = true;
}
|
||||
}
|
||||
21
packages/bun-polyfills/src/modules/bun/dns.ts
Normal file
21
packages/bun-polyfills/src/modules/bun/dns.ts
Normal file
@@ -0,0 +1,21 @@
|
||||
import dns from 'node:dns';
|
||||
|
||||
const dnsObj: typeof Bun.dns = {
|
||||
async lookup(hostname, options) {
|
||||
const opts = { verbatim: true, all: true } as dns.LookupOptions;
|
||||
if (options?.family) {
|
||||
if (options.family === 'IPv4') opts.family = 4;
|
||||
else if (options.family === 'IPv6') opts.family = 6;
|
||||
else if (options.family === 'any') opts.family = 0;
|
||||
else opts.family = options.family;
|
||||
}
|
||||
if (options?.flags) opts.hints = options.flags;
|
||||
const records = ((await dns.promises.resolveAny(hostname))
|
||||
.filter(r => r.type === 'A' || r.type === 'AAAA') as (dns.AnyARecord | dns.AnyAaaaRecord)[])
|
||||
.map(r => ({ address: r.address, family: r.type === 'A' ? 4 as const : 6 as const, ttl: r.ttl }));
|
||||
return records;
|
||||
},
|
||||
// This has more properties but they're not documented on bun-types yet, oh well.
|
||||
};
|
||||
|
||||
export default dnsObj;
|
||||
195
packages/bun-polyfills/src/modules/bun/fileblob.ts
Normal file
195
packages/bun-polyfills/src/modules/bun/fileblob.ts
Normal file
@@ -0,0 +1,195 @@
|
||||
import fs from 'node:fs';
|
||||
import tty from 'node:tty';
|
||||
import streams from 'node:stream';
|
||||
import { ReadableStream as NodeWebReadableStream } from 'node:stream/web';
|
||||
import { FileSink } from './filesink.js';
|
||||
import { SystemError } from '../../utils/errors.js';
|
||||
import type { FileBlob as BunFileBlob, FileSink as BunFileSink } from 'bun';
|
||||
|
||||
type NodeJSStream = streams.Readable | streams.Writable;
|
||||
|
||||
function NodeJSReadableStreamToBlob(stream: NodeJS.ReadableStream | NodeJS.ReadWriteStream, iostream: boolean = false, type?: string): Promise<Blob> {
|
||||
if (stream.isPaused()) stream.resume();
|
||||
return new Promise((resolve, reject) => {
|
||||
const chunks: any[] = [];
|
||||
const dataHandler = (chunk: any) => { chunks.push(chunk); if (iostream) end(); };
|
||||
const end = () => {
|
||||
resolve(new Blob(chunks, type != null ? { type } : undefined));
|
||||
stream.off('data', dataHandler);
|
||||
stream.off('end', end);
|
||||
stream.pause();
|
||||
};
|
||||
stream.once('data', dataHandler).once('end', end);
|
||||
//.once('error', reject); Bun waits to error on actual operations on the stream, therefore so will we.
|
||||
});
|
||||
}
|
||||
|
||||
// Blob facade over a Node.js stream (e.g. process stdio), mimicking Bun's
// stream-backed FileBlob. The size is unknown (Infinity) until first drained.
export const NodeJSStreamFileBlob = class FileBlob extends Blob {
    constructor(source: NodeJSStream, slice: [number?, number?] = [undefined, undefined], type = 'application/octet-stream') {
        super(undefined, { type });
        // Remove Blob's own `size` so the accessor pair defined below takes over.
        Reflect.deleteProperty(this, 'size');
        if (source === process.stdout || source === process.stdin || source === process.stderr) {
            this.#iostream = true;
        }
        // tty.WriteStream is a Readable subclass too, so it is excluded explicitly.
        this.#readable = source instanceof streams.Readable && !(source instanceof tty.WriteStream);
        this.#source = source;
        this.#slice = slice;
        this.#size = Infinity; // unknown until #getBlob() measures it
    }
    readonly #iostream: boolean = false;  // true when wrapping process stdio
    readonly #readable: boolean;          // whether #source can actually be read
    readonly #source: NodeJSStream;
    readonly #slice: [number?, number?];  // [begin, end] forwarded to Blob#slice
    #size: number;                        // last observed size (Infinity until read)

    slice(begin?: number, end?: number, contentType?: string): Blob;
    slice(begin?: number, contentType?: string): Blob;
    slice(contentType?: string): Blob;
    // Overload resolution: string arguments are content types, numbers are offsets.
    slice(beginOrType?: number | string, endOrType?: number | string, contentType: string = this.type): Blob {
        if (typeof beginOrType === 'string') return new FileBlob(this.#source, this.#slice, beginOrType);
        if (typeof endOrType === 'string') return new FileBlob(this.#source, [beginOrType, undefined], endOrType);
        return new FileBlob(this.#source, [beginOrType, endOrType], contentType);
    }

    override stream(): ReadableStream<Uint8Array> {
        // This makes no sense but Bun does it so we will too
        if (!this.#readable) return new ReadableStream();
        return streams.Readable.toWeb(this.#source as streams.Readable);
    }

    // Function blamed in captured stack traces for read errors;
    // reassigned by text()/arrayBuffer()/json() so the public caller is blamed.
    #blobStackFn: AnyFunction = this.#getBlob;

    // Drains the underlying stream into a Blob and applies the stored slice.
    async #getBlob(): Promise<Blob> {
        if (!this.#readable) {
            const err = new SystemError(-1, 'read');
            Error.captureStackTrace(err, this.#blobStackFn);
            throw err;
        }
        const blob = (await NodeJSReadableStreamToBlob(this.#source as streams.Readable, this.#iostream)).slice(...this.#slice);
        this.#size = blob.size;
        return blob;
    }

    override async text(): Promise<string> {
        // Keep blaming json() when called from json(); otherwise blame text().
        if (this.#blobStackFn !== this.json) this.#blobStackFn = this.text;
        return (await this.#getBlob()).text();
    }
    override async arrayBuffer(): Promise<ArrayBuffer> {
        this.#blobStackFn = this.arrayBuffer;
        return (await this.#getBlob()).arrayBuffer();
    }
    override async json<TJSONReturnType = unknown>(): Promise<TJSONReturnType> {
        this.#blobStackFn = this.json;
        return JSON.parse(await this.text()) as Promise<TJSONReturnType>;
    }

    override get size(): number { return this.#size; }
    override set size(_) { return; } // size is derived; assignments are silently ignored
};
|
||||
|
||||
// File-backed Blob polyfill for Bun's FileBlob, built on a Node.js file
// descriptor. Open errors are deferred: stored and thrown lazily on access,
// matching Bun's behavior of only erroring on actual file operations.
export class FileBlob extends Blob implements BunFileBlob {
    constructor(fdOrPath: number | string, opts: BlobPropertyBag = {}) {
        opts.type ??= 'application/octet-stream'; // TODO: Get MIME type from file extension
        super(undefined, opts);
        // Remove Blob's own `size` so the getter defined below takes over.
        Reflect.deleteProperty(this, 'size');
        // __data/__slice/__error are private back-channels used by slice() below.
        if (Reflect.get(opts, '__data')) this.#data = Reflect.get(opts, '__data') as Blob;
        const slice = Reflect.get(opts, '__slice') as [number?, number?] | undefined;
        if (slice) {
            slice[0] &&= slice[0] | 0; // int cast
            slice[1] &&= slice[1] | 0; // int cast
            this.#slice = slice;
            slice[0] ??= 0;
            // Precompute the slice length where it is knowable without the file size.
            if (typeof slice[1] === 'undefined') {
                if (slice[0] < 0) this.#sliceSize = -slice[0]; // tail slice: last N bytes
            }
            else if (slice[0] < 0 && slice[1] < 0) this.#sliceSize = -(slice[0] - slice[1]);
            else if (slice[0] >= 0 && slice[1] >= 0) this.#sliceSize = slice[1] - slice[0];
            // NOTE(review): mixed-sign slices fall through with #sliceSize = 0 — presumably
            // the real size is only known once #read() caches the data; confirm.
        }
        if (typeof fdOrPath === 'string') try {
            this.#fd = fs.openSync(fdOrPath, 'r+');
        } catch (err) {
            this.#error = err as SystemError; // deferred: thrown on first file operation
        }
        else {
            this.#fd = fdOrPath;
            this.#error = Reflect.get(opts, '__error') as SystemError | undefined;
        }
        if (!this.#error) {
            const rstream = fs.createReadStream('', { fd: this.#fd, start: this.#slice[0], end: this.#slice[1] });
            this.#readable = streams.Readable.toWeb(rstream);
        }
    }
    readonly #readable?: NodeWebReadableStream; // web stream over the fd (absent on error)
    readonly #error?: SystemError;              // deferred open error
    readonly #slice: [number?, number?] = [];   // [start, end] byte range within the file
    readonly #sliceSize: number = 0;            // precomputed slice length when determinable
    readonly #fd: number = NaN;
    #data?: Blob;                               // cached file contents (filled by #read)

    // Reads the whole file synchronously and caches the sliced contents as a Blob.
    #read() {
        if (this.#error) throw this.#error;
        const read = fs.readFileSync(this.#fd);
        this.#data = new Blob([read.subarray(...this.#slice)], { type: this.type });
    }

    //! Bun 0.2 seems to return undefined for this, this might not be accurate or it's broken on Bun's side
    get readable(): ReadableStream<any> {
        if (this.#error) throw this.#error;
        return this.#readable! as ReadableStream;
    }

    get lastModified(): number {
        if (this.#error) throw this.#error;
        return fs.fstatSync(this.#fd).mtimeMs;
    }

    // "Exists" here means the file opened successfully in the constructor.
    async exists(): Promise<boolean> {
        return !this.#error;
    }

    writer(): BunFileSink {
        if (this.#error) throw this.#error;
        return new FileSink(this.#fd);
    }

    // TODO: what's contentType?
    // String arguments collapse into contentType, mirroring the Blob#slice overloads.
    override slice(begin?: number | string, end?: number | string, contentType?: string): FileBlob {
        if (typeof begin === 'string') {
            contentType = begin;
            begin = undefined;
        }
        if (typeof end === 'string') {
            contentType = end;
            end = undefined;
        }
        return new FileBlob(this.#fd, {
            __error: this.#error,
            __slice: [begin, end],
            __data: this.#data?.slice(begin, end),
        } as BlobPropertyBag);
    }
    override arrayBuffer(): Promise<ArrayBuffer> {
        if (!this.#data) this.#read();
        return new Blob([this.#data ?? '']).arrayBuffer();
    }
    override text(): Promise<string> {
        if (!this.#data) this.#read();
        return new Blob([this.#data ?? '']).text();
    }
    override json(): Promise<any>;
    override json<TJSONReturnType = unknown>(): Promise<TJSONReturnType>;
    override json<TJSONReturnType = unknown>(): Promise<TJSONReturnType> | Promise<any> {
        if (!this.#data) this.#read();
        return new Blob([this.#data ?? '']).json();
    }
    override stream(): NodeJS.ReadableStream;
    override stream(): ReadableStream<Uint8Array>;
    override stream(): ReadableStream<Uint8Array> | NodeJS.ReadableStream {
        if (!this.#data) this.#read();
        return new Blob([this.#data ?? '']).stream();
    }

    // Size of the cached data when read; otherwise the precomputed slice size (0 if unknown).
    override get size(): number {
        return this.#data?.size ?? (this.#sliceSize || 0);
    }
}
|
||||
87
packages/bun-polyfills/src/modules/bun/filesink.ts
Normal file
87
packages/bun-polyfills/src/modules/bun/filesink.ts
Normal file
@@ -0,0 +1,87 @@
|
||||
import fs from 'node:fs';
|
||||
import { SystemError } from '../../utils/errors.js';
|
||||
import type { FileSink as BunFileSink } from 'bun';
|
||||
|
||||
export class FileSink implements BunFileSink {
|
||||
constructor(fdOrPathOrStream: number | string | NodeJS.WritableStream) {
|
||||
if (typeof fdOrPathOrStream === 'string') try {
|
||||
this.#fd = fs.openSync(fdOrPathOrStream, 'a+');
|
||||
fs.ftruncateSync(this.#fd, 0);
|
||||
} catch (err) {
|
||||
throw err as SystemError;
|
||||
}
|
||||
else if (typeof fdOrPathOrStream === 'number') {
|
||||
this.#fd = fdOrPathOrStream; // hope this fd is writable
|
||||
fs.ftruncateSync(this.#fd, 0);
|
||||
}
|
||||
else {
|
||||
this.#stream = fdOrPathOrStream;
|
||||
}
|
||||
}
|
||||
#fd: number = NaN;
|
||||
#stream: NodeJS.WritableStream | undefined;
|
||||
#closed: boolean = false;
|
||||
#writtenSinceFlush: number = 0;
|
||||
#totalWritten: number = 0;
|
||||
|
||||
start(options?: { highWaterMark?: number | undefined; } | undefined): void {
|
||||
return; // TODO
|
||||
}
|
||||
|
||||
ref(): void {
|
||||
return; // TODO
|
||||
}
|
||||
|
||||
unref(): void {
|
||||
return; // TODO
|
||||
}
|
||||
|
||||
write(chunk: string | ArrayBufferView | SharedArrayBuffer | ArrayBuffer): number {
|
||||
if (this.#closed) {
|
||||
return typeof chunk === 'string' ? chunk.length : chunk.byteLength;
|
||||
}
|
||||
if (this.#stream) {
|
||||
let data;
|
||||
if (chunk instanceof ArrayBuffer || chunk instanceof SharedArrayBuffer) data = new Uint8Array(chunk);
|
||||
else if (!(chunk instanceof Uint8Array) && typeof chunk !== 'string') data = new Uint8Array(chunk.buffer);
|
||||
else data = chunk;
|
||||
this.#stream.write(data);
|
||||
const written = typeof data === 'string' ? data.length : data.byteLength;
|
||||
this.#totalWritten += written;
|
||||
return written;
|
||||
}
|
||||
if (typeof chunk === 'string') {
|
||||
fs.appendFileSync(this.#fd, chunk, 'utf8');
|
||||
this.#writtenSinceFlush += chunk.length;
|
||||
return chunk.length;
|
||||
}
|
||||
if (chunk instanceof ArrayBuffer || chunk instanceof SharedArrayBuffer) fs.appendFileSync(this.#fd, new Uint8Array(chunk));
|
||||
else fs.appendFileSync(this.#fd, new Uint8Array(chunk.buffer));
|
||||
this.#writtenSinceFlush += chunk.byteLength;
|
||||
return chunk.byteLength;
|
||||
}
|
||||
|
||||
//! flushing after writing to a closed FileSink segfaults in Bun but I don't see the need to implement that behavior
|
||||
flush(): number | Promise<number> {
|
||||
if (this.#closed) return 0;
|
||||
// no-op because this is a synchronous implementation
|
||||
const written = this.#writtenSinceFlush;
|
||||
this.#writtenSinceFlush = 0;
|
||||
return written;
|
||||
}
|
||||
|
||||
//! not sure what to do with this error
|
||||
end(error?: Error): number | Promise<number> {
|
||||
if (this.#closed) return this.#totalWritten;
|
||||
const flushed = this.flush();
|
||||
if (this.#stream) {
|
||||
this.#stream.end();
|
||||
this.#closed = true;
|
||||
return flushed;
|
||||
}
|
||||
this.#totalWritten = fs.fstatSync(this.#fd).size;
|
||||
fs.closeSync(this.#fd);
|
||||
this.#closed = true;
|
||||
return flushed;
|
||||
}
|
||||
}
|
||||
185
packages/bun-polyfills/src/modules/bun/hashes.ts
Normal file
185
packages/bun-polyfills/src/modules/bun/hashes.ts
Normal file
@@ -0,0 +1,185 @@
|
||||
import type { CryptoHashInterface, DigestEncoding, Hash } from 'bun';
|
||||
import nodecrypto from 'node:crypto';
|
||||
import os from 'node:os';
|
||||
import md4, { Md4 } from 'js-md4';
|
||||
import { wyhash, adler32, crc32, cityhash32, cityhash64, murmur32v3, murmur64v2, murmur32v2 } from '../../../lib/zighash/index.mjs';
|
||||
|
||||
// Bun.hash(): wyhash with an optional numeric/bigint seed, delegated to the zighash lib.
export const bunHash = ((data, seed = 0): bigint => wyhash(data, BigInt(seed))) as typeof Bun.hash;
// The named-algorithm hashers hanging off Bun.hash; thin delegations to zighash.
export const bunHashProto: Hash = {
    wyhash(data, seed = 0n) { return wyhash(data, seed); },
    adler32(data) { return adler32(data); },
    crc32(data) { return crc32(data); },
    cityHash32(data) { return cityhash32(data); },
    cityHash64(data, seed = 0n) { return cityhash64(data, seed); },
    murmur32v3(data, seed = 0) { return murmur32v3(data, seed); },
    murmur32v2(data, seed = 0) { return murmur32v2(data, seed); },
    murmur64v2(data, seed = 0n) { return murmur64v2(data, seed); },
};
|
||||
|
||||
// Shape of the underlying hasher: a node:crypto Hash or a compatible adapter (see MD4).
type HashImpl = {
    digest(): Buffer;
    digest(encoding: nodecrypto.BinaryToTextEncoding): string;
    update(data: nodecrypto.BinaryLike): HashImpl;
    update(data: string, inputEncoding: nodecrypto.Encoding): HashImpl;
};
// Common implementation behind the Bun-compatible SHA*/MD* classes below.
abstract class BaseHash<T> implements CryptoHashInterface<T> {
    readonly #hash: HashImpl | null;
    constructor(algorithm: string | HashImpl) {
        if (typeof algorithm === 'string') this.#hash = nodecrypto.createHash(algorithm);
        // If no preset algorithm is given, expect the subclass to fully implement its own.
        else this.#hash = algorithm;
    }
    // Feeds data into the hasher; (Shared)ArrayBuffers are wrapped as Uint8Array first.
    update(data: StringOrBuffer) {
        if (data instanceof ArrayBuffer || data instanceof SharedArrayBuffer) this.#hash!.update(new Uint8Array(data));
        else this.#hash!.update(data);
        return this as unknown as T; // is there any good way to do this without asserting?
    }
    digest(encoding: DigestEncoding): string;
    digest(hashInto?: TypedArray): TypedArray;
    // Finalizes the hash. With a string encoding, returns encoded text; with a
    // TypedArray, fills it with the digest; with nothing, returns a fresh Uint8Array.
    digest(encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray {
        if (typeof encodingOrHashInto === 'string') {
            const encoded = this.#hash!.digest(encodingOrHashInto);
            // you'd think node would throw an error if the encoding is invalid, but nope!
            // instead it silently returns as if you passed no encoding and gives a Buffer...
            if (Buffer.isBuffer(encoded)) throw new TypeError(`Unknown encoding: "${encodingOrHashInto}"`);
            else return encoded;
        }
        const digested = this.#hash!.digest();
        if (encodingOrHashInto === undefined) return new Uint8Array(digested.buffer, digested.byteOffset, digested.byteLength);
        if (encodingOrHashInto.byteLength < this.byteLength) throw new TypeError(`TypedArray must be at least ${this.byteLength} bytes`);
        if (encodingOrHashInto instanceof BigInt64Array || encodingOrHashInto instanceof BigUint64Array) {
            // avoid checking endianness for every loop iteration
            const endianAwareInsert = os.endianness() === 'LE'
                ? (arr: string[], j: number, num: string) => arr[7 - j] = num
                : (arr: string[], j: number, num: string) => arr[j] = num;

            // Assemble each 64-bit lane from 8 digest bytes as a hex string, then BigInt it.
            for (let i = 0; i < digested.byteLength; i += 8) {
                const bigintStrArr = ['', '', '', '', '', '', '', ''];
                for (let j = 0; j < 8; j++) {
                    const byte = digested[i + j];
                    if (byte === undefined) break; // digest length not a multiple of 8
                    endianAwareInsert(bigintStrArr, j, byte.toString(16).padStart(2, '0'));
                }
                encodingOrHashInto[i / 8] = BigInt(`0x${bigintStrArr.join('')}`);
            }
        } else {
            const HashIntoTypedArray = encodingOrHashInto.constructor as TypedArrayConstructor;
            // this will work as long as all hash classes have a byteLength that is a multiple of 4 bytes
            encodingOrHashInto.set(new HashIntoTypedArray(digested.buffer, digested.byteOffset, digested.byteLength / HashIntoTypedArray.BYTES_PER_ELEMENT));
        }
        return encodingOrHashInto;
    }
    static hash(data: StringOrBuffer, encoding?: DigestEncoding): string;
    static hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray;
    // Placeholder body; every concrete subclass overrides this with a real implementation.
    static hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray { return '' };
    static readonly byteLength: number;
    abstract readonly byteLength: number;
}
|
||||
|
||||
export class SHA1 extends BaseHash<SHA1> {
|
||||
constructor() { super('sha1'); }
|
||||
static override readonly byteLength = 20;
|
||||
override readonly byteLength = 20;
|
||||
static override hash(data: StringOrBuffer, encoding?: DigestEncoding): string;
|
||||
static override hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray;
|
||||
static override hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray {
|
||||
const instance = new this(); instance.update(data);
|
||||
return instance.digest(encodingOrHashInto as DigestEncoding & TypedArray);
|
||||
}
|
||||
}
|
||||
export class MD4 extends BaseHash<MD4> {
|
||||
constructor() { //! Not supported by nodecrypto
|
||||
const hash = md4.create() as unknown as Omit<Md4, 'toString'> & { _update: Md4['update'] };
|
||||
function digest(): Buffer;
|
||||
function digest(encoding: nodecrypto.BinaryToTextEncoding): string;
|
||||
function digest(encoding?: nodecrypto.BinaryToTextEncoding) {
|
||||
const buf = Buffer.from(hash.arrayBuffer());
|
||||
if (encoding) return buf.toString(encoding);
|
||||
else return buf;
|
||||
}
|
||||
function update(data: nodecrypto.BinaryLike) {
|
||||
if (typeof data === 'string') hash._update(data);
|
||||
else if (data instanceof ArrayBuffer || data instanceof SharedArrayBuffer) hash._update(new Uint8Array(data));
|
||||
else hash._update(new Uint8Array(data.buffer));
|
||||
return hash as unknown as MD4HashImpl;
|
||||
}
|
||||
type MD4HashImpl = Omit<Md4, 'toString'> & { digest: typeof digest, update: typeof update };
|
||||
// @ts-expect-error patches to reuse the BaseHash methods
|
||||
hash.digest = digest; hash._update = hash.update; hash.update = update;
|
||||
super(hash as unknown as MD4HashImpl);
|
||||
}
|
||||
static override readonly byteLength = 16;
|
||||
override readonly byteLength = 16;
|
||||
static override hash(data: StringOrBuffer, encoding?: DigestEncoding): string;
|
||||
static override hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray;
|
||||
static override hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray {
|
||||
const instance = new this(); instance.update(data);
|
||||
return instance.digest(encodingOrHashInto as DigestEncoding & TypedArray);
|
||||
}
|
||||
}
|
||||
export class MD5 extends BaseHash<MD5> {
|
||||
constructor() { super('md5'); }
|
||||
static override readonly byteLength = 16;
|
||||
override readonly byteLength = 16;
|
||||
static override hash(data: StringOrBuffer, encoding?: DigestEncoding): string;
|
||||
static override hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray;
|
||||
static override hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray {
|
||||
const instance = new this(); instance.update(data);
|
||||
return instance.digest(encodingOrHashInto as DigestEncoding & TypedArray);
|
||||
}
|
||||
}
|
||||
export class SHA224 extends BaseHash<SHA224> {
|
||||
constructor() { super('sha224'); }
|
||||
static override readonly byteLength = 28;
|
||||
override readonly byteLength = 28;
|
||||
static override hash(data: StringOrBuffer, encoding?: DigestEncoding): string;
|
||||
static override hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray;
|
||||
static override hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray {
|
||||
const instance = new this(); instance.update(data);
|
||||
return instance.digest(encodingOrHashInto as DigestEncoding & TypedArray);
|
||||
}
|
||||
}
|
||||
export class SHA512 extends BaseHash<SHA512> {
|
||||
constructor() { super('sha512'); }
|
||||
static override readonly byteLength = 64;
|
||||
override readonly byteLength = 64;
|
||||
static override hash(data: StringOrBuffer, encoding?: DigestEncoding): string;
|
||||
static override hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray;
|
||||
static override hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray {
|
||||
const instance = new this(); instance.update(data);
|
||||
return instance.digest(encodingOrHashInto as DigestEncoding & TypedArray);
|
||||
}
|
||||
}
|
||||
export class SHA384 extends BaseHash<SHA384> {
|
||||
constructor() { super('sha384'); }
|
||||
static override readonly byteLength = 48;
|
||||
override readonly byteLength = 48;
|
||||
static override hash(data: StringOrBuffer, encoding?: DigestEncoding): string;
|
||||
static override hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray;
|
||||
static override hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray {
|
||||
const instance = new this(); instance.update(data);
|
||||
return instance.digest(encodingOrHashInto as DigestEncoding & TypedArray);
|
||||
}
|
||||
}
|
||||
export class SHA256 extends BaseHash<SHA256> {
|
||||
constructor() { super('sha256'); }
|
||||
static override readonly byteLength = 32;
|
||||
override readonly byteLength = 32;
|
||||
static override hash(data: StringOrBuffer, encoding?: DigestEncoding): string;
|
||||
static override hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray;
|
||||
static override hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray {
|
||||
const instance = new this(); instance.update(data);
|
||||
return instance.digest(encodingOrHashInto as DigestEncoding & TypedArray);
|
||||
}
|
||||
}
|
||||
export class SHA512_256 extends BaseHash<SHA512_256> {
|
||||
constructor() { super('sha512-256'); }
|
||||
static override readonly byteLength = 32;
|
||||
override readonly byteLength = 32;
|
||||
static override hash(data: StringOrBuffer, encoding?: DigestEncoding): string;
|
||||
static override hash(data: StringOrBuffer, hashInto?: TypedArray): TypedArray;
|
||||
static override hash(data: StringOrBuffer, encodingOrHashInto?: DigestEncoding | TypedArray): string | TypedArray {
|
||||
const instance = new this(); instance.update(data);
|
||||
return instance.digest(encodingOrHashInto as DigestEncoding & TypedArray);
|
||||
}
|
||||
}
|
||||
103
packages/bun-polyfills/src/modules/bun/transpiler.ts
Normal file
103
packages/bun-polyfills/src/modules/bun/transpiler.ts
Normal file
@@ -0,0 +1,103 @@
|
||||
import type { JavaScriptLoader, TranspilerOptions, Transpiler as BunTranspiler, Import } from 'bun';
|
||||
import { NotImplementedError } from '../../utils/errors.js';
|
||||
|
||||
// TODO: Possible implementation with WASM builds of bun with just the transpiler?
|
||||
// NOTE: This is possible to implement with something like SWC, and was previously done,
|
||||
// but it has lots of quirks due to the differences between SWC and Bun, so the plan is
|
||||
// to not do that unless there is actual demand for using Bun.Transpiler in Node.js before
|
||||
// the WASM build is worked on. The signatures are here for now as a placeholder.
|
||||
|
||||
// Placeholder for Bun.Transpiler: the API surface exists but every method
// throws NotImplementedError (see the file-header notes about a future Bun WASM
// backing). The commented-out bodies are a prior SWC-based reference implementation.
export default class Transpiler implements BunTranspiler {
    constructor(options?: TranspilerOptions) {
        this.#options = options ?? {};
    }

    // Decodes non-string input for input-validation parity with Bun, then throws.
    async transform(code: StringOrBuffer, loader: JavaScriptLoader): Promise<string> {
        if (typeof code !== 'string') code = new TextDecoder().decode(code);
        throw new NotImplementedError('Bun.Transpiler', this.transform);
    }

    transformSync(code: StringOrBuffer, ctx: object): string;
    transformSync(code: StringOrBuffer, loader: JavaScriptLoader, ctx: object): string;
    transformSync(code: StringOrBuffer, loader?: JavaScriptLoader | undefined): string;
    // Normalizes arguments (defaulting the loader to 'js'), then throws.
    transformSync(code: StringOrBuffer, loader?: JavaScriptLoader | object, ctx: object = {}): string {
        if (typeof code !== 'string') code = new TextDecoder().decode(code);
        if (typeof loader !== 'string') loader = 'js';
        throw new NotImplementedError('Bun.Transpiler', this.transformSync);
    }

    // Would return all imports and exports found in `code`; not implemented.
    scan(code: StringOrBuffer): { exports: string[]; imports: Import[]; } {
        if (typeof code !== 'string') code = new TextDecoder().decode(code);
        throw new NotImplementedError('Bun.Transpiler', this.scan);
        //return {
        //    imports: this.scanImports(code),
        //    exports: this.#scanExports(code)
        //};
    }

    // Would return static and dynamic import paths found in `code`; not implemented.
    scanImports(code: StringOrBuffer): {
        kind: 'import-statement' | 'dynamic-import';
        path: string;
    }[] {
        if (typeof code !== 'string') code = new TextDecoder().decode(code);
        throw new NotImplementedError('Bun.Transpiler', this.scanImports);
        //const imports: { kind: 'import-statement' | 'dynamic-import', path: string }[] = [];
        //this.#scanTopLevelImports(code).forEach(x => imports.push({ kind: 'import-statement', path: x }));
        //this.#scanDynamicImports(code).forEach(x => imports.push({ kind: 'dynamic-import', path: x }));
        //return imports;
    }

    /*#scanDynamicImports(code: string): string[] {
        return this.parseSync(code, {
            syntax: this.#syntax, target: 'es2022', tsx: this.#options.loader === 'tsx'
        }).body.filter(x => x.type === 'ExpressionStatement' && x.expression.type === 'CallExpression' && x.expression.callee.type === 'Import')
            .map(i => (((i as swc.ExpressionStatement).expression as swc.CallExpression).arguments[0].expression as swc.StringLiteral).value);
    }*/

    /*#scanTopLevelImports(code: string): string[] {
        return this.parseSync(code, {
            syntax: this.#syntax, target: 'es2022', tsx: this.#options.loader === 'tsx'
        }).body.filter(x => x.type === 'ImportDeclaration' || x.type === 'ExportAllDeclaration' || x.type === 'ExportNamedDeclaration')
            .filter(i => !(i as swc.ImportDeclaration).typeOnly)
            .map(i => (i as swc.ImportDeclaration).source.value);
    }*/

    /*#scanExports(code: string, includeDefault: boolean = false): string[] {
        const parsed = this.parseSync(code, {
            syntax: this.#syntax, target: 'es2022', tsx: this.#options.loader === 'tsx'
        }).body;
        const exports = [];
        exports.push(parsed.filter(x => x.type === 'ExportDeclaration' && !x.declaration.declare)
            .flatMap(i => ((i as swc.ExportDeclaration).declaration as swc.ClassDeclaration).identifier?.value ??
                ((i as swc.ExportDeclaration).declaration as swc.VariableDeclaration).declarations.map(d => (d.id as swc.Identifier).value)
            )
        );
        exports.push(parsed.filter(x => x.type === 'ExportNamedDeclaration')
            .flatMap(i => (i as swc.ExportNamedDeclaration).specifiers
                .filter(s => s.type === 'ExportSpecifier' && !s.isTypeOnly)
                .map(s => (s as swc.NamedExportSpecifier).exported?.value ?? (s as swc.NamedExportSpecifier).orig.value)
            )
        );
        if (includeDefault) exports.push(this.#scanDefaultExport(code) ?? []);
        return exports.flat();
    }*/

    /*#scanDefaultExport(code: string): 'default' | undefined {
        const parsed = this.parseSync(code, {
            syntax: this.#syntax, target: 'es2022', tsx: this.#options.loader === 'tsx'
        }).body;

        const defaultExportDecl = parsed.find(x => x.type === 'ExportDefaultDeclaration') as swc.ExportDefaultDeclaration | undefined;
        if (!defaultExportDecl) {
            const defaultExportExpr = parsed.find(x => x.type === 'ExportDefaultExpression') as swc.ExportDefaultExpression | undefined;
            if (!defaultExportExpr) return undefined;
            if (!defaultExportExpr.expression.type.startsWith('Ts')) return 'default';
            else return undefined;
        }

        if (!defaultExportDecl.decl.type.startsWith('Ts') && !Reflect.get(defaultExportDecl.decl, 'declare')) return 'default';
        else return undefined;
    }*/

    // Options captured at construction; currently unused by the stub methods.
    #options: TranspilerOptions;
}
|
||||
111
packages/bun-polyfills/src/modules/jsc.ts
Normal file
111
packages/bun-polyfills/src/modules/jsc.ts
Normal file
@@ -0,0 +1,111 @@
|
||||
import type jsc from 'bun:jsc';
|
||||
import v8 from 'node:v8';
|
||||
//import { setRandomSeed, getRandomSeed } from './mathrandom.js';
|
||||
import { NotImplementedError, getCallSites } from '../utils/errors.js';
|
||||
import { gc } from './bun.js';
|
||||
|
||||
// Shared no-op used below for jsc APIs that cannot be polyfilled meaningfully.
const STUB = () => void 0;
|
||||
|
||||
function jscSerialize(value: any, options?: { binaryType: 'nodebuffer'; }): Buffer;
|
||||
function jscSerialize(value: any, options?: { binaryType?: 'arraybuffer'; }): SharedArrayBuffer;
|
||||
function jscSerialize(value: any, options?: { binaryType?: string }): Buffer | SharedArrayBuffer {
|
||||
const serialized = v8.serialize(value);
|
||||
if (options?.binaryType === 'nodebuffer') return serialized;
|
||||
else return new SharedArrayBuffer(serialized.byteLength);
|
||||
}
|
||||
// TODO: Investigate ways of making these the actual JSC serialization format (probably Bun WASM)
// TODO: whilst this works for common use-cases like Node <-> Node it still does not make it
// TODO: possible for Node <-> Bun transfers of this kind of data, which might be interesting to have.
// Public alias matching bun:jsc's export name.
export const serialize = jscSerialize satisfies typeof jsc.serialize;
||||
export const deserialize = (value => {
|
||||
if (value instanceof ArrayBuffer || value instanceof SharedArrayBuffer) return v8.deserialize(Buffer.from(value));
|
||||
else return v8.deserialize(value);
|
||||
}) satisfies typeof jsc.deserialize;
|
||||
|
||||
export const setTimeZone = ((timeZone: string) => {
|
||||
const resolvedTZ = Intl.DateTimeFormat(undefined, { timeZone }).resolvedOptions().timeZone;
|
||||
return process.env.TZ = resolvedTZ;
|
||||
}) satisfies typeof jsc.setTimeZone;
|
||||
|
||||
export const callerSourceOrigin = (() => {
|
||||
const callsites: NodeJS.CallSite[] = getCallSites(2);
|
||||
// This may be inaccurate with async code. Needs more testing.
|
||||
let lastSeenURL = '';
|
||||
for (const callsite of callsites) {
|
||||
const sourceURL = callsite.getScriptNameOrSourceURL();
|
||||
if (sourceURL.startsWith('file://')) lastSeenURL = sourceURL;
|
||||
}
|
||||
return lastSeenURL;
|
||||
}) satisfies typeof jsc.callerSourceOrigin;
|
||||
|
||||
// TODO: Like with jsc.serialize/deserialize, these may be possible with Bun WASM.
// Both throw NotImplementedError unconditionally; STUB trims the stack at the throw site.
export const jscDescribe = (() => { throw new NotImplementedError('jsc.jscDescribe', STUB); }) satisfies typeof jsc.jscDescribe;
export const jscDescribeArray = (() => { throw new NotImplementedError('jsc.jscDescribeArray', STUB); }) satisfies typeof jsc.jscDescribeArray;
// These are no longer documented but still exist.
export const describe = jscDescribe;
export const describeArray = jscDescribeArray;
|
||||
|
||||
// Node.js only provides a singular non-configurable global GC function, so we have to make do with that.
// All three JSC GC flavors are aliased to the same `gc` polyfill from ./bun.js.
export const edenGC = gc satisfies typeof jsc.edenGC;
export const fullGC = gc satisfies typeof jsc.fullGC;
export const gcAndSweep = gc satisfies typeof jsc.gcAndSweep;

// Intentional no-ops: Node exposes no equivalent hooks for these JSC internals.
export const drainMicrotasks = STUB satisfies typeof jsc.drainMicrotasks; // no-op
export const releaseWeakRefs = STUB satisfies typeof jsc.releaseWeakRefs; // no-op
export const startSamplingProfiler = STUB satisfies typeof jsc.startSamplingProfiler; // no-op
//! likely broken but needs more testing
export const startRemoteDebugger = STUB satisfies typeof jsc.startRemoteDebugger; // no-op
|
||||
|
||||
//! this is a really poor polyfill but it's better than nothing
// Only reports the global object; Node offers no way to enumerate GC-protected values.
export const getProtectedObjects = (() => { return [globalThis]; }) satisfies typeof jsc.getProtectedObjects;

// Placeholders (plain 0, not functions) — see the commented-out mathrandom.js import at the top of this file.
export const getRandomSeed = 0; // TODO
export const setRandomSeed = 0; // TODO
|
||||
|
||||
/** Current used JS heap size in bytes, sourced from V8's heap statistics. */
export const heapSize = (() => v8.getHeapStatistics().used_heap_size) satisfies typeof jsc.heapSize;
|
||||
export const heapStats = (() => {
|
||||
const stats = v8.getHeapStatistics();
|
||||
return {
|
||||
heapSize: stats.used_heap_size,
|
||||
heapCapacity: stats.total_available_size,
|
||||
extraMemorySize: stats.external_memory ?? 0,
|
||||
objectCount: 1, // TODO: how to get this in node?
|
||||
protectedObjectCount: getProtectedObjects().length,
|
||||
globalObjectCount: 2, // TODO: this one is probably fine hardcoded but is there a way to get this in node?
|
||||
protectedGlobalObjectCount: 1, // TODO: ^
|
||||
objectTypeCounts: {}, //! can't really throw an error here, so just return an empty object (TODO: how to get this in node?)
|
||||
protectedObjectTypeCounts: {} //! can't really throw an error here, so just return an empty object (TODO: how to get this in node?)
|
||||
};
|
||||
}) satisfies typeof jsc.heapStats;
|
||||
|
||||
//! doubtful anyone relies on the return of this for anything besides debugging
// Stub: always reports false.
export const isRope = (() => false) satisfies typeof jsc.isRope;
|
||||
|
||||
export const memoryUsage = (() => {
|
||||
const stats = v8.getHeapStatistics();
|
||||
const resUse = process.resourceUsage();
|
||||
return {
|
||||
current: stats.malloced_memory,
|
||||
peak: stats.peak_malloced_memory,
|
||||
currentCommit: stats.malloced_memory,
|
||||
peakCommit: stats.malloced_memory,
|
||||
pageFaults: resUse.minorPageFault + resUse.majorPageFault
|
||||
};
|
||||
}) satisfies typeof jsc.memoryUsage;
|
||||
|
||||
//! these are likely broken, seemingly always returning undefined which does not match the documented return types
// The `as unknown as` casts paper over the mismatch so `satisfies` still checks.
export const noFTL = (() => { return void 0 as unknown as Function; }) satisfies typeof jsc.noFTL;
export const noOSRExitFuzzing = (() => { return void 0 as unknown as Function; }) satisfies typeof jsc.noOSRExitFuzzing;
//! likely broken, seems to always returns zero
export const totalCompileTime = (() => 0) satisfies typeof jsc.totalCompileTime;
//! likely broken, seem to always returns 0 if any arguments are passed, undefined otherwise
export const numberOfDFGCompiles = ((...args) => args.length ? 0 : void 0 as unknown as number) satisfies typeof jsc.numberOfDFGCompiles;
export const reoptimizationRetryCount = ((...args) => args.length ? 0 : void 0 as unknown as number) satisfies typeof jsc.reoptimizationRetryCount;
|
||||
|
||||
//! The following are very likely impossible to ever polyfill.
// Both throw with overrideMsg=true so the message is used verbatim.
export const profile = (() => {
    throw new NotImplementedError('jsc.profile is not polyfillable', STUB, true);
}) satisfies typeof jsc.profile;
export const optimizeNextInvocation = (() => {
    throw new NotImplementedError('jsc.optimizeNextInvocation is not polyfillable', STUB, true);
}) satisfies typeof jsc.optimizeNextInvocation;
|
||||
31
packages/bun-polyfills/src/repl.ts
Normal file
31
packages/bun-polyfills/src/repl.ts
Normal file
@@ -0,0 +1,31 @@
|
||||
import bun from './index.js';
|
||||
import * as jsc from './modules/jsc.js';
|
||||
|
||||
// This file serves two purposes:
// 1. It is the entry point for using the Bun global in the REPL. (--import this file)
// 2. It makes TypeScript check the full structural compatibility of the Bun global vs the polyfills object,
// which allows for the type assertion below to be used as a TODO list index.

globalThis.Bun = bun as typeof bun & {
    // TODO: Missing polyfills
    readableStreamToFormData: typeof import('bun').readableStreamToFormData;
    deepEquals: typeof import('bun').deepEquals;
    deepMatch: typeof import('bun').deepMatch;
    build: typeof import('bun').build;
    mmap: typeof import('bun').mmap;
    connect: typeof import('bun').connect;
    listen: typeof import('bun').listen;
    Transpiler: typeof import('bun').Transpiler;
    password: typeof import('bun').password;
    CryptoHashInterface: typeof import('bun').CryptoHashInterface;
    CryptoHasher: typeof import('bun').CryptoHasher;
    FileSystemRouter: typeof import('bun').FileSystemRouter;

    //? Polyfilled but with broken types (See each one in ./src/modules/bun.ts for details)
    generateHeapSnapshot: typeof import('bun').generateHeapSnapshot;
    stdout: typeof import('bun').stdout;
    stderr: typeof import('bun').stderr;
    stdin: typeof import('bun').stdin;
};

// Expose the jsc polyfill module as a `jsc` global for REPL convenience.
Reflect.set(globalThis, 'jsc', jsc);
|
||||
13
packages/bun-polyfills/src/types/helpers.d.ts
vendored
Normal file
13
packages/bun-polyfills/src/types/helpers.d.ts
vendored
Normal file
@@ -0,0 +1,13 @@
|
||||
// Ambient type-level helpers shared across the polyfill sources (no imports/exports).

/** Any plain callable function. */
type AnyFunction = (...args: any[]) => any;
/** Any constructable class. */
type AnyClass = new (...args: any[]) => any;
/** Anything invokable: a function or a constructor. */
type AnyCallable = AnyFunction | AnyClass;

/** Extracts the key type of a `Map` type. */
type MapKeysType<T extends Map<unknown, unknown>> = T extends Map<infer K, infer V> ? K : never;
/** Extracts the value type of a `Map` type. */
type MapValuesType<T extends Map<unknown, unknown>> = T extends Map<infer K, infer V> ? V : never;

/** Removes `readonly` from every property of T. */
type Mutable<T> = { -readonly [K in keyof T]: T[K] };

/** Excluding the BigInt typed arrays */
type TypedArrayConstructor =
    | typeof Uint8Array | typeof Uint16Array | typeof Uint32Array | typeof Uint8ClampedArray
    | typeof Int8Array | typeof Int16Array | typeof Int32Array | typeof Float32Array | typeof Float64Array;
|
||||
72
packages/bun-polyfills/src/types/md4.d.ts
vendored
Normal file
72
packages/bun-polyfills/src/types/md4.d.ts
vendored
Normal file
@@ -0,0 +1,72 @@
|
||||
declare module 'js-md4' {
|
||||
export type MD4Input = string | ArrayBuffer | Uint8Array | number[];
|
||||
|
||||
interface md4 {
|
||||
/**
|
||||
* # Broken, will throw an error.
|
||||
* @deprecated Use {@link md4.hex} instead.
|
||||
*/
|
||||
(input: MD4Input): never;
|
||||
/** Creates an `Md4` hasher instance. */
|
||||
create(): Md4;
|
||||
/** Shortcut for `md4.create().update(...)` */
|
||||
update(message: MD4Input): Md4;
|
||||
/** Hash `message` into a hex string. */
|
||||
hex(message: MD4Input): string;
|
||||
/** Hash `message` into an Array. */
|
||||
array(message: MD4Input): number[];
|
||||
/** Identical to {@link md4.array}. */
|
||||
digest(message: MD4Input): number[];
|
||||
/**
|
||||
* Identical to {@link md4.arrayBuffer}.
|
||||
* @deprecated Use {@link md4.arrayBuffer} instead.
|
||||
*/
|
||||
buffer(message: MD4Input): ArrayBuffer;
|
||||
/** Hash `message` into an ArrayBuffer. */
|
||||
arrayBuffer(message: MD4Input): ArrayBuffer;
|
||||
}
|
||||
|
||||
export type Md4 = Md4;
|
||||
declare class Md4 {
|
||||
private constructor();
|
||||
|
||||
private toString(): string;
|
||||
private finalize(): void;
|
||||
private hash(): void;
|
||||
/**
|
||||
* Append `message` to the internal hash source data.
|
||||
* @returns A reference to `this` for chaining, or nothing if the instance has been finalized.
|
||||
*/
|
||||
update(message: MD4Input): this | void;
|
||||
/** Hash into a hex string. Finalizes the hash. */
|
||||
hex(): string;
|
||||
/** Hash into an Array. Finalizes the hash. */
|
||||
array(): number[];
|
||||
/** Identical to {@link Md4.array}. */
|
||||
digest(): number[];
|
||||
/**
|
||||
* Identical to {@link Md4.arrayBuffer}.
|
||||
* @deprecated Use {@link Md4.arrayBuffer} instead.
|
||||
*/
|
||||
buffer(): ArrayBuffer;
|
||||
/** Hash into an ArrayBuffer. Finalizes the hash. */
|
||||
arrayBuffer(): ArrayBuffer;
|
||||
|
||||
private buffer8: Uint8Array;
|
||||
private blocks: Uint32Array;
|
||||
private bytes: number;
|
||||
private start: number;
|
||||
private h3: number;
|
||||
private h2: number;
|
||||
private h1: number;
|
||||
private h0: number;
|
||||
readonly hashed: boolean;
|
||||
/** If true, `update()` operations will silently fail. */
|
||||
readonly finalized: boolean;
|
||||
readonly first: boolean;
|
||||
private lastByteIndex?: number;
|
||||
}
|
||||
|
||||
const md4: md4;
|
||||
export default md4;
|
||||
}
|
||||
30
packages/bun-polyfills/src/types/sync.d.ts
vendored
Normal file
30
packages/bun-polyfills/src/types/sync.d.ts
vendored
Normal file
@@ -0,0 +1,30 @@
|
||||
// This file explicitly redefines global types used in order to enforce the correct types,
// regardless of the arbitrary order in which TSC/TSServer decide to load the type libraries in.
// Annoyingly, even this file can sometimes break, so if your types are inverted, try restarting TSServer.

import '@types/node';

declare module 'stream/web' {
    interface ReadableStreamDefaultReader {
        // Declares the Bun `readMany` extension so TS accepts it on Node's web-stream reader.
        readMany(): Promise<ReadableStreamDefaultReadManyResult<any>>;
    }
}

declare global {
    // Pin `performance` to the Node perf_hooks flavor (over the DOM lib's declaration).
    var performance: typeof import('perf_hooks').performance;

    // TODO: These should be contributed to @types/node upstream
    // V8 CallSite methods missing from @types/node's NodeJS.CallSite declaration.
    namespace NodeJS {
        interface CallSite {
            getScriptNameOrSourceURL(): string;
            getEnclosingColumnNumber(): number;
            getEnclosingLineNumber(): number;
            getPosition(): number;
            getPromiseIndex(): number;
            getScriptHash(): string;
            isAsync(): boolean;
            isPromiseAll(): boolean;
            toString(): string;
        }
    }
}
|
||||
24
packages/bun-polyfills/src/types/v8heapsnapshot.d.ts
vendored
Normal file
24
packages/bun-polyfills/src/types/v8heapsnapshot.d.ts
vendored
Normal file
@@ -0,0 +1,24 @@
|
||||
/**
 * Typed shape of a V8 `.heapsnapshot` JSON document
 * (as consumed elsewhere in these polyfills; field names mirror V8's own output).
 */
interface V8HeapSnapshot {
    snapshot: {
        meta: {
            node_fields: string[],
            node_types: [string[], ...string[]],
            edge_fields: string[],
            edge_types: [string[], ...string[]],
            trace_function_info_fields: string[],
            trace_node_fields: string[],
            sample_fields: string[],
            location_fields: string[]
        },
        node_count: number,
        edge_count: number,
        trace_function_count: number
    },
    // Flat numeric arrays; interpret them using the field layouts in `snapshot.meta`.
    nodes: number[],
    edges: number[],
    trace_function_infos: unknown[],
    trace_tree: unknown[],
    samples: unknown[],
    locations: number[],
    strings: string[]
}
|
||||
230
packages/bun-polyfills/src/utils/errors.ts
Normal file
230
packages/bun-polyfills/src/utils/errors.ts
Normal file
@@ -0,0 +1,230 @@
|
||||
type PosixErrNo = MapKeysType<ReturnType<typeof getPosixSystemErrorMap>>;
|
||||
type Win32ErrNo = MapKeysType<ReturnType<typeof getWin32SystemErrorMap>>;
|
||||
|
||||
export function getCallSites(sliceOff = 1) {
|
||||
const originalPST = Error.prepareStackTrace;
|
||||
Error.prepareStackTrace = (error, stack) => stack;
|
||||
const { stack } = new Error();
|
||||
if (stack?.constructor.name !== 'Array') throw new Error('Failed to acquire structured JS stack trace');
|
||||
Error.prepareStackTrace = originalPST;
|
||||
return (stack as unknown as NodeJS.CallSite[]).slice(sliceOff);
|
||||
}
|
||||
|
||||
/**
 * Builds a fresh Map of negative errno values to `[code, description]` tuples
 * for POSIX platforms.
 * NOTE(review): the numbers appear to follow libuv's error table — confirm
 * against Node's `util.getSystemErrorMap()` before relying on exact values.
 */
export function getPosixSystemErrorMap() {
    return new Map([
        [ -7, [ 'E2BIG', 'argument list too long' ] ],
        [ -13, [ 'EACCES', 'permission denied' ] ],
        [ -98, [ 'EADDRINUSE', 'address already in use' ] ],
        [ -99, [ 'EADDRNOTAVAIL', 'address not available' ] ],
        [ -97, [ 'EAFNOSUPPORT', 'address family not supported' ] ],
        [ -11, [ 'EAGAIN', 'resource temporarily unavailable' ] ],
        [ -3000, [ 'EAI_ADDRFAMILY', 'address family not supported' ] ],
        [ -3001, [ 'EAI_AGAIN', 'temporary failure' ] ],
        [ -3002, [ 'EAI_BADFLAGS', 'bad ai_flags value' ] ],
        [ -3013, [ 'EAI_BADHINTS', 'invalid value for hints' ] ],
        [ -3003, [ 'EAI_CANCELED', 'request canceled' ] ],
        [ -3004, [ 'EAI_FAIL', 'permanent failure' ] ],
        [ -3005, [ 'EAI_FAMILY', 'ai_family not supported' ] ],
        [ -3006, [ 'EAI_MEMORY', 'out of memory' ] ],
        [ -3007, [ 'EAI_NODATA', 'no address' ] ],
        [ -3008, [ 'EAI_NONAME', 'unknown node or service' ] ],
        [ -3009, [ 'EAI_OVERFLOW', 'argument buffer overflow' ] ],
        [ -3014, [ 'EAI_PROTOCOL', 'resolved protocol is unknown' ] ],
        [ -3010, [ 'EAI_SERVICE', 'service not available for socket type' ] ],
        [ -3011, [ 'EAI_SOCKTYPE', 'socket type not supported' ] ],
        [ -114, [ 'EALREADY', 'connection already in progress' ] ],
        [ -9, [ 'EBADF', 'bad file descriptor' ] ],
        [ -16, [ 'EBUSY', 'resource busy or locked' ] ],
        [ -125, [ 'ECANCELED', 'operation canceled' ] ],
        [ -4080, [ 'ECHARSET', 'invalid Unicode character' ] ],
        [ -103, [ 'ECONNABORTED', 'software caused connection abort' ] ],
        [ -111, [ 'ECONNREFUSED', 'connection refused' ] ],
        [ -104, [ 'ECONNRESET', 'connection reset by peer' ] ],
        [ -89, [ 'EDESTADDRREQ', 'destination address required' ] ],
        [ -17, [ 'EEXIST', 'file already exists' ] ],
        [ -14, [ 'EFAULT', 'bad address in system call argument' ] ],
        [ -27, [ 'EFBIG', 'file too large' ] ],
        [ -113, [ 'EHOSTUNREACH', 'host is unreachable' ] ],
        [ -4, [ 'EINTR', 'interrupted system call' ] ],
        [ -22, [ 'EINVAL', 'invalid argument' ] ],
        [ -5, [ 'EIO', 'i/o error' ] ],
        [ -106, [ 'EISCONN', 'socket is already connected' ] ],
        [ -21, [ 'EISDIR', 'illegal operation on a directory' ] ],
        [ -40, [ 'ELOOP', 'too many symbolic links encountered' ] ],
        [ -24, [ 'EMFILE', 'too many open files' ] ],
        [ -90, [ 'EMSGSIZE', 'message too long' ] ],
        [ -36, [ 'ENAMETOOLONG', 'name too long' ] ],
        [ -100, [ 'ENETDOWN', 'network is down' ] ],
        [ -101, [ 'ENETUNREACH', 'network is unreachable' ] ],
        [ -23, [ 'ENFILE', 'file table overflow' ] ],
        [ -105, [ 'ENOBUFS', 'no buffer space available' ] ],
        [ -19, [ 'ENODEV', 'no such device' ] ],
        [ -2, [ 'ENOENT', 'no such file or directory' ] ],
        [ -12, [ 'ENOMEM', 'not enough memory' ] ],
        [ -64, [ 'ENONET', 'machine is not on the network' ] ],
        [ -92, [ 'ENOPROTOOPT', 'protocol not available' ] ],
        [ -28, [ 'ENOSPC', 'no space left on device' ] ],
        [ -38, [ 'ENOSYS', 'function not implemented' ] ],
        [ -107, [ 'ENOTCONN', 'socket is not connected' ] ],
        [ -20, [ 'ENOTDIR', 'not a directory' ] ],
        [ -39, [ 'ENOTEMPTY', 'directory not empty' ] ],
        [ -88, [ 'ENOTSOCK', 'socket operation on non-socket' ] ],
        [ -95, [ 'ENOTSUP', 'operation not supported on socket' ] ],
        [ -75, [ 'EOVERFLOW', 'value too large for defined data type' ] ],
        [ -1, [ 'EPERM', 'operation not permitted' ] ],
        [ -32, [ 'EPIPE', 'broken pipe' ] ],
        [ -71, [ 'EPROTO', 'protocol error' ] ],
        [ -93, [ 'EPROTONOSUPPORT', 'protocol not supported' ] ],
        [ -91, [ 'EPROTOTYPE', 'protocol wrong type for socket' ] ],
        [ -34, [ 'ERANGE', 'result too large' ] ],
        [ -30, [ 'EROFS', 'read-only file system' ] ],
        [ -108, [ 'ESHUTDOWN', 'cannot send after transport endpoint shutdown' ] ],
        [ -29, [ 'ESPIPE', 'invalid seek' ] ],
        [ -3, [ 'ESRCH', 'no such process' ] ],
        [ -110, [ 'ETIMEDOUT', 'connection timed out' ] ],
        [ -26, [ 'ETXTBSY', 'text file is busy' ] ],
        [ -18, [ 'EXDEV', 'cross-device link not permitted' ] ],
        [ -4094, [ 'UNKNOWN', 'unknown error' ] ],
        [ -4095, [ 'EOF', 'end of file' ] ],
        [ -6, [ 'ENXIO', 'no such device or address' ] ],
        [ -31, [ 'EMLINK', 'too many links' ] ],
        [ -112, [ 'EHOSTDOWN', 'host is down' ] ],
        [ -121, [ 'EREMOTEIO', 'remote I/O error' ] ],
        [ -25, [ 'ENOTTY', 'inappropriate ioctl for device' ] ],
        [ -4028, [ 'EFTYPE', 'inappropriate file type or format' ] ],
        [ -84, [ 'EILSEQ', 'illegal byte sequence' ] ],
        [ -94, [ 'ESOCKTNOSUPPORT', 'socket type not supported' ] ]
    ] as const);
}
|
||||
|
||||
/**
 * Builds a fresh Map of negative errno values to `[code, description]` tuples
 * for Windows. Same codes as the POSIX map but with Windows-specific numbers
 * (except the shared EAI_* getaddrinfo range).
 */
export function getWin32SystemErrorMap() {
    return new Map([
        [ -4093, [ 'E2BIG', 'argument list too long' ] ],
        [ -4092, [ 'EACCES', 'permission denied' ] ],
        [ -4091, [ 'EADDRINUSE', 'address already in use' ] ],
        [ -4090, [ 'EADDRNOTAVAIL', 'address not available' ] ],
        [ -4089, [ 'EAFNOSUPPORT', 'address family not supported' ] ],
        [ -4088, [ 'EAGAIN', 'resource temporarily unavailable' ] ],
        [ -3000, [ 'EAI_ADDRFAMILY', 'address family not supported' ] ],
        [ -3001, [ 'EAI_AGAIN', 'temporary failure' ] ],
        [ -3002, [ 'EAI_BADFLAGS', 'bad ai_flags value' ] ],
        [ -3013, [ 'EAI_BADHINTS', 'invalid value for hints' ] ],
        [ -3003, [ 'EAI_CANCELED', 'request canceled' ] ],
        [ -3004, [ 'EAI_FAIL', 'permanent failure' ] ],
        [ -3005, [ 'EAI_FAMILY', 'ai_family not supported' ] ],
        [ -3006, [ 'EAI_MEMORY', 'out of memory' ] ],
        [ -3007, [ 'EAI_NODATA', 'no address' ] ],
        [ -3008, [ 'EAI_NONAME', 'unknown node or service' ] ],
        [ -3009, [ 'EAI_OVERFLOW', 'argument buffer overflow' ] ],
        [ -3014, [ 'EAI_PROTOCOL', 'resolved protocol is unknown' ] ],
        [ -3010, [ 'EAI_SERVICE', 'service not available for socket type' ] ],
        [ -3011, [ 'EAI_SOCKTYPE', 'socket type not supported' ] ],
        [ -4084, [ 'EALREADY', 'connection already in progress' ] ],
        [ -4083, [ 'EBADF', 'bad file descriptor' ] ],
        [ -4082, [ 'EBUSY', 'resource busy or locked' ] ],
        [ -4081, [ 'ECANCELED', 'operation canceled' ] ],
        [ -4080, [ 'ECHARSET', 'invalid Unicode character' ] ],
        [ -4079, [ 'ECONNABORTED', 'software caused connection abort' ] ],
        [ -4078, [ 'ECONNREFUSED', 'connection refused' ] ],
        [ -4077, [ 'ECONNRESET', 'connection reset by peer' ] ],
        [ -4076, [ 'EDESTADDRREQ', 'destination address required' ] ],
        [ -4075, [ 'EEXIST', 'file already exists' ] ],
        [ -4074, [ 'EFAULT', 'bad address in system call argument' ] ],
        [ -4036, [ 'EFBIG', 'file too large' ] ],
        [ -4073, [ 'EHOSTUNREACH', 'host is unreachable' ] ],
        [ -4072, [ 'EINTR', 'interrupted system call' ] ],
        [ -4071, [ 'EINVAL', 'invalid argument' ] ],
        [ -4070, [ 'EIO', 'i/o error' ] ],
        [ -4069, [ 'EISCONN', 'socket is already connected' ] ],
        [ -4068, [ 'EISDIR', 'illegal operation on a directory' ] ],
        [ -4067, [ 'ELOOP', 'too many symbolic links encountered' ] ],
        [ -4066, [ 'EMFILE', 'too many open files' ] ],
        [ -4065, [ 'EMSGSIZE', 'message too long' ] ],
        [ -4064, [ 'ENAMETOOLONG', 'name too long' ] ],
        [ -4063, [ 'ENETDOWN', 'network is down' ] ],
        [ -4062, [ 'ENETUNREACH', 'network is unreachable' ] ],
        [ -4061, [ 'ENFILE', 'file table overflow' ] ],
        [ -4060, [ 'ENOBUFS', 'no buffer space available' ] ],
        [ -4059, [ 'ENODEV', 'no such device' ] ],
        [ -4058, [ 'ENOENT', 'no such file or directory' ] ],
        [ -4057, [ 'ENOMEM', 'not enough memory' ] ],
        [ -4056, [ 'ENONET', 'machine is not on the network' ] ],
        [ -4035, [ 'ENOPROTOOPT', 'protocol not available' ] ],
        [ -4055, [ 'ENOSPC', 'no space left on device' ] ],
        [ -4054, [ 'ENOSYS', 'function not implemented' ] ],
        [ -4053, [ 'ENOTCONN', 'socket is not connected' ] ],
        [ -4052, [ 'ENOTDIR', 'not a directory' ] ],
        [ -4051, [ 'ENOTEMPTY', 'directory not empty' ] ],
        [ -4050, [ 'ENOTSOCK', 'socket operation on non-socket' ] ],
        [ -4049, [ 'ENOTSUP', 'operation not supported on socket' ] ],
        [ -4026, [ 'EOVERFLOW', 'value too large for defined data type' ] ],
        [ -4048, [ 'EPERM', 'operation not permitted' ] ],
        [ -4047, [ 'EPIPE', 'broken pipe' ] ],
        [ -4046, [ 'EPROTO', 'protocol error' ] ],
        [ -4045, [ 'EPROTONOSUPPORT', 'protocol not supported' ] ],
        [ -4044, [ 'EPROTOTYPE', 'protocol wrong type for socket' ] ],
        [ -4034, [ 'ERANGE', 'result too large' ] ],
        [ -4043, [ 'EROFS', 'read-only file system' ] ],
        [ -4042, [ 'ESHUTDOWN', 'cannot send after transport endpoint shutdown' ] ],
        [ -4041, [ 'ESPIPE', 'invalid seek' ] ],
        [ -4040, [ 'ESRCH', 'no such process' ] ],
        [ -4039, [ 'ETIMEDOUT', 'connection timed out' ] ],
        [ -4038, [ 'ETXTBSY', 'text file is busy' ] ],
        [ -4037, [ 'EXDEV', 'cross-device link not permitted' ] ],
        [ -4094, [ 'UNKNOWN', 'unknown error' ] ],
        [ -4095, [ 'EOF', 'end of file' ] ],
        [ -4033, [ 'ENXIO', 'no such device or address' ] ],
        [ -4032, [ 'EMLINK', 'too many links' ] ],
        [ -4031, [ 'EHOSTDOWN', 'host is down' ] ],
        [ -4030, [ 'EREMOTEIO', 'remote I/O error' ] ],
        [ -4029, [ 'ENOTTY', 'inappropriate ioctl for device' ] ],
        [ -4028, [ 'EFTYPE', 'inappropriate file type or format' ] ],
        [ -4027, [ 'EILSEQ', 'illegal byte sequence' ] ],
        [ -4025, [ 'ESOCKTNOSUPPORT', 'socket type not supported' ] ]
    ] as const);
}
|
||||
|
||||
export function getPosixToWin32SystemErrorMap() {
|
||||
const posixEntries = [...getPosixSystemErrorMap().entries()];
|
||||
const win32Entries = [...getWin32SystemErrorMap().entries()];
|
||||
const map: Map<PosixErrNo, Win32ErrNo> = new Map();
|
||||
posixEntries.forEach(([code, val]) => {
|
||||
const found = win32Entries.find(([_, v]) => v[0] === val[0]);
|
||||
if (!found) console.error(val[0]);
|
||||
else map.set(code, found[0]);
|
||||
});
|
||||
return map;
|
||||
}
|
||||
|
||||
export function getPlatformSystemErrorFromPosix(posixErrNo: PosixErrNo) {
|
||||
if (process.platform === 'win32') {
|
||||
const win32errno = getPosixToWin32SystemErrorMap().get(posixErrNo)!;
|
||||
return getWin32SystemErrorMap().get(win32errno);
|
||||
} else {
|
||||
return getPosixSystemErrorMap().get(posixErrNo);
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Error subclass shaped like Node.js system errors (name, code, errno,
 * optional syscall and path), resolved from a POSIX errno for the
 * current platform.
 */
export class SystemError extends Error {
    constructor(errno: PosixErrNo, syscall?: string, errpath?: string) {
        // Fall back to a generic name/message for errnos unknown on this platform.
        const [errname, errmsg] = getPlatformSystemErrorFromPosix(errno) ?? ['SystemError', 'Unknown system error'];
        super(errmsg);
        this.name = errname;
        this.code = errname;
        this.errno = errno;
        if (syscall) this.syscall = syscall;
        if (errpath) this.path = errpath;
    }
    errno?: number | undefined;
    code?: string | undefined;
    path?: string | undefined;
    syscall?: string | undefined;
}
|
||||
|
||||
export class NotImplementedError extends Error {
|
||||
constructor(thing: string, func: AnyCallable = NotImplementedError, overrideMsg: boolean = false) {
|
||||
super(overrideMsg ? thing : `A polyfill for ${thing} is not yet implemented by bun-polyfills.`);
|
||||
this.name = 'NotImplementedError';
|
||||
Error.captureStackTrace(this, func);
|
||||
}
|
||||
}
|
||||
36
packages/bun-polyfills/src/utils/misc.ts
Normal file
36
packages/bun-polyfills/src/utils/misc.ts
Normal file
@@ -0,0 +1,36 @@
|
||||
import streams from 'node:stream';
|
||||
import type { SpawnOptions, FileBlob } from 'bun';
|
||||
|
||||
export const getter = <T>(obj: T, key: string | symbol, get: () => any, enumerable = false, configurable = true): void => {
|
||||
Object.defineProperty(obj, key, { get, configurable, enumerable });
|
||||
};
|
||||
|
||||
export const setter = <T>(obj: T, key: string | symbol, set: () => any, enumerable = false, configurable = true): void => {
|
||||
Object.defineProperty(obj, key, { set, configurable, enumerable });
|
||||
};
|
||||
|
||||
export const readonly = <T>(obj: T, key: string | symbol, value: unknown, enumerable = false, configurable = true): void => {
|
||||
Object.defineProperty(obj, key, { value, configurable, enumerable });
|
||||
};
|
||||
|
||||
export function streamToBuffer(stream: streams.Readable | streams.Duplex): Promise<Buffer> {
|
||||
return new Promise((resolve, reject) => {
|
||||
const buffers: Uint8Array[] = [];
|
||||
stream.on("data", (chunk: Uint8Array) => buffers.push(chunk));
|
||||
stream.on("end", () => resolve(Buffer.concat(buffers)));
|
||||
stream.on("error", (err: Error) => reject(err));
|
||||
});
|
||||
}
|
||||
|
||||
export function isArrayBufferView(value: any): value is ArrayBufferView {
|
||||
return value !== null && typeof value === 'object' &&
|
||||
value.buffer instanceof ArrayBuffer && typeof value.byteLength === 'number' && typeof value.byteOffset === 'number';
|
||||
}
|
||||
|
||||
export function isOptions(options: any): options is SpawnOptions.OptionsObject {
|
||||
return options !== null && typeof options === 'object';
|
||||
}
|
||||
|
||||
export function isFileBlob(blob: any): blob is FileBlob {
|
||||
return blob instanceof Blob && Reflect.get(blob, 'readable') instanceof ReadableStream && typeof Reflect.get(blob, 'writer') === 'function';
|
||||
}
|
||||
41
packages/bun-polyfills/tools/updateversions.ts
Normal file
41
packages/bun-polyfills/tools/updateversions.ts
Normal file
@@ -0,0 +1,41 @@
|
||||
import path from 'path';
|
||||
|
||||
const abort = (...msg: string[]): never => (console.error(...msg), process.exit(1));
|
||||
|
||||
const makefilePath = path.resolve(import.meta.dir, '../../../Makefile');
|
||||
const makefile = Bun.file(makefilePath);
|
||||
if (!await makefile.exists()) abort('Makefile not found at', makefilePath);
|
||||
|
||||
const makefileContent = await makefile.text();
|
||||
const matched = makefileContent.match(/^BUN_BASE_VERSION\s*=\s*(\d+.\d+)/m);
|
||||
if (!matched) abort('Could not find BUN_BASE_VERSION in Makefile');
|
||||
|
||||
const buildidPath = path.resolve(import.meta.dir, '../../../src/build-id');
|
||||
const buildid = Bun.file(buildidPath);
|
||||
if (!await buildid.exists()) abort('Build ID file not found at', buildidPath);
|
||||
|
||||
const [, BUN_BASE_VERSION] = matched!;
|
||||
const BUN_VERSION = `${BUN_BASE_VERSION}.${await buildid.text()}`.trim();
|
||||
|
||||
const bunTsPath = path.resolve(import.meta.dir, '../src/modules/bun.ts');
|
||||
const bunTs = Bun.file(bunTsPath);
|
||||
if (!await bunTs.exists()) abort('bun.ts source file not found at', bunTsPath);
|
||||
|
||||
const bunTsContent = await bunTs.text();
|
||||
const bunTsContentNew = bunTsContent.replace(
|
||||
/^export const version = '.+' satisfies typeof Bun.version;$/m,
|
||||
`export const version = '${BUN_VERSION}' satisfies typeof Bun.version;`
|
||||
);
|
||||
if (bunTsContentNew !== bunTsContent) console.info('Updated Bun.version polyfill to', BUN_VERSION);
|
||||
|
||||
const git = Bun.spawnSync({ cmd: ['git', 'rev-parse', 'HEAD'] });
|
||||
if (!git.success) abort('Could not get git HEAD commit hash');
|
||||
const BUN_REVISION = git.stdout.toString('utf8').trim();
|
||||
|
||||
const bunTsContentNewer = bunTsContentNew.replace(
|
||||
/^export const revision = '.+' satisfies typeof Bun.revision;$/m,
|
||||
`export const revision = '${BUN_REVISION}' satisfies typeof Bun.revision;`
|
||||
);
|
||||
if (bunTsContentNewer !== bunTsContentNew) console.info('Updated Bun.revision polyfill to', BUN_REVISION);
|
||||
|
||||
Bun.write(bunTs, bunTsContentNewer);
|
||||
19
packages/bun-polyfills/tsconfig.json
Normal file
19
packages/bun-polyfills/tsconfig.json
Normal file
@@ -0,0 +1,19 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"lib": ["ESNext"],
|
||||
"module": "esnext",
|
||||
"target": "esnext",
|
||||
"moduleResolution": "nodenext",
|
||||
"moduleDetection": "force",
|
||||
"strict": true,
|
||||
"downlevelIteration": true,
|
||||
"skipLibCheck": true,
|
||||
"allowSyntheticDefaultImports": true,
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"inlineSourceMap": true,
|
||||
"allowJs": true,
|
||||
"outDir": "dist",
|
||||
"types": ["node"]
|
||||
},
|
||||
    "include": [".", "../bun-types/index.d.ts"]
}
|
||||
55
packages/bun-types/bun-test.d.ts
vendored
55
packages/bun-types/bun-test.d.ts
vendored
@@ -169,6 +169,25 @@ declare module "bun:test" {
|
||||
* @param condition if these tests should be skipped
|
||||
*/
|
||||
skipIf(condition: boolean): (label: string, fn: () => void) => void;
|
||||
/**
|
||||
* Returns a function that runs for each item in `table`.
|
||||
*
|
||||
* @param table Array of Arrays with the arguments that are passed into the test fn for each row.
|
||||
*/
|
||||
each<T extends ReadonlyArray<unknown>>(
|
||||
table: ReadonlyArray<T>,
|
||||
): (
|
||||
label: string,
|
||||
fn: (...args: T) => void | Promise<unknown>,
|
||||
options?: number | TestOptions,
|
||||
) => void;
|
||||
each<T>(
|
||||
table: ReadonlyArray<T>,
|
||||
): (
|
||||
label: string,
|
||||
fn: (arg: T) => void | Promise<unknown>,
|
||||
options?: number | TestOptions,
|
||||
) => void;
|
||||
};
|
||||
/**
|
||||
* Describes a group of related tests.
|
||||
@@ -395,6 +414,25 @@ declare module "bun:test" {
|
||||
| ((done: (err?: unknown) => void) => void),
|
||||
options?: number | TestOptions,
|
||||
) => void;
|
||||
/**
|
||||
* Returns a function that runs for each item in `table`.
|
||||
*
|
||||
* @param table Array of Arrays with the arguments that are passed into the test fn for each row.
|
||||
*/
|
||||
each<T extends ReadonlyArray<unknown>>(
|
||||
table: ReadonlyArray<T>,
|
||||
): (
|
||||
label: string,
|
||||
fn: (...args: T) => void | Promise<unknown>,
|
||||
options?: number | TestOptions,
|
||||
) => void;
|
||||
each<T>(
|
||||
table: ReadonlyArray<T>,
|
||||
): (
|
||||
label: string,
|
||||
fn: (arg: T, done: (err?: unknown) => void) => void | Promise<unknown>,
|
||||
options?: number | TestOptions,
|
||||
) => void;
|
||||
};
|
||||
/**
|
||||
* Runs a test.
|
||||
@@ -981,6 +1019,23 @@ declare module "bun:test" {
|
||||
* @param expected the expected substring
|
||||
*/
|
||||
toInclude(expected: string): void;
|
||||
/**
|
||||
* Asserts that a value includes a `string` {times} times.
|
||||
* @param expected the expected substring
|
||||
* @param times the number of times the substring should occur
|
||||
*/
|
||||
toIncludeRepeated(expected: string, times: number): void;
|
||||
/**
|
||||
* Checks whether a value satisfies a custom condition.
|
||||
* @param {Function} predicate - The custom condition to be satisfied. It should be a function that takes a value as an argument (in this case the value from expect) and returns a boolean.
|
||||
* @example
|
||||
* expect(1).toSatisfy((val) => val > 0);
|
||||
* expect("foo").toSatisfy((val) => val === "foo");
|
||||
* expect("bar").not.toSatisfy((val) => val === "bun");
|
||||
* @link https://vitest.dev/api/expect.html#tosatisfy
|
||||
* @link https://jest-extended.jestcommunity.dev/docs/matchers/toSatisfy
|
||||
*/
|
||||
toSatisfy(predicate: (value: T) => boolean): void;
|
||||
/**
|
||||
* Asserts that a value starts with a `string`.
|
||||
*
|
||||
|
||||
63
packages/bun-types/bun.d.ts
vendored
63
packages/bun-types/bun.d.ts
vendored
@@ -24,7 +24,7 @@ declare module "bun" {
|
||||
import { Encoding as CryptoEncoding } from "crypto";
|
||||
|
||||
export interface Env extends Dict<string>, NodeJS.ProcessEnv {
|
||||
NODE_ENV: string;
|
||||
NODE_ENV?: string;
|
||||
|
||||
/**
|
||||
* The timezone used by Intl, Date, etc.
|
||||
@@ -68,7 +68,7 @@ declare module "bun" {
|
||||
export function which(
|
||||
command: string,
|
||||
options?: { PATH?: string; cwd?: string },
|
||||
): string;
|
||||
): string | null;
|
||||
|
||||
export type Serve<WebSocketDataType = undefined> =
|
||||
| ServeOptions
|
||||
@@ -752,39 +752,40 @@ declare module "bun" {
|
||||
*/
|
||||
export const hash: ((
|
||||
data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer,
|
||||
seed?: number,
|
||||
seed?: number | bigint,
|
||||
) => number | bigint) &
|
||||
Hash;
|
||||
|
||||
interface Hash {
|
||||
wyhash: (
|
||||
data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer,
|
||||
seed?: number,
|
||||
) => number | bigint;
|
||||
crc32: (
|
||||
data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer,
|
||||
seed?: number,
|
||||
) => number | bigint;
|
||||
seed?: bigint,
|
||||
) => bigint;
|
||||
adler32: (
|
||||
data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer,
|
||||
seed?: number,
|
||||
) => number | bigint;
|
||||
) => number;
|
||||
crc32: (
|
||||
data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer,
|
||||
) => number;
|
||||
cityHash32: (
|
||||
data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer,
|
||||
seed?: number,
|
||||
) => number | bigint;
|
||||
) => number;
|
||||
cityHash64: (
|
||||
data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer,
|
||||
seed?: number,
|
||||
) => number | bigint;
|
||||
seed?: bigint,
|
||||
) => bigint;
|
||||
murmur32v3: (
|
||||
data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer,
|
||||
seed?: number,
|
||||
) => number | bigint;
|
||||
murmur64v2: (
|
||||
) => number;
|
||||
murmur32v2: (
|
||||
data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer,
|
||||
seed?: number,
|
||||
) => number | bigint;
|
||||
) => number;
|
||||
murmur64v2: (
|
||||
data: string | ArrayBufferView | ArrayBuffer | SharedArrayBuffer,
|
||||
seed?: bigint,
|
||||
) => bigint;
|
||||
}
|
||||
|
||||
export type JavaScriptLoader = "jsx" | "js" | "ts" | "tsx";
|
||||
@@ -2294,6 +2295,12 @@ declare module "bun" {
|
||||
*/
|
||||
export function allocUnsafe(size: number): Uint8Array;
|
||||
|
||||
export interface BunInspectOptions {
|
||||
colors?: boolean;
|
||||
depth?: number;
|
||||
sorted?: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* Pretty-print an object the same as {@link console.log} to a `string`
|
||||
*
|
||||
@@ -2301,7 +2308,13 @@ declare module "bun" {
|
||||
*
|
||||
* @param args
|
||||
*/
|
||||
export function inspect(...args: any): string;
|
||||
export function inspect(arg: any, options?: BunInspectOptions): string;
|
||||
export namespace inspect {
|
||||
/**
|
||||
* That can be used to declare custom inspect functions.
|
||||
*/
|
||||
const custom: typeof import("util").inspect.custom;
|
||||
}
|
||||
|
||||
interface MMapOptions {
|
||||
/**
|
||||
@@ -2428,8 +2441,8 @@ declare module "bun" {
|
||||
* If you have any ideas, please file an issue https://github.com/oven-sh/bun
|
||||
*/
|
||||
interface HeapSnapshot {
|
||||
/** "2" */
|
||||
version: string;
|
||||
/** 2 */
|
||||
version: number;
|
||||
|
||||
/** "Inspector" */
|
||||
type: string;
|
||||
@@ -2675,7 +2688,7 @@ declare module "bun" {
|
||||
* openssl sha512-256 /path/to/file
|
||||
*```
|
||||
*/
|
||||
export function sha(input: StringOrBuffer, hashInto?: Uint8Array): Uint8Array;
|
||||
export function sha(input: StringOrBuffer, hashInto?: TypedArray): TypedArray;
|
||||
|
||||
/**
|
||||
*
|
||||
@@ -3622,16 +3635,16 @@ declare module "bun" {
|
||||
>
|
||||
: SyncSubprocess<Readable, Readable>;
|
||||
|
||||
type ReadableIO = ReadableStream<Buffer> | number | undefined;
|
||||
type ReadableIO = ReadableStream<Uint8Array> | number | undefined;
|
||||
|
||||
type ReadableToIO<X extends Readable> = X extends "pipe" | undefined
|
||||
? ReadableStream<Buffer>
|
||||
? ReadableStream<Uint8Array>
|
||||
: X extends BunFile | ArrayBufferView | number
|
||||
? number
|
||||
: undefined;
|
||||
|
||||
type ReadableToSyncIO<X extends Readable> = X extends "pipe" | undefined
|
||||
? Buffer
|
||||
? Uint8Array
|
||||
: undefined;
|
||||
|
||||
type WritableIO = FileSink | number | undefined;
|
||||
|
||||
262
packages/bun-types/ffi.d.ts
vendored
262
packages/bun-types/ffi.d.ts
vendored
@@ -350,7 +350,7 @@ declare module "bun:ffi" {
|
||||
type UNTYPED = never;
|
||||
export type Pointer = number & {};
|
||||
|
||||
interface FFITypeToType {
|
||||
interface FFITypeToArgsType {
|
||||
[FFIType.char]: number;
|
||||
[FFIType.int8_t]: number;
|
||||
[FFIType.i8]: number;
|
||||
@@ -365,22 +365,54 @@ declare module "bun:ffi" {
|
||||
[FFIType.int]: number;
|
||||
[FFIType.uint32_t]: number;
|
||||
[FFIType.u32]: number;
|
||||
[FFIType.int64_t]: number;
|
||||
[FFIType.i64]: number;
|
||||
[FFIType.uint64_t]: number;
|
||||
[FFIType.u64]: number;
|
||||
[FFIType.int64_t]: number | bigint;
|
||||
[FFIType.i64]: number | bigint;
|
||||
[FFIType.uint64_t]: number | bigint;
|
||||
[FFIType.u64]: number | bigint;
|
||||
[FFIType.double]: number;
|
||||
[FFIType.f64]: number;
|
||||
[FFIType.float]: number;
|
||||
[FFIType.f32]: number;
|
||||
[FFIType.bool]: boolean;
|
||||
[FFIType.ptr]: Pointer;
|
||||
[FFIType.pointer]: Pointer;
|
||||
[FFIType.ptr]: TypedArray | Pointer | CString | null;
|
||||
[FFIType.pointer]: TypedArray | Pointer | CString | null;
|
||||
[FFIType.void]: void;
|
||||
[FFIType.cstring]: TypedArray | Pointer | CString | null;
|
||||
[FFIType.i64_fast]: number | bigint;
|
||||
[FFIType.u64_fast]: number | bigint;
|
||||
[FFIType.function]: Pointer | JSCallback; // cannot be null
|
||||
}
|
||||
interface FFITypeToReturnsType {
|
||||
[FFIType.char]: number;
|
||||
[FFIType.int8_t]: number;
|
||||
[FFIType.i8]: number;
|
||||
[FFIType.uint8_t]: number;
|
||||
[FFIType.u8]: number;
|
||||
[FFIType.int16_t]: number;
|
||||
[FFIType.i16]: number;
|
||||
[FFIType.uint16_t]: number;
|
||||
[FFIType.u16]: number;
|
||||
[FFIType.int32_t]: number;
|
||||
[FFIType.i32]: number;
|
||||
[FFIType.int]: number;
|
||||
[FFIType.uint32_t]: number;
|
||||
[FFIType.u32]: number;
|
||||
[FFIType.int64_t]: bigint;
|
||||
[FFIType.i64]: bigint;
|
||||
[FFIType.uint64_t]: bigint;
|
||||
[FFIType.u64]: bigint;
|
||||
[FFIType.double]: number;
|
||||
[FFIType.f64]: number;
|
||||
[FFIType.float]: number;
|
||||
[FFIType.f32]: number;
|
||||
[FFIType.bool]: boolean;
|
||||
[FFIType.ptr]: Pointer | null;
|
||||
[FFIType.pointer]: Pointer | null;
|
||||
[FFIType.void]: void;
|
||||
[FFIType.cstring]: CString;
|
||||
[FFIType.i64_fast]: number | bigint;
|
||||
[FFIType.u64_fast]: number | bigint;
|
||||
[FFIType.function]: (...args: any[]) => any;
|
||||
[FFIType.function]: Pointer | null;
|
||||
}
|
||||
interface FFITypeStringToType {
|
||||
["char"]: FFIType.char;
|
||||
@@ -417,36 +449,7 @@ declare module "bun:ffi" {
|
||||
|
||||
export type FFITypeOrString =
|
||||
| FFIType
|
||||
| "char"
|
||||
| "int8_t"
|
||||
| "i8"
|
||||
| "uint8_t"
|
||||
| "u8"
|
||||
| "int16_t"
|
||||
| "i16"
|
||||
| "uint16_t"
|
||||
| "u16"
|
||||
| "int32_t"
|
||||
| "i32"
|
||||
| "int"
|
||||
| "uint32_t"
|
||||
| "u32"
|
||||
| "int64_t"
|
||||
| "i64"
|
||||
| "uint64_t"
|
||||
| "u64"
|
||||
| "double"
|
||||
| "f64"
|
||||
| "float"
|
||||
| "f32"
|
||||
| "bool"
|
||||
| "ptr"
|
||||
| "pointer"
|
||||
| "void"
|
||||
| "cstring"
|
||||
| "function"
|
||||
| "usize"
|
||||
| "callback";
|
||||
| keyof FFITypeStringToType;
|
||||
|
||||
interface FFIFunction {
|
||||
/**
|
||||
@@ -477,7 +480,7 @@ declare module "bun:ffi" {
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
args?: FFITypeOrString[];
|
||||
readonly args?: readonly FFITypeOrString[];
|
||||
/**
|
||||
* Return type to a FFI function (C ABI)
|
||||
*
|
||||
@@ -505,7 +508,7 @@ declare module "bun:ffi" {
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
returns?: FFITypeOrString;
|
||||
readonly returns?: FFITypeOrString;
|
||||
|
||||
/**
|
||||
* Function pointer to the native function
|
||||
@@ -516,7 +519,7 @@ declare module "bun:ffi" {
|
||||
* This is useful if the library has already been loaded
|
||||
* or if the module is also using Node-API.
|
||||
*/
|
||||
ptr?: number | bigint;
|
||||
readonly ptr?: number | bigint;
|
||||
|
||||
/**
|
||||
* Can C/FFI code call this function from a separate thread?
|
||||
@@ -533,10 +536,10 @@ declare module "bun:ffi" {
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
threadsafe?: boolean;
|
||||
readonly threadsafe?: boolean;
|
||||
}
|
||||
|
||||
type Symbols = Record<string, FFIFunction>;
|
||||
type Symbols = Readonly<Record<string, FFIFunction>>;
|
||||
|
||||
// /**
|
||||
// * Compile a callback function
|
||||
@@ -546,7 +549,7 @@ declare module "bun:ffi" {
|
||||
// */
|
||||
// export function callback(ffi: FFIFunction, cb: Function): number;
|
||||
|
||||
export interface Library<Fns extends Record<string, Narrow<FFIFunction>>> {
|
||||
export interface Library<Fns extends Readonly<Record<string, Narrow<FFIFunction>>>> {
|
||||
symbols: ConvertFns<Fns>;
|
||||
|
||||
/**
|
||||
@@ -577,12 +580,14 @@ declare module "bun:ffi" {
|
||||
| (T extends object ? { [K in keyof T]: Narrow<T[K]> } : never)
|
||||
| Extract<{} | null | undefined, T>;
|
||||
|
||||
type ConvertFns<Fns extends Record<string, FFIFunction>> = {
|
||||
type ConvertFns<Fns extends Readonly<Record<string, FFIFunction>>> = {
|
||||
[K in keyof Fns]: (
|
||||
...args: Fns[K]["args"] extends infer A extends FFITypeOrString[]
|
||||
? { [L in keyof A]: FFITypeToType[ToFFIType<A[L]>] }
|
||||
: never
|
||||
) => FFITypeToType[ToFFIType<NonNullable<Fns[K]["returns"]>>];
|
||||
...args: Fns[K]["args"] extends infer A extends readonly FFITypeOrString[]
|
||||
? { [L in keyof A]: FFITypeToArgsType[ToFFIType<A[L]>] }
|
||||
: [unknown] extends [Fns[K]["args"]] ? [] : never
|
||||
) => [unknown] extends [Fns[K]["returns"]]
|
||||
? void
|
||||
: FFITypeToReturnsType[ToFFIType<NonNullable<Fns[K]["returns"]>>];
|
||||
};
|
||||
|
||||
/**
|
||||
@@ -750,6 +755,165 @@ declare module "bun:ffi" {
|
||||
byteLength?: number,
|
||||
): ArrayBuffer;
|
||||
|
||||
export namespace read {
|
||||
/**
|
||||
* The read function behaves similarly to DataView,
|
||||
* but it's usually faster because it doesn't need to create a DataView or ArrayBuffer.
|
||||
*
|
||||
* @param ptr The memory address to read
|
||||
* @param byteOffset bytes to skip before reading
|
||||
*
|
||||
* While there are some checks to catch invalid pointers, this is a difficult
|
||||
* thing to do safely. Passing an invalid pointer can crash the program and
|
||||
* reading beyond the bounds of the pointer will crash the program or cause
|
||||
* undefined behavior. Use with care!
|
||||
*/
|
||||
export function u8(ptr: Pointer, byteOffset?: number): number;
|
||||
/**
|
||||
* The read function behaves similarly to DataView,
|
||||
* but it's usually faster because it doesn't need to create a DataView or ArrayBuffer.
|
||||
*
|
||||
* @param ptr The memory address to read
|
||||
* @param byteOffset bytes to skip before reading
|
||||
*
|
||||
* While there are some checks to catch invalid pointers, this is a difficult
|
||||
* thing to do safely. Passing an invalid pointer can crash the program and
|
||||
* reading beyond the bounds of the pointer will crash the program or cause
|
||||
* undefined behavior. Use with care!
|
||||
*/
|
||||
export function i8(ptr: Pointer, byteOffset?: number): number;
|
||||
/**
|
||||
* The read function behaves similarly to DataView,
|
||||
* but it's usually faster because it doesn't need to create a DataView or ArrayBuffer.
|
||||
*
|
||||
* @param ptr The memory address to read
|
||||
* @param byteOffset bytes to skip before reading
|
||||
*
|
||||
* While there are some checks to catch invalid pointers, this is a difficult
|
||||
* thing to do safely. Passing an invalid pointer can crash the program and
|
||||
* reading beyond the bounds of the pointer will crash the program or cause
|
||||
* undefined behavior. Use with care!
|
||||
*/
|
||||
export function u16(ptr: Pointer, byteOffset?: number): number;
|
||||
/**
|
||||
* The read function behaves similarly to DataView,
|
||||
* but it's usually faster because it doesn't need to create a DataView or ArrayBuffer.
|
||||
*
|
||||
* @param ptr The memory address to read
|
||||
* @param byteOffset bytes to skip before reading
|
||||
*
|
||||
* While there are some checks to catch invalid pointers, this is a difficult
|
||||
* thing to do safely. Passing an invalid pointer can crash the program and
|
||||
* reading beyond the bounds of the pointer will crash the program or cause
|
||||
* undefined behavior. Use with care!
|
||||
*/
|
||||
export function i16(ptr: Pointer, byteOffset?: number): number;
|
||||
/**
|
||||
* The read function behaves similarly to DataView,
|
||||
* but it's usually faster because it doesn't need to create a DataView or ArrayBuffer.
|
||||
*
|
||||
* @param ptr The memory address to read
|
||||
* @param byteOffset bytes to skip before reading
|
||||
*
|
||||
* While there are some checks to catch invalid pointers, this is a difficult
|
||||
* thing to do safely. Passing an invalid pointer can crash the program and
|
||||
* reading beyond the bounds of the pointer will crash the program or cause
|
||||
* undefined behavior. Use with care!
|
||||
*/
|
||||
export function u32(ptr: Pointer, byteOffset?: number): number;
|
||||
/**
|
||||
* The read function behaves similarly to DataView,
|
||||
* but it's usually faster because it doesn't need to create a DataView or ArrayBuffer.
|
||||
*
|
||||
* @param ptr The memory address to read
|
||||
* @param byteOffset bytes to skip before reading
|
||||
*
|
||||
* While there are some checks to catch invalid pointers, this is a difficult
|
||||
* thing to do safely. Passing an invalid pointer can crash the program and
|
||||
* reading beyond the bounds of the pointer will crash the program or cause
|
||||
* undefined behavior. Use with care!
|
||||
*/
|
||||
export function i32(ptr: Pointer, byteOffset?: number): number;
|
||||
/**
|
||||
* The read function behaves similarly to DataView,
|
||||
* but it's usually faster because it doesn't need to create a DataView or ArrayBuffer.
|
||||
*
|
||||
* @param ptr The memory address to read
|
||||
* @param byteOffset bytes to skip before reading
|
||||
*
|
||||
* While there are some checks to catch invalid pointers, this is a difficult
|
||||
* thing to do safely. Passing an invalid pointer can crash the program and
|
||||
* reading beyond the bounds of the pointer will crash the program or cause
|
||||
* undefined behavior. Use with care!
|
||||
*/
|
||||
export function f32(ptr: Pointer, byteOffset?: number): number;
|
||||
/**
|
||||
* The read function behaves similarly to DataView,
|
||||
* but it's usually faster because it doesn't need to create a DataView or ArrayBuffer.
|
||||
*
|
||||
* @param ptr The memory address to read
|
||||
* @param byteOffset bytes to skip before reading
|
||||
*
|
||||
* While there are some checks to catch invalid pointers, this is a difficult
|
||||
* thing to do safely. Passing an invalid pointer can crash the program and
|
||||
* reading beyond the bounds of the pointer will crash the program or cause
|
||||
* undefined behavior. Use with care!
|
||||
*/
|
||||
export function u64(ptr: Pointer, byteOffset?: number): bigint;
|
||||
/**
|
||||
* The read function behaves similarly to DataView,
|
||||
* but it's usually faster because it doesn't need to create a DataView or ArrayBuffer.
|
||||
*
|
||||
* @param ptr The memory address to read
|
||||
* @param byteOffset bytes to skip before reading
|
||||
*
|
||||
* While there are some checks to catch invalid pointers, this is a difficult
|
||||
* thing to do safely. Passing an invalid pointer can crash the program and
|
||||
* reading beyond the bounds of the pointer will crash the program or cause
|
||||
* undefined behavior. Use with care!
|
||||
*/
|
||||
export function i64(ptr: Pointer, byteOffset?: number): bigint;
|
||||
/**
|
||||
* The read function behaves similarly to DataView,
|
||||
* but it's usually faster because it doesn't need to create a DataView or ArrayBuffer.
|
||||
*
|
||||
* @param ptr The memory address to read
|
||||
* @param byteOffset bytes to skip before reading
|
||||
*
|
||||
* While there are some checks to catch invalid pointers, this is a difficult
|
||||
* thing to do safely. Passing an invalid pointer can crash the program and
|
||||
* reading beyond the bounds of the pointer will crash the program or cause
|
||||
* undefined behavior. Use with care!
|
||||
*/
|
||||
export function f64(ptr: Pointer, byteOffset?: number): number;
|
||||
/**
|
||||
* The read function behaves similarly to DataView,
|
||||
* but it's usually faster because it doesn't need to create a DataView or ArrayBuffer.
|
||||
*
|
||||
* @param ptr The memory address to read
|
||||
* @param byteOffset bytes to skip before reading
|
||||
*
|
||||
* While there are some checks to catch invalid pointers, this is a difficult
|
||||
* thing to do safely. Passing an invalid pointer can crash the program and
|
||||
* reading beyond the bounds of the pointer will crash the program or cause
|
||||
* undefined behavior. Use with care!
|
||||
*/
|
||||
export function ptr(ptr: Pointer, byteOffset?: number): number;
|
||||
/**
|
||||
* The read function behaves similarly to DataView,
|
||||
* but it's usually faster because it doesn't need to create a DataView or ArrayBuffer.
|
||||
*
|
||||
* @param ptr The memory address to read
|
||||
* @param byteOffset bytes to skip before reading
|
||||
*
|
||||
* While there are some checks to catch invalid pointers, this is a difficult
|
||||
* thing to do safely. Passing an invalid pointer can crash the program and
|
||||
* reading beyond the bounds of the pointer will crash the program or cause
|
||||
* undefined behavior. Use with care!
|
||||
*/
|
||||
export function intptr(ptr: Pointer, byteOffset?: number): number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the pointer backing a {@link TypedArray} or {@link ArrayBuffer}
|
||||
*
|
||||
|
||||
210
packages/bun-types/globals.d.ts
vendored
210
packages/bun-types/globals.d.ts
vendored
@@ -373,8 +373,171 @@ declare type MessageChannel = import("worker_threads").MessageChannel;
|
||||
declare var BroadcastChannel: typeof import("worker_threads").BroadcastChannel;
|
||||
declare type BroadcastChannel = import("worker_threads").BroadcastChannel;
|
||||
|
||||
declare var Worker: typeof import("worker_threads").Worker;
|
||||
declare type Worker = typeof import("worker_threads").Worker;
|
||||
interface AbstractWorkerEventMap {
|
||||
error: ErrorEvent;
|
||||
}
|
||||
|
||||
interface WorkerEventMap extends AbstractWorkerEventMap {
|
||||
message: MessageEvent;
|
||||
messageerror: MessageEvent;
|
||||
close: CloseEvent;
|
||||
open: Event;
|
||||
}
|
||||
|
||||
interface AbstractWorker {
|
||||
/** [MDN Reference](https://developer.mozilla.org/docs/Web/API/ServiceWorker/error_event) */
|
||||
onerror: ((this: AbstractWorker, ev: ErrorEvent) => any) | null;
|
||||
addEventListener<K extends keyof AbstractWorkerEventMap>(
|
||||
type: K,
|
||||
listener: (this: AbstractWorker, ev: AbstractWorkerEventMap[K]) => any,
|
||||
options?: boolean | AddEventListenerOptions,
|
||||
): void;
|
||||
addEventListener(
|
||||
type: string,
|
||||
listener: EventListenerOrEventListenerObject,
|
||||
options?: boolean | AddEventListenerOptions,
|
||||
): void;
|
||||
removeEventListener<K extends keyof AbstractWorkerEventMap>(
|
||||
type: K,
|
||||
listener: (this: AbstractWorker, ev: AbstractWorkerEventMap[K]) => any,
|
||||
options?: boolean | EventListenerOptions,
|
||||
): void;
|
||||
removeEventListener(
|
||||
type: string,
|
||||
listener: EventListenerOrEventListenerObject,
|
||||
options?: boolean | EventListenerOptions,
|
||||
): void;
|
||||
}
|
||||
|
||||
/**
|
||||
* Bun's Web Worker constructor supports some extra options on top of the API browsers have.
|
||||
*/
|
||||
interface WorkerOptions {
|
||||
/**
|
||||
* A string specifying an identifying name for the DedicatedWorkerGlobalScope representing the scope of
|
||||
* the worker, which is mainly useful for debugging purposes.
|
||||
*/
|
||||
name?: string;
|
||||
|
||||
/**
|
||||
* Use less memory, but make the worker slower.
|
||||
*
|
||||
* Internally, this sets the heap size configuration in JavaScriptCore to be
|
||||
* the small heap instead of the large heap.
|
||||
*/
|
||||
smol?: boolean;
|
||||
|
||||
/**
|
||||
* When `true`, the worker will keep the parent thread alive until the worker is terminated or `unref`'d.
|
||||
* When `false`, the worker will not keep the parent thread alive.
|
||||
*
|
||||
* By default, this is `false`.
|
||||
*/
|
||||
ref?: boolean;
|
||||
|
||||
/**
|
||||
* In Bun, this does nothing.
|
||||
*/
|
||||
type?: string;
|
||||
|
||||
/**
|
||||
* List of arguments which would be stringified and appended to
|
||||
* `Bun.argv` / `process.argv` in the worker. This is mostly similar to the `data`
|
||||
* but the values will be available on the global `Bun.argv` as if they
|
||||
* were passed as CLI options to the script.
|
||||
*/
|
||||
// argv?: any[] | undefined;
|
||||
|
||||
/** If `true` and the first argument is a string, interpret the first argument to the constructor as a script that is executed once the worker is online. */
|
||||
// eval?: boolean | undefined;
|
||||
|
||||
/**
|
||||
* If set, specifies the initial value of process.env inside the Worker thread. As a special value, worker.SHARE_ENV may be used to specify that the parent thread and the child thread should share their environment variables; in that case, changes to one thread's process.env object affect the other thread as well. Default: process.env.
|
||||
*/
|
||||
env?:
|
||||
| Record<string, string>
|
||||
| typeof import("node:worker_threads")["SHARE_ENV"]
|
||||
| undefined;
|
||||
|
||||
/**
|
||||
* In Bun, this does nothing.
|
||||
*/
|
||||
credentials?: string;
|
||||
|
||||
/**
|
||||
* @default true
|
||||
*/
|
||||
// trackUnmanagedFds?: boolean;
|
||||
|
||||
// resourceLimits?: import("worker_threads").ResourceLimits;
|
||||
}
|
||||
|
||||
interface Worker extends EventTarget, AbstractWorker {
|
||||
/** [MDN Reference](https://developer.mozilla.org/docs/Web/API/Worker/message_event) */
|
||||
onmessage: ((this: Worker, ev: MessageEvent) => any) | null;
|
||||
/** [MDN Reference](https://developer.mozilla.org/docs/Web/API/Worker/messageerror_event) */
|
||||
onmessageerror: ((this: Worker, ev: MessageEvent) => any) | null;
|
||||
/**
|
||||
* Clones message and transmits it to worker's global environment. transfer can be passed as a list of objects that are to be transferred rather than cloned.
|
||||
*
|
||||
* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Worker/postMessage)
|
||||
*/
|
||||
postMessage(message: any, transfer: Transferable[]): void;
|
||||
postMessage(message: any, options?: StructuredSerializeOptions): void;
|
||||
/**
|
||||
* Aborts worker's associated global environment.
|
||||
*
|
||||
* [MDN Reference](https://developer.mozilla.org/docs/Web/API/Worker/terminate)
|
||||
*/
|
||||
terminate(): void;
|
||||
addEventListener<K extends keyof WorkerEventMap>(
|
||||
type: K,
|
||||
listener: (this: Worker, ev: WorkerEventMap[K]) => any,
|
||||
options?: boolean | AddEventListenerOptions,
|
||||
): void;
|
||||
addEventListener(
|
||||
type: string,
|
||||
listener: EventListenerOrEventListenerObject,
|
||||
options?: boolean | AddEventListenerOptions,
|
||||
): void;
|
||||
removeEventListener<K extends keyof WorkerEventMap>(
|
||||
type: K,
|
||||
listener: (this: Worker, ev: WorkerEventMap[K]) => any,
|
||||
options?: boolean | EventListenerOptions,
|
||||
): void;
|
||||
removeEventListener(
|
||||
type: string,
|
||||
listener: EventListenerOrEventListenerObject,
|
||||
options?: boolean | EventListenerOptions,
|
||||
): void;
|
||||
|
||||
/**
|
||||
* Opposite of `unref()`, calling `ref()` on a previously `unref()`ed worker does _not_ let the program exit if it's the only active handle left (the default
|
||||
* behavior). If the worker is `ref()`ed, calling `ref()` again has
|
||||
* no effect.
|
||||
* @since v10.5.0
|
||||
*/
|
||||
ref(): void;
|
||||
/**
|
||||
* Calling `unref()` on a worker allows the thread to exit if this is the only
|
||||
* active handle in the event system. If the worker is already `unref()`ed calling`unref()` again has no effect.
|
||||
* @since v10.5.0
|
||||
*/
|
||||
unref(): void;
|
||||
|
||||
threadId: number;
|
||||
}
|
||||
|
||||
declare var Worker: {
|
||||
prototype: Worker;
|
||||
new (scriptURL: string | URL, options?: WorkerOptions): Worker;
|
||||
/**
|
||||
* This is the cloned value of the `data` property passed to `new Worker()`
|
||||
*
|
||||
* This is Bun's equivalent of `workerData` in Node.js.
|
||||
*/
|
||||
data: any;
|
||||
};
|
||||
|
||||
interface EncodeIntoResult {
|
||||
/**
|
||||
@@ -439,9 +602,9 @@ interface Process {
|
||||
getgroups: () => number[];
|
||||
// setgroups?: (groups: ReadonlyArray<string | number>) => void;
|
||||
dlopen(module: { exports: any }, filename: string, flags?: number): void;
|
||||
stdin: import("stream").Duplex & { isTTY: boolean };
|
||||
stdout: import("stream").Writable & { isTTY: boolean };
|
||||
stderr: import("stream").Writable & { isTTY: boolean };
|
||||
stdin: import("tty").ReadStream;
|
||||
stdout: import("tty").WriteStream;
|
||||
stderr: import("tty").WriteStream;
|
||||
|
||||
/**
|
||||
*
|
||||
@@ -576,11 +739,10 @@ interface BlobInterface {
|
||||
|
||||
type BlobPart = string | Blob | BufferSource;
|
||||
interface BlobPropertyBag {
|
||||
/** Set a default "type" */
|
||||
type?: string;
|
||||
|
||||
/** Set a default "type". Not yet implemented. */
|
||||
// type?: string;
|
||||
/** Not implemented in Bun yet. */
|
||||
endings?: "transparent" | "native";
|
||||
// endings?: "transparent" | "native";
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -664,7 +826,7 @@ type ResponseType =
|
||||
| "opaque"
|
||||
| "opaqueredirect";
|
||||
|
||||
type FormDataEntryValue = Blob | string;
|
||||
type FormDataEntryValue = File | string;
|
||||
|
||||
/** Provides a way to easily construct a set of key/value pairs representing
|
||||
* form fields and their values, which can then be easily sent using the
|
||||
@@ -796,6 +958,28 @@ declare var Blob: {
|
||||
new (parts?: BlobPart[], options?: BlobPropertyBag): Blob;
|
||||
};
|
||||
|
||||
interface File extends Blob {
|
||||
readonly lastModified: number;
|
||||
readonly name: string;
|
||||
}
|
||||
|
||||
declare var File: {
|
||||
prototype: File;
|
||||
|
||||
/**
|
||||
* Create a new [File](https://developer.mozilla.org/en-US/docs/Web/API/File)
|
||||
*
|
||||
* @param `parts` - An array of strings, numbers, BufferSource, or [Blob](https://developer.mozilla.org/en-US/docs/Web/API/Blob) objects
|
||||
* @param `name` - The name of the file
|
||||
* @param `options` - An object containing properties to be added to the [File](https://developer.mozilla.org/en-US/docs/Web/API/File)
|
||||
*/
|
||||
new (
|
||||
parts: BlobPart[],
|
||||
name: string,
|
||||
options?: BlobPropertyBag & { lastModified?: Date | number },
|
||||
): File;
|
||||
};
|
||||
|
||||
interface ResponseInit {
|
||||
headers?: HeadersInit;
|
||||
/** @default 200 */
|
||||
@@ -2485,10 +2669,6 @@ interface ReadableStream<R = any> {
|
||||
options?: StreamPipeOptions,
|
||||
): Promise<void>;
|
||||
tee(): [ReadableStream<R>, ReadableStream<R>];
|
||||
forEach(
|
||||
callbackfn: (value: any, key: number, parent: ReadableStream<R>) => void,
|
||||
thisArg?: any,
|
||||
): void;
|
||||
[Symbol.asyncIterator](): AsyncIterableIterator<R>;
|
||||
values(options?: { preventCancel: boolean }): AsyncIterableIterator<R>;
|
||||
}
|
||||
@@ -3492,7 +3672,7 @@ declare module "*.txt" {
|
||||
}
|
||||
|
||||
declare module "*.toml" {
|
||||
var contents: unknown;
|
||||
var contents: any;
|
||||
export = contents;
|
||||
}
|
||||
|
||||
|
||||
8
packages/bun-types/html-rewriter.d.ts
vendored
8
packages/bun-types/html-rewriter.d.ts
vendored
@@ -50,6 +50,14 @@ declare namespace HTMLRewriterTypes {
|
||||
tagName: string;
|
||||
readonly attributes: IterableIterator<string[]>;
|
||||
readonly removed: boolean;
|
||||
/** Whether the element is explicitly self-closing, e.g. `<foo />` */
|
||||
readonly selfClosing: boolean;
|
||||
/**
|
||||
* Whether the element can have inner content. Returns `true` unless
|
||||
* - the element is an [HTML void element](https://html.spec.whatwg.org/multipage/syntax.html#void-elements)
|
||||
* - or it's self-closing in a foreign context (eg. in SVG, MathML).
|
||||
*/
|
||||
readonly canHaveContent: boolean;
|
||||
readonly namespaceURI: string;
|
||||
getAttribute(name: string): string | null;
|
||||
hasAttribute(name: string): boolean;
|
||||
|
||||
2
packages/bun-types/module.d.ts
vendored
2
packages/bun-types/module.d.ts
vendored
@@ -13,4 +13,6 @@ declare module "module" {
|
||||
* Bun's module cache is not exposed but this property exists for compatibility.
|
||||
*/
|
||||
export var _cache: {};
|
||||
|
||||
export var builtinModules: string[];
|
||||
}
|
||||
|
||||
15
packages/bun-types/perf_hooks.d.ts
vendored
15
packages/bun-types/perf_hooks.d.ts
vendored
@@ -145,16 +145,19 @@ declare module "perf_hooks" {
|
||||
// */
|
||||
// readonly v8Start: number;
|
||||
// }
|
||||
// interface EventLoopUtilization {
|
||||
// idle: number;
|
||||
// active: number;
|
||||
// utilization: number;
|
||||
// }
|
||||
interface EventLoopUtilization {
|
||||
idle: number;
|
||||
active: number;
|
||||
utilization: number;
|
||||
}
|
||||
// /**
|
||||
// * @param util1 The result of a previous call to eventLoopUtilization()
|
||||
// * @param util2 The result of a previous call to eventLoopUtilization() prior to util1
|
||||
// */
|
||||
// type EventLoopUtilityFunction = (util1?: EventLoopUtilization, util2?: EventLoopUtilization) => EventLoopUtilization;
|
||||
type EventLoopUtilityFunction = (
|
||||
util1?: EventLoopUtilization,
|
||||
util2?: EventLoopUtilization,
|
||||
) => EventLoopUtilization;
|
||||
// interface MarkOptions {
|
||||
// /**
|
||||
// * Additional optional detail to include with the mark.
|
||||
|
||||
@@ -4,6 +4,8 @@ import {
|
||||
suffix,
|
||||
CString,
|
||||
Pointer,
|
||||
JSCallback,
|
||||
read,
|
||||
// FFIFunction,
|
||||
// ConvertFns,
|
||||
// Narrow,
|
||||
@@ -27,6 +29,14 @@ const lib = dlopen(
|
||||
args: [FFIType.i32, FFIType.i32],
|
||||
returns: FFIType.i32,
|
||||
},
|
||||
ptr_type: {
|
||||
args: [FFIType.pointer],
|
||||
returns: FFIType.pointer,
|
||||
},
|
||||
fn_type: {
|
||||
args: [FFIType.function],
|
||||
returns: FFIType.function,
|
||||
},
|
||||
allArgs: {
|
||||
args: [
|
||||
FFIType.char, // string
|
||||
@@ -67,6 +77,17 @@ const lib = dlopen(
|
||||
tsd.expectType<CString>(lib.symbols.sqlite3_libversion());
|
||||
tsd.expectType<number>(lib.symbols.add(1, 2));
|
||||
|
||||
tsd.expectType<Pointer | null>(lib.symbols.ptr_type(0));
|
||||
tc.assert<
|
||||
tc.IsExact<
|
||||
(typeof lib)["symbols"]["ptr_type"],
|
||||
TypedArray | Pointer | CString
|
||||
>
|
||||
>;
|
||||
|
||||
tsd.expectType<Pointer | null>(lib.symbols.fn_type(0));
|
||||
tc.assert<tc.IsExact<(typeof lib)["symbols"]["fn_type"], Pointer | JSCallback>>;
|
||||
|
||||
tc.assert<
|
||||
tc.IsExact<
|
||||
(typeof lib)["symbols"]["allArgs"],
|
||||
@@ -103,3 +124,41 @@ tc.assert<
|
||||
]
|
||||
>
|
||||
>;
|
||||
|
||||
const as_const_test = {
|
||||
sqlite3_libversion: {
|
||||
args: [],
|
||||
returns: FFIType.cstring,
|
||||
},
|
||||
multi_args: {
|
||||
args: [FFIType.i32, FFIType.f32],
|
||||
returns: FFIType.void,
|
||||
},
|
||||
no_returns: {
|
||||
args: [FFIType.i32],
|
||||
},
|
||||
no_args: {
|
||||
returns: FFIType.i32,
|
||||
},
|
||||
} as const;
|
||||
|
||||
const lib2 = dlopen(path, as_const_test);
|
||||
|
||||
tsd.expectType<CString>(lib2.symbols.sqlite3_libversion());
|
||||
tsd.expectType<void>(lib2.symbols.multi_args(1, 2));
|
||||
tc.assert<tc.IsExact<ReturnType<(typeof lib2)["symbols"]["no_returns"]>, void>>;
|
||||
tc.assert<tc.IsExact<Parameters<(typeof lib2)["symbols"]["no_args"]>, []>>;
|
||||
|
||||
tsd.expectType<number>(read.u8(0));
|
||||
tsd.expectType<number>(read.u8(0, 0));
|
||||
tsd.expectType<number>(read.i8(0, 0));
|
||||
tsd.expectType<number>(read.u16(0, 0));
|
||||
tsd.expectType<number>(read.i16(0, 0));
|
||||
tsd.expectType<number>(read.u32(0, 0));
|
||||
tsd.expectType<number>(read.i32(0, 0));
|
||||
tsd.expectType<bigint>(read.u64(0, 0));
|
||||
tsd.expectType<bigint>(read.i64(0, 0));
|
||||
tsd.expectType<number>(read.f32(0, 0));
|
||||
tsd.expectType<number>(read.f64(0, 0));
|
||||
tsd.expectType<number>(read.ptr(0, 0));
|
||||
tsd.expectType<number>(read.intptr(0, 0));
|
||||
|
||||
@@ -1 +1 @@
|
||||
Bun.hash.wyhash("asdf", 1234);
|
||||
const hash: bigint = Bun.hash.wyhash("asdf", 1234n);
|
||||
|
||||
49
packages/bun-types/tests/tty.test-d.ts
Normal file
49
packages/bun-types/tests/tty.test-d.ts
Normal file
@@ -0,0 +1,49 @@
|
||||
import * as tty from "tty";
|
||||
|
||||
const rs = new tty.ReadStream(234, {
|
||||
allowHalfOpen: true,
|
||||
readable: true,
|
||||
signal: new AbortSignal(),
|
||||
writable: true,
|
||||
});
|
||||
|
||||
const ws = new tty.WriteStream(234);
|
||||
|
||||
process.stdin.setRawMode(true);
|
||||
process.stdin.setRawMode(false);
|
||||
process.stdin.isRaw;
|
||||
process.stdin.setRawMode(true).isRaw;
|
||||
|
||||
rs.isRaw;
|
||||
rs.setRawMode(true);
|
||||
rs.setRawMode(false);
|
||||
rs.setRawMode(true).isRaw;
|
||||
rs.isTTY;
|
||||
|
||||
ws.isPaused;
|
||||
ws.isTTY;
|
||||
ws.bytesWritten;
|
||||
ws.bytesRead;
|
||||
ws.columns;
|
||||
ws.rows;
|
||||
ws.isTTY;
|
||||
ws.clearLine(1);
|
||||
ws.clearLine(0);
|
||||
ws.clearScreenDown();
|
||||
ws.cursorTo(1);
|
||||
ws.cursorTo(1, 2);
|
||||
ws.cursorTo(1, () => {});
|
||||
ws.cursorTo(1, 2, () => {});
|
||||
ws.moveCursor(1, 2);
|
||||
ws.moveCursor(1, 2, () => {});
|
||||
ws.clearLine(1, () => {});
|
||||
ws.clearLine(0, () => {});
|
||||
ws.clearScreenDown(() => {});
|
||||
ws.cursorTo(1, () => {});
|
||||
|
||||
process.stdout.clearLine;
|
||||
process.stdout.clearScreenDown;
|
||||
process.stdout.cursorTo;
|
||||
process.stdout.moveCursor;
|
||||
process.stdout.getColorDepth;
|
||||
process.stdout.getWindowSize;
|
||||
@@ -1,18 +1,29 @@
|
||||
import { Worker } from "node:worker_threads";
|
||||
import { Worker as NodeWorker } from "node:worker_threads";
|
||||
import * as tsd from "tsd";
|
||||
|
||||
const _workerthread = new Worker("./worker.js");
|
||||
_workerthread;
|
||||
const worker = new Worker("./worker.ts");
|
||||
worker.addEventListener("message", (event: MessageEvent) => {
|
||||
console.log("Message from worker:", event.data);
|
||||
const webWorker = new Worker("./worker.js");
|
||||
|
||||
webWorker.addEventListener("message", event => {
|
||||
tsd.expectType<MessageEvent>(event);
|
||||
});
|
||||
worker.postMessage("Hello from main thread!");
|
||||
webWorker.addEventListener("error", event => {
|
||||
tsd.expectType<ErrorEvent>(event);
|
||||
});
|
||||
webWorker.addEventListener("messageerror", event => {
|
||||
tsd.expectType<MessageEvent>(event);
|
||||
});
|
||||
|
||||
const nodeWorker = new NodeWorker("./worker.ts");
|
||||
nodeWorker.on("message", event => {
|
||||
console.log("Message from worker:", event);
|
||||
});
|
||||
nodeWorker.postMessage("Hello from main thread!");
|
||||
|
||||
const workerURL = new URL("worker.ts", import.meta.url).href;
|
||||
const _worker2 = new Worker(workerURL);
|
||||
|
||||
worker.postMessage("hello");
|
||||
worker.onmessage = event => {
|
||||
nodeWorker.postMessage("hello");
|
||||
webWorker.onmessage = event => {
|
||||
console.log(event.data);
|
||||
};
|
||||
|
||||
@@ -20,15 +31,20 @@ worker.onmessage = event => {
|
||||
postMessage({ hello: "world" });
|
||||
|
||||
// On the main thread
|
||||
worker.postMessage({ hello: "world" });
|
||||
nodeWorker.postMessage({ hello: "world" });
|
||||
|
||||
// ...some time later
|
||||
worker.terminate();
|
||||
nodeWorker.terminate();
|
||||
|
||||
// Bun.pathToFileURL
|
||||
const _worker3 = new Worker(new URL("worker.ts", import.meta.url).href, {
|
||||
ref: true,
|
||||
smol: true,
|
||||
credentials: "",
|
||||
name: "a name",
|
||||
env: {
|
||||
envValue: "hello",
|
||||
},
|
||||
});
|
||||
|
||||
export { worker, _worker2, _worker3 };
|
||||
export { nodeWorker as worker, _worker2, _worker3 };
|
||||
|
||||
200
packages/bun-types/tty.d.ts
vendored
200
packages/bun-types/tty.d.ts
vendored
@@ -1,4 +1,31 @@
|
||||
/**
|
||||
* The `tty` module provides the `tty.ReadStream` and `tty.WriteStream` classes.
|
||||
* In most cases, it will not be necessary or possible to use this module directly.
|
||||
* However, it can be accessed using:
|
||||
*
|
||||
* ```js
|
||||
* const tty = require('tty');
|
||||
* ```
|
||||
*
|
||||
* When Node.js detects that it is being run with a text terminal ("TTY")
|
||||
* attached, `process.stdin` will, by default, be initialized as an instance of`tty.ReadStream` and both `process.stdout` and `process.stderr` will, by
|
||||
* default, be instances of `tty.WriteStream`. The preferred method of determining
|
||||
* whether Node.js is being run within a TTY context is to check that the value of
|
||||
* the `process.stdout.isTTY` property is `true`:
|
||||
*
|
||||
* ```console
|
||||
* $ node -p -e "Boolean(process.stdout.isTTY)"
|
||||
* true
|
||||
* $ node -p -e "Boolean(process.stdout.isTTY)" | cat
|
||||
* false
|
||||
* ```
|
||||
*
|
||||
* In most cases, there should be little to no reason for an application to
|
||||
* manually create instances of the `tty.ReadStream` and `tty.WriteStream`classes.
|
||||
* @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/tty.js)
|
||||
*/
|
||||
declare module "tty" {
|
||||
import * as net from "node:net";
|
||||
/**
|
||||
* The `tty.isatty()` method returns `true` if the given `fd` is associated with
|
||||
* a TTY and `false` if it is not, including whenever `fd` is not a non-negative
|
||||
@@ -7,10 +34,175 @@ declare module "tty" {
|
||||
* @param fd A numeric file descriptor
|
||||
*/
|
||||
function isatty(fd: number): boolean;
|
||||
|
||||
// TODO: tty-browserify only polyfills functions that throws errors, wouldn't make sense to have types at the moment
|
||||
var ReadStream: Function;
|
||||
var WriteStream: Function;
|
||||
/**
|
||||
* Represents the readable side of a TTY. In normal circumstances `process.stdin` will be the only `tty.ReadStream` instance in a Node.js
|
||||
* process and there should be no reason to create additional instances.
|
||||
* @since v0.5.8
|
||||
*/
|
||||
class ReadStream extends net.Socket {
|
||||
constructor(fd: number, options?: net.SocketConstructorOpts);
|
||||
/**
|
||||
* A `boolean` that is `true` if the TTY is currently configured to operate as a
|
||||
* raw device. Defaults to `false`.
|
||||
* @since v0.7.7
|
||||
*/
|
||||
isRaw: boolean;
|
||||
/**
|
||||
* Allows configuration of `tty.ReadStream` so that it operates as a raw device.
|
||||
*
|
||||
* When in raw mode, input is always available character-by-character, not
|
||||
* including modifiers. Additionally, all special processing of characters by the
|
||||
* terminal is disabled, including echoing input
|
||||
* characters. Ctrl+C will no longer cause a `SIGINT` when
|
||||
* in this mode.
|
||||
* @since v0.7.7
|
||||
* @param mode If `true`, configures the `tty.ReadStream` to operate as a raw device. If `false`, configures the `tty.ReadStream` to operate in its default mode. The `readStream.isRaw`
|
||||
* property will be set to the resulting mode.
|
||||
* @return The read stream instance.
|
||||
*/
|
||||
setRawMode(mode: boolean): this;
|
||||
/**
|
||||
* A `boolean` that is always `true` for `tty.ReadStream` instances.
|
||||
* @since v0.5.8
|
||||
*/
|
||||
isTTY: boolean;
|
||||
}
|
||||
/**
|
||||
* -1 - to the left from cursor
|
||||
* 0 - the entire line
|
||||
* 1 - to the right from cursor
|
||||
*/
|
||||
type Direction = -1 | 0 | 1;
|
||||
/**
|
||||
* Represents the writable side of a TTY. In normal circumstances,`process.stdout` and `process.stderr` will be the only`tty.WriteStream` instances created for a Node.js process and there
|
||||
* should be no reason to create additional instances.
|
||||
* @since v0.5.8
|
||||
*/
|
||||
class WriteStream extends net.Socket {
|
||||
constructor(fd: number);
|
||||
addListener(event: string, listener: (...args: any[]) => void): this;
|
||||
addListener(event: "resize", listener: () => void): this;
|
||||
emit(event: string | symbol, ...args: any[]): boolean;
|
||||
emit(event: "resize"): boolean;
|
||||
on(event: string, listener: (...args: any[]) => void): this;
|
||||
on(event: "resize", listener: () => void): this;
|
||||
once(event: string, listener: (...args: any[]) => void): this;
|
||||
once(event: "resize", listener: () => void): this;
|
||||
prependListener(event: string, listener: (...args: any[]) => void): this;
|
||||
prependListener(event: "resize", listener: () => void): this;
|
||||
prependOnceListener(
|
||||
event: string,
|
||||
listener: (...args: any[]) => void,
|
||||
): this;
|
||||
prependOnceListener(event: "resize", listener: () => void): this;
|
||||
/**
|
||||
* `writeStream.clearLine()` clears the current line of this `WriteStream` in a
|
||||
* direction identified by `dir`.
|
||||
* @since v0.7.7
|
||||
* @param callback Invoked once the operation completes.
|
||||
* @return `false` if the stream wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`.
|
||||
*/
|
||||
clearLine(dir: Direction, callback?: () => void): boolean;
|
||||
/**
|
||||
* `writeStream.clearScreenDown()` clears this `WriteStream` from the current
|
||||
* cursor down.
|
||||
* @since v0.7.7
|
||||
* @param callback Invoked once the operation completes.
|
||||
* @return `false` if the stream wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`.
|
||||
*/
|
||||
clearScreenDown(callback?: () => void): boolean;
|
||||
/**
|
||||
* `writeStream.cursorTo()` moves this `WriteStream`'s cursor to the specified
|
||||
* position.
|
||||
* @since v0.7.7
|
||||
* @param callback Invoked once the operation completes.
|
||||
* @return `false` if the stream wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`.
|
||||
*/
|
||||
cursorTo(x: number, y?: number, callback?: () => void): boolean;
|
||||
cursorTo(x: number, callback: () => void): boolean;
|
||||
/**
|
||||
* `writeStream.moveCursor()` moves this `WriteStream`'s cursor _relative_ to its
|
||||
* current position.
|
||||
* @since v0.7.7
|
||||
* @param callback Invoked once the operation completes.
|
||||
* @return `false` if the stream wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`.
|
||||
*/
|
||||
moveCursor(dx: number, dy: number, callback?: () => void): boolean;
|
||||
/**
|
||||
* Returns:
|
||||
*
|
||||
* * `1` for 2,
|
||||
* * `4` for 16,
|
||||
* * `8` for 256,
|
||||
* * `24` for 16,777,216 colors supported.
|
||||
*
|
||||
* Use this to determine what colors the terminal supports. Due to the nature of
|
||||
* colors in terminals it is possible to either have false positives or false
|
||||
* negatives. It depends on process information and the environment variables that
|
||||
* may lie about what terminal is used.
|
||||
* It is possible to pass in an `env` object to simulate the usage of a specific
|
||||
* terminal. This can be useful to check how specific environment settings behave.
|
||||
*
|
||||
* To enforce a specific color support, use one of the below environment settings.
|
||||
*
|
||||
* * 2 colors: `FORCE_COLOR = 0` (Disables colors)
|
||||
* * 16 colors: `FORCE_COLOR = 1`
|
||||
* * 256 colors: `FORCE_COLOR = 2`
|
||||
* * 16,777,216 colors: `FORCE_COLOR = 3`
|
||||
*
|
||||
* Disabling color support is also possible by using the `NO_COLOR` and`NODE_DISABLE_COLORS` environment variables.
|
||||
* @since v9.9.0
|
||||
* @param [env=process.env] An object containing the environment variables to check. This enables simulating the usage of a specific terminal.
|
||||
*/
|
||||
getColorDepth(env?: object): number;
|
||||
/**
|
||||
* Returns `true` if the `writeStream` supports at least as many colors as provided
|
||||
* in `count`. Minimum support is 2 (black and white).
|
||||
*
|
||||
* This has the same false positives and negatives as described in `writeStream.getColorDepth()`.
|
||||
*
|
||||
* ```js
|
||||
* process.stdout.hasColors();
|
||||
* // Returns true or false depending on if `stdout` supports at least 16 colors.
|
||||
* process.stdout.hasColors(256);
|
||||
* // Returns true or false depending on if `stdout` supports at least 256 colors.
|
||||
* process.stdout.hasColors({ TMUX: '1' });
|
||||
* // Returns true.
|
||||
* process.stdout.hasColors(2 ** 24, { TMUX: '1' });
|
||||
* // Returns false (the environment setting pretends to support 2 ** 8 colors).
|
||||
* ```
|
||||
* @since v11.13.0, v10.16.0
|
||||
* @param [count=16] The number of colors that are requested (minimum 2).
|
||||
* @param [env=process.env] An object containing the environment variables to check. This enables simulating the usage of a specific terminal.
|
||||
*/
|
||||
hasColors(count?: number): boolean;
|
||||
hasColors(env?: object): boolean;
|
||||
hasColors(count: number, env?: object): boolean;
|
||||
/**
|
||||
* `writeStream.getWindowSize()` returns the size of the TTY
|
||||
* corresponding to this `WriteStream`. The array is of the type`[numColumns, numRows]` where `numColumns` and `numRows` represent the number
|
||||
* of columns and rows in the corresponding TTY.
|
||||
* @since v0.7.7
|
||||
*/
|
||||
getWindowSize(): [number, number];
|
||||
/**
|
||||
* A `number` specifying the number of columns the TTY currently has. This property
|
||||
* is updated whenever the `'resize'` event is emitted.
|
||||
* @since v0.7.7
|
||||
*/
|
||||
columns: number;
|
||||
/**
|
||||
* A `number` specifying the number of rows the TTY currently has. This property
|
||||
* is updated whenever the `'resize'` event is emitted.
|
||||
* @since v0.7.7
|
||||
*/
|
||||
rows: number;
|
||||
/**
|
||||
* A `boolean` that is always `true`.
|
||||
* @since v0.5.8
|
||||
*/
|
||||
isTTY: boolean;
|
||||
}
|
||||
}
|
||||
declare module "node:tty" {
|
||||
export * from "tty";
|
||||
|
||||
288
packages/bun-types/worker_threads.d.ts
vendored
288
packages/bun-types/worker_threads.d.ts
vendored
@@ -53,9 +53,10 @@
|
||||
*/
|
||||
declare module "worker_threads" {
|
||||
// import { Blob } from "node:buffer";
|
||||
import { Readable, Writable } from "node:stream";
|
||||
import { Context } from "node:vm";
|
||||
import { EventEmitter } from "node:events";
|
||||
// import { EventLoopUtilityFunction } from "node:perf_hooks";
|
||||
import { EventLoopUtilityFunction } from "node:perf_hooks";
|
||||
// import { FileHandle } from "node:fs/promises";
|
||||
// import { Readable, Writable } from "node:stream";
|
||||
import { URL } from "node:url";
|
||||
@@ -67,9 +68,9 @@ declare module "worker_threads" {
|
||||
const threadId: number;
|
||||
const workerData: any;
|
||||
|
||||
// interface WorkerPerformance {
|
||||
// eventLoopUtilization: EventLoopUtilityFunction;
|
||||
// }
|
||||
interface WorkerPerformance {
|
||||
eventLoopUtilization: EventLoopUtilityFunction;
|
||||
}
|
||||
type TransferListItem =
|
||||
| ArrayBuffer
|
||||
| MessagePort
|
||||
@@ -250,29 +251,74 @@ declare module "worker_threads" {
|
||||
off(event: string | symbol, listener: (...args: any[]) => void): this;
|
||||
}
|
||||
interface WorkerOptions {
|
||||
/**
|
||||
* A string specifying an identifying name for the DedicatedWorkerGlobalScope representing the scope of
|
||||
* the worker, which is mainly useful for debugging purposes.
|
||||
*/
|
||||
name?: string;
|
||||
|
||||
/**
|
||||
* Use less memory, but make the worker slower.
|
||||
*
|
||||
* Internally, this sets the heap size configuration in JavaScriptCore to be
|
||||
* the small heap instead of the large heap.
|
||||
*/
|
||||
smol?: boolean;
|
||||
|
||||
/**
|
||||
* When `true`, the worker will keep the parent thread alive until the worker is terminated or `unref`'d.
|
||||
* When `false`, the worker will not keep the parent thread alive.
|
||||
*
|
||||
* By default, this is `false`.
|
||||
*/
|
||||
ref?: boolean;
|
||||
|
||||
/**
|
||||
* In Bun, this does nothing.
|
||||
*/
|
||||
type?: string;
|
||||
|
||||
/**
|
||||
* List of arguments which would be stringified and appended to
|
||||
* `process.argv` in the worker. This is mostly similar to the `workerData`
|
||||
* but the values will be available on the global `process.argv` as if they
|
||||
* `Bun.argv` / `process.argv` in the worker. This is mostly similar to the `data`
|
||||
* but the values will be available on the global `Bun.argv` as if they
|
||||
* were passed as CLI options to the script.
|
||||
*/
|
||||
argv?: any[] | undefined;
|
||||
env?: Record<string, string> | typeof SHARE_ENV | undefined;
|
||||
eval?: boolean | undefined;
|
||||
workerData?: any;
|
||||
stdin?: boolean | undefined;
|
||||
stdout?: boolean | undefined;
|
||||
stderr?: boolean | undefined;
|
||||
execArgv?: string[] | undefined;
|
||||
resourceLimits?: ResourceLimits | undefined;
|
||||
// argv?: any[] | undefined;
|
||||
|
||||
/** If `true` and the first argument is a string, interpret the first argument to the constructor as a script that is executed once the worker is online. */
|
||||
// eval?: boolean | undefined;
|
||||
|
||||
/**
|
||||
* Additional data to send in the first worker message.
|
||||
* If set, specifies the initial value of process.env inside the Worker thread. As a special value, worker.SHARE_ENV may be used to specify that the parent thread and the child thread should share their environment variables; in that case, changes to one thread's process.env object affect the other thread as well. Default: process.env.
|
||||
*/
|
||||
transferList?: TransferListItem[] | undefined;
|
||||
env?:
|
||||
| Record<string, string>
|
||||
| typeof import("node:worker_threads")["SHARE_ENV"]
|
||||
| undefined;
|
||||
|
||||
/**
|
||||
* In Bun, this does nothing.
|
||||
*/
|
||||
credentials?: string;
|
||||
|
||||
/**
|
||||
* @default true
|
||||
*/
|
||||
trackUnmanagedFds?: boolean | undefined;
|
||||
// trackUnmanagedFds?: boolean;
|
||||
|
||||
workerData?: any;
|
||||
|
||||
/**
|
||||
* An array of objects that are transferred rather than cloned when being passed between threads.
|
||||
*/
|
||||
transferList?: import("worker_threads").TransferListItem[];
|
||||
|
||||
// resourceLimits?: import("worker_threads").ResourceLimits;
|
||||
// stdin?: boolean | undefined;
|
||||
// stdout?: boolean | undefined;
|
||||
// stderr?: boolean | undefined;
|
||||
// execArgv?: string[] | undefined;
|
||||
}
|
||||
interface ResourceLimits {
|
||||
/**
|
||||
@@ -356,76 +402,164 @@ declare module "worker_threads" {
|
||||
* ```
|
||||
* @since v10.5.0
|
||||
*/
|
||||
interface Worker extends EventTarget {
|
||||
onerror: ((this: Worker, ev: ErrorEvent) => any) | null;
|
||||
onmessage: ((this: Worker, ev: MessageEvent) => any) | null;
|
||||
onmessageerror: ((this: Worker, ev: MessageEvent) => any) | null;
|
||||
|
||||
addEventListener<K extends keyof WorkerEventMap>(
|
||||
type: K,
|
||||
listener: (this: Worker, ev: WorkerEventMap[K]) => any,
|
||||
options?: boolean | AddEventListenerOptions,
|
||||
): void;
|
||||
|
||||
removeEventListener<K extends keyof WorkerEventMap>(
|
||||
type: K,
|
||||
listener: (this: Worker, ev: WorkerEventMap[K]) => any,
|
||||
options?: boolean | EventListenerOptions,
|
||||
): void;
|
||||
|
||||
terminate(): void;
|
||||
|
||||
postMessage(message: any, transfer?: Transferable[]): void;
|
||||
|
||||
class Worker extends EventEmitter {
|
||||
/**
|
||||
* Keep the process alive until the worker is terminated or `unref`'d
|
||||
* If `stdin: true` was passed to the `Worker` constructor, this is a
|
||||
* writable stream. The data written to this stream will be made available in
|
||||
* the worker thread as `process.stdin`.
|
||||
* @since v10.5.0
|
||||
*/
|
||||
readonly stdin: Writable | null;
|
||||
/**
|
||||
* This is a readable stream which contains data written to `process.stdout` inside the worker thread. If `stdout: true` was not passed to the `Worker` constructor, then data is piped to the
|
||||
* parent thread's `process.stdout` stream.
|
||||
* @since v10.5.0
|
||||
*/
|
||||
readonly stdout: Readable;
|
||||
/**
|
||||
* This is a readable stream which contains data written to `process.stderr` inside the worker thread. If `stderr: true` was not passed to the `Worker` constructor, then data is piped to the
|
||||
* parent thread's `process.stderr` stream.
|
||||
* @since v10.5.0
|
||||
*/
|
||||
readonly stderr: Readable;
|
||||
/**
|
||||
* An integer identifier for the referenced thread. Inside the worker thread,
|
||||
* it is available as `require('node:worker_threads').threadId`.
|
||||
* This value is unique for each `Worker` instance inside a single process.
|
||||
* @since v10.5.0
|
||||
*/
|
||||
readonly threadId: number;
|
||||
/**
|
||||
* Provides the set of JS engine resource constraints for this Worker thread.
|
||||
* If the `resourceLimits` option was passed to the `Worker` constructor,
|
||||
* this matches its values.
|
||||
*
|
||||
* If the worker has stopped, the return value is an empty object.
|
||||
* @since v13.2.0, v12.16.0
|
||||
*/
|
||||
readonly resourceLimits?: ResourceLimits | undefined;
|
||||
/**
|
||||
* An object that can be used to query performance information from a worker
|
||||
* instance. Similar to `perf_hooks.performance`.
|
||||
* @since v15.1.0, v14.17.0, v12.22.0
|
||||
*/
|
||||
readonly performance: WorkerPerformance;
|
||||
/**
|
||||
* @param filename The path to the Worker’s main script or module.
|
||||
* Must be either an absolute path or a relative path (i.e. relative to the current working directory) starting with ./ or ../,
|
||||
* or a WHATWG URL object using file: protocol. If options.eval is true, this is a string containing JavaScript code rather than a path.
|
||||
*/
|
||||
constructor(filename: string | URL, options?: WorkerOptions);
|
||||
/**
|
||||
* Send a message to the worker that is received via `require('node:worker_threads').parentPort.on('message')`.
|
||||
* See `port.postMessage()` for more details.
|
||||
* @since v10.5.0
|
||||
*/
|
||||
postMessage(
|
||||
value: any,
|
||||
transferList?: ReadonlyArray<TransferListItem>,
|
||||
): void;
|
||||
/**
|
||||
* Opposite of `unref()`, calling `ref()` on a previously `unref()`ed worker does _not_ let the program exit if it's the only active handle left (the default
|
||||
* behavior). If the worker is `ref()`ed, calling `ref()` again has
|
||||
* no effect.
|
||||
* @since v10.5.0
|
||||
*/
|
||||
ref(): void;
|
||||
/**
|
||||
* Undo a previous `ref()`
|
||||
* Calling `unref()` on a worker allows the thread to exit if this is the only
|
||||
* active handle in the event system. If the worker is already `unref()`ed calling`unref()` again has no effect.
|
||||
* @since v10.5.0
|
||||
*/
|
||||
unref(): void;
|
||||
|
||||
/**
|
||||
* Unique per-process thread ID. Main thread ID is always `0`.
|
||||
* Stop all JavaScript execution in the worker thread as soon as possible.
|
||||
* Returns a Promise for the exit code that is fulfilled when the `'exit' event` is emitted.
|
||||
* @since v10.5.0
|
||||
*/
|
||||
readonly threadId: number;
|
||||
}
|
||||
var Worker: {
|
||||
prototype: Worker;
|
||||
new (stringUrl: string | URL, options?: WorkerOptions): Worker;
|
||||
};
|
||||
interface WorkerOptions {
|
||||
name?: string;
|
||||
|
||||
terminate(): Promise<number>;
|
||||
/**
|
||||
* Use less memory, but make the worker slower.
|
||||
* Returns a readable stream for a V8 snapshot of the current state of the Worker.
|
||||
* See `v8.getHeapSnapshot()` for more details.
|
||||
*
|
||||
* Internally, this sets the heap size configuration in JavaScriptCore to be
|
||||
* the small heap instead of the large heap.
|
||||
* If the Worker thread is no longer running, which may occur before the `'exit' event` is emitted, the returned `Promise` is rejected
|
||||
* immediately with an `ERR_WORKER_NOT_RUNNING` error.
|
||||
* @since v13.9.0, v12.17.0
|
||||
* @return A promise for a Readable Stream containing a V8 heap snapshot
|
||||
*/
|
||||
smol?: boolean;
|
||||
|
||||
/**
|
||||
* When `true`, the worker will keep the parent thread alive until the worker is terminated or `unref`'d.
|
||||
* When `false`, the worker will not keep the parent thread alive.
|
||||
*
|
||||
* By default, this is `false`.
|
||||
*/
|
||||
ref?: boolean;
|
||||
|
||||
/**
|
||||
* Does nothing in Bun
|
||||
*/
|
||||
type?: string;
|
||||
}
|
||||
|
||||
interface WorkerEventMap {
|
||||
message: MessageEvent;
|
||||
messageerror: MessageEvent;
|
||||
error: ErrorEvent;
|
||||
open: Event;
|
||||
close: Event;
|
||||
getHeapSnapshot(): Promise<Readable>;
|
||||
addListener(event: "error", listener: (err: Error) => void): this;
|
||||
addListener(event: "exit", listener: (exitCode: number) => void): this;
|
||||
addListener(event: "message", listener: (value: any) => void): this;
|
||||
addListener(event: "messageerror", listener: (error: Error) => void): this;
|
||||
addListener(event: "online", listener: () => void): this;
|
||||
addListener(
|
||||
event: string | symbol,
|
||||
listener: (...args: any[]) => void,
|
||||
): this;
|
||||
emit(event: "error", err: Error): boolean;
|
||||
emit(event: "exit", exitCode: number): boolean;
|
||||
emit(event: "message", value: any): boolean;
|
||||
emit(event: "messageerror", error: Error): boolean;
|
||||
emit(event: "online"): boolean;
|
||||
emit(event: string | symbol, ...args: any[]): boolean;
|
||||
on(event: "error", listener: (err: Error) => void): this;
|
||||
on(event: "exit", listener: (exitCode: number) => void): this;
|
||||
on(event: "message", listener: (value: any) => void): this;
|
||||
on(event: "messageerror", listener: (error: Error) => void): this;
|
||||
on(event: "online", listener: () => void): this;
|
||||
on(event: string | symbol, listener: (...args: any[]) => void): this;
|
||||
once(event: "error", listener: (err: Error) => void): this;
|
||||
once(event: "exit", listener: (exitCode: number) => void): this;
|
||||
once(event: "message", listener: (value: any) => void): this;
|
||||
once(event: "messageerror", listener: (error: Error) => void): this;
|
||||
once(event: "online", listener: () => void): this;
|
||||
once(event: string | symbol, listener: (...args: any[]) => void): this;
|
||||
prependListener(event: "error", listener: (err: Error) => void): this;
|
||||
prependListener(event: "exit", listener: (exitCode: number) => void): this;
|
||||
prependListener(event: "message", listener: (value: any) => void): this;
|
||||
prependListener(
|
||||
event: "messageerror",
|
||||
listener: (error: Error) => void,
|
||||
): this;
|
||||
prependListener(event: "online", listener: () => void): this;
|
||||
prependListener(
|
||||
event: string | symbol,
|
||||
listener: (...args: any[]) => void,
|
||||
): this;
|
||||
prependOnceListener(event: "error", listener: (err: Error) => void): this;
|
||||
prependOnceListener(
|
||||
event: "exit",
|
||||
listener: (exitCode: number) => void,
|
||||
): this;
|
||||
prependOnceListener(event: "message", listener: (value: any) => void): this;
|
||||
prependOnceListener(
|
||||
event: "messageerror",
|
||||
listener: (error: Error) => void,
|
||||
): this;
|
||||
prependOnceListener(event: "online", listener: () => void): this;
|
||||
prependOnceListener(
|
||||
event: string | symbol,
|
||||
listener: (...args: any[]) => void,
|
||||
): this;
|
||||
removeListener(event: "error", listener: (err: Error) => void): this;
|
||||
removeListener(event: "exit", listener: (exitCode: number) => void): this;
|
||||
removeListener(event: "message", listener: (value: any) => void): this;
|
||||
removeListener(
|
||||
event: "messageerror",
|
||||
listener: (error: Error) => void,
|
||||
): this;
|
||||
removeListener(event: "online", listener: () => void): this;
|
||||
removeListener(
|
||||
event: string | symbol,
|
||||
listener: (...args: any[]) => void,
|
||||
): this;
|
||||
off(event: "error", listener: (err: Error) => void): this;
|
||||
off(event: "exit", listener: (exitCode: number) => void): this;
|
||||
off(event: "message", listener: (value: any) => void): this;
|
||||
off(event: "messageerror", listener: (error: Error) => void): this;
|
||||
off(event: "online", listener: () => void): this;
|
||||
off(event: string | symbol, listener: (...args: any[]) => void): this;
|
||||
}
|
||||
|
||||
interface BroadcastChannelEventMap {
|
||||
|
||||
29
packages/bun-vscode/.vscode/launch.json
vendored
Normal file
29
packages/bun-vscode/.vscode/launch.json
vendored
Normal file
@@ -0,0 +1,29 @@
|
||||
{
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"name": "Extension",
|
||||
"type": "extensionHost",
|
||||
"request": "launch",
|
||||
"args": [
|
||||
"--extensionDevelopmentPath=${workspaceFolder}",
|
||||
"${workspaceFolder}/example"
|
||||
],
|
||||
"outFiles": ["${workspaceFolder}/dist/**/*.js"],
|
||||
"preLaunchTask": "Build (watch)"
|
||||
},
|
||||
{
|
||||
"name": "Extension (web)",
|
||||
"type": "extensionHost",
|
||||
"debugWebWorkerHost": true,
|
||||
"request": "launch",
|
||||
"args": [
|
||||
"--extensionDevelopmentPath=${workspaceFolder}",
|
||||
"--extensionDevelopmentKind=web",
|
||||
"${workspaceFolder}/example"
|
||||
],
|
||||
"outFiles": ["${workspaceFolder}/dist/**/*.js"],
|
||||
"preLaunchTask": "Build (watch)"
|
||||
}
|
||||
]
|
||||
}
|
||||
9
packages/bun-vscode/.vscode/settings.json
vendored
Normal file
9
packages/bun-vscode/.vscode/settings.json
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
{
|
||||
"files.exclude": {
|
||||
"out": false // set this to true to hide the "out" folder with the compiled JS files
|
||||
},
|
||||
"search.exclude": {
|
||||
"out": true // set this to false to include "out" folder in search results
|
||||
},
|
||||
"typescript.tsc.autoDetect": "off",
|
||||
}
|
||||
16
packages/bun-vscode/.vscode/tasks.json
vendored
Normal file
16
packages/bun-vscode/.vscode/tasks.json
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
{
|
||||
"version": "2.0.0",
|
||||
"tasks": [
|
||||
{
|
||||
"label": "Build",
|
||||
"type": "shell",
|
||||
"command": "bun run build"
|
||||
},
|
||||
{
|
||||
"label": "Build (watch)",
|
||||
"type": "shell",
|
||||
"command": "bun run build:watch",
|
||||
"isBackground": true
|
||||
}
|
||||
]
|
||||
}
|
||||
1
packages/bun-vscode/README.md
Normal file
1
packages/bun-vscode/README.md
Normal file
@@ -0,0 +1 @@
|
||||
# Debug Adapter Protocol for Bun
|
||||
5
packages/bun-vscode/TODO.md
Normal file
5
packages/bun-vscode/TODO.md
Normal file
@@ -0,0 +1,5 @@
|
||||
* Off-by-one for debug lines
|
||||
* Formatting values in console (some code is wired up)
|
||||
* Play button on debugger actually starting Bun
|
||||
* bun debug or --inspect command added to Bun, not need Bun.serve
|
||||
* Breakpoint actually setting
|
||||
BIN
packages/bun-vscode/bun.lockb
Executable file
BIN
packages/bun-vscode/bun.lockb
Executable file
Binary file not shown.
19
packages/bun-vscode/example/.vscode/launch.json
vendored
Normal file
19
packages/bun-vscode/example/.vscode/launch.json
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
{
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"type": "bun",
|
||||
"request": "launch",
|
||||
"name": "Debug",
|
||||
"program": "${workspaceFolder}/example.js",
|
||||
"stopOnEntry": true
|
||||
},
|
||||
{
|
||||
"type": "bun",
|
||||
"request": "attach",
|
||||
"name": "Attach",
|
||||
"program": "${workspaceFolder}/example.js",
|
||||
"stopOnEntry": true
|
||||
}
|
||||
]
|
||||
}
|
||||
BIN
packages/bun-vscode/example/bun.lockb
Executable file
BIN
packages/bun-vscode/example/bun.lockb
Executable file
Binary file not shown.
63
packages/bun-vscode/example/example.js
Normal file
63
packages/bun-vscode/example/example.js
Normal file
@@ -0,0 +1,63 @@
|
||||
// @bun
|
||||
const express = import.meta.require("express");
|
||||
const app = express();
|
||||
import { readFile } from "node:fs/promises";
|
||||
|
||||
app
|
||||
.get("/", (req, res) => {
|
||||
console.log("I am logging a request!");
|
||||
readFile(import.meta.path, "utf-8").then(data => {
|
||||
console.log(data.length);
|
||||
debugger;
|
||||
res.send("hello world");
|
||||
});
|
||||
})
|
||||
.listen(3000);
|
||||
|
||||
const va = 1;
|
||||
let vb = 2;
|
||||
var vc = 3;
|
||||
|
||||
function fa() {
|
||||
fb();
|
||||
}
|
||||
|
||||
function fb() {
|
||||
fc();
|
||||
}
|
||||
|
||||
function fc() {
|
||||
fd();
|
||||
}
|
||||
|
||||
function fd() {
|
||||
let map = new Map([
|
||||
[1, 2],
|
||||
[2, 3],
|
||||
[3, 4],
|
||||
]);
|
||||
let set = new Set([1, 2, 3, 4, 5]);
|
||||
let arr = [1, 2, 3, 4, 5];
|
||||
let obj = {
|
||||
a: 1,
|
||||
b: 2,
|
||||
c: 3,
|
||||
};
|
||||
function fd1() {
|
||||
let date = new Date();
|
||||
console.log(new Error().stack);
|
||||
debugger;
|
||||
console.log(date);
|
||||
}
|
||||
fd1();
|
||||
}
|
||||
|
||||
Bun.serve({
|
||||
port: 9229,
|
||||
inspector: true,
|
||||
development: true,
|
||||
fetch(request, server) {
|
||||
console.log(request);
|
||||
return new Response(request.url);
|
||||
},
|
||||
});
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user