mirror of
https://github.com/oven-sh/bun
synced 2026-02-04 07:58:54 +00:00
Compare commits (520 commits): ciro/remov ... build-scri
[Commit list: 520 bare commit SHA1s; no author, date, or message metadata was captured.]
.buildkite/bootstrap.yml (new file, 30 lines)

@@ -0,0 +1,30 @@
# Uploads the latest CI workflow to Buildkite.
# https://buildkite.com/docs/pipelines/defining-steps
#
# Changes to this file must be manually edited here:
# https://buildkite.com/bun/bun/settings/steps
steps:
  - if: "build.pull_request.repository.fork"
    block: ":eyes:"
    prompt: "Did you review the PR?"
    blocked_state: "running"

  - label: ":pipeline:"
    command: "buildkite-agent pipeline upload .buildkite/ci.yml"
    agents:
      queue: "build-linux"

  - if: "build.branch == 'main' && !build.pull_request.repository.fork"
    label: ":github:"
    agents:
      queue: "test-darwin"
    depends_on:
      - "darwin-aarch64-build-bun"
      - "darwin-x64-build-bun"
      - "linux-aarch64-build-bun"
      - "linux-x64-build-bun"
      - "linux-x64-baseline-build-bun"
      - "windows-x64-build-bun"
      - "windows-x64-baseline-build-bun"
    command:
      - ".buildkite/scripts/upload-release.sh"
.buildkite/ci.md (new file, 63 lines)

@@ -0,0 +1,63 @@
## CI

How does CI work?

### Building

Bun is built on macOS, Linux, and Windows. The process is split into the following steps, the first 3 of which can run in parallel:

#### 1. `build-deps`

Builds the static libraries in `src/deps` and outputs a directory: `build/bun-deps`.

- on Windows, this runs the script: [`scripts/all-dependencies.ps1`](scripts/all-dependencies.ps1)
- on macOS and Linux, this runs the script: [`scripts/all-dependencies.sh`](scripts/all-dependencies.sh)

#### 2. `build-zig`

Builds the Zig object file: `build/bun-zig.o`. Since `zig build` supports cross-compiling, this step runs on macOS aarch64, which we have observed to be the fastest.

- on macOS and Linux, this runs the script: [`scripts/build-bun-zig.sh`](scripts/build-bun-zig.sh)

#### 3. `build-cpp`

Builds the C++ object file: `build/bun-cpp-objects.a`.

- on Windows, this runs the script: [`scripts/build-bun-cpp.ps1`](scripts/build-bun-cpp.ps1)
- on macOS and Linux, this runs the script: [`scripts/build-bun-cpp.sh`](scripts/build-bun-cpp.sh)

#### 4. `link` / `build-bun`

After the `build-deps`, `build-zig`, and `build-cpp` steps have completed, this step links the Zig object file and C++ object file into a single binary: `bun-<os>-<arch>.zip`.

- on Windows, this runs the script: [`scripts/buildkite-link-bun.ps1`](scripts/buildkite-link-bun.ps1)
- on macOS and Linux, this runs the script: [`scripts/buildkite-link-bun.sh`](scripts/buildkite-link-bun.sh)

To speed up the build, there are two options (see the sketch after this section):

- `--fast`: This disables the LTO (link-time optimization) step.
- without `--fast`: This runs the LTO step, which is the default. The binaries that are released to GitHub are always built with LTO.

### Testing

### FAQ

> How do I add a new CI agent?

> How do I add/modify system dependencies?

> How do I SSH into a CI agent?

### Known issues

These are things that we know about, but haven't fixed or optimized yet.

- There is no `scripts/build-bun-zig.ps1` for Windows.

- The `build-deps` step does not cache in CI, so it re-builds each time (though it does use ccache). It attempts to check the `BUN_DEPS_CACHE_DIR` environment variable, but for some reason it doesn't work.

- Windows and Linux machines sometimes take up to 1-2 minutes to start tests. This is because robobun waits until the job is scheduled before provisioning the VM. Instead, it could start provisioning during the link step, or keep a pool of idle VMs around (though it's unclear how much more expensive that would be).

- There are a limited number of macOS VMs. This is because they are expensive and manually provisioned, mostly through MacStadium. If wait times are too long we can provision more, or buy some.

- To prevent idle machines, robobun periodically checks for idle machines and terminates them. Before doing this, it checks whether the machine is connected to Buildkite as an agent. However, the machine sometimes picks up a job in between, and that job is terminated.
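For orientation, the same four stages described in ci.md can be approximated locally on macOS or Linux by calling the scripts the pipeline uses, in the same order. This is a minimal sketch under the assumption that the scripts accept no extra arguments and honor `BUN_DEPS_OUT_DIR` the way the workflows set it; it is not the exact CI invocation.

```sh
# Sketch of the CI build order on macOS/Linux, run from the repository root.
# 1. build-deps: static libraries in src/deps -> build/bun-deps
BUN_DEPS_OUT_DIR="$PWD/build/bun-deps" ./scripts/all-dependencies.sh
# 2. build-zig: the Zig object file -> build/bun-zig.o (zig build can cross-compile)
./scripts/build-bun-zig.sh
# 3. build-cpp: the C++ archive -> build/bun-cpp-objects.a
./scripts/build-bun-cpp.sh
# 4. link: combine the Zig and C++ objects into bun-<os>-<arch>.zip
#    (the pipeline's --fast option skips the LTO step here)
./scripts/buildkite-link-bun.sh
```

Steps 1-3 do not depend on each other, which is why CI runs them in parallel; only the link step needs all three outputs.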
.buildkite/ci.yml (new file, 1523 lines)

File diff suppressed because it is too large.
.buildkite/scripts/upload-release.sh (new executable file, 94 lines)

@@ -0,0 +1,94 @@
#!/bin/bash

set -eo pipefail

function assert_main() {
  if [[ "$BUILDKITE_PULL_REQUEST_REPO" && "$BUILDKITE_REPO" != "$BUILDKITE_PULL_REQUEST_REPO" ]]; then
    echo "error: Cannot upload release from a fork"
    exit 1
  fi
  if [ "$BUILDKITE_PULL_REQUEST" != "false" ]; then
    echo "error: Cannot upload release from a pull request"
    exit 1
  fi
  if [ "$BUILDKITE_BRANCH" != "main" ]; then
    echo "error: Cannot upload release from a branch other than main"
    exit 1
  fi
}

function assert_buildkite_agent() {
  if ! command -v buildkite-agent &> /dev/null; then
    echo "error: Cannot find buildkite-agent, please install it:"
    echo "https://buildkite.com/docs/agent/v3/install"
    exit 1
  fi
}

function assert_gh() {
  if ! command -v gh &> /dev/null; then
    echo "warning: gh is not installed, installing..."
    if command -v brew &> /dev/null; then
      brew install gh
    else
      echo "error: Cannot install gh, please install it:"
      echo "https://github.com/cli/cli#installation"
      exit 1
    fi
  fi
}

function assert_gh_token() {
  local token=$(buildkite-agent secret get GITHUB_TOKEN)
  if [ -z "$token" ]; then
    echo "error: Cannot find GITHUB_TOKEN secret"
    echo ""
    echo "hint: Create a secret named GITHUB_TOKEN with a GitHub access token:"
    echo "https://buildkite.com/docs/pipelines/buildkite-secrets"
    exit 1
  fi
  export GH_TOKEN="$token"
}

function download_artifact() {
  local name=$1
  buildkite-agent artifact download "$name" .
  if [ ! -f "$name" ]; then
    echo "error: Cannot find Buildkite artifact: $name"
    exit 1
  fi
}

function upload_assets() {
  local tag=$1
  local files=${@:2}
  gh release upload "$tag" $files --clobber --repo "$BUILDKITE_REPO"
}

assert_main
assert_buildkite_agent
assert_gh
assert_gh_token

declare artifacts=(
  bun-darwin-aarch64.zip
  bun-darwin-aarch64-profile.zip
  bun-darwin-x64.zip
  bun-darwin-x64-profile.zip
  bun-linux-aarch64.zip
  bun-linux-aarch64-profile.zip
  bun-linux-x64.zip
  bun-linux-x64-profile.zip
  bun-linux-x64-baseline.zip
  bun-linux-x64-baseline-profile.zip
  bun-windows-x64.zip
  bun-windows-x64-profile.zip
  bun-windows-x64-baseline.zip
  bun-windows-x64-baseline-profile.zip
)

for artifact in "${artifacts[@]}"; do
  download_artifact $artifact
done

upload_assets "canary" "${artifacts[@]}"
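The script is driven entirely by Buildkite environment variables, so its guards are easy to exercise. A rough sketch follows, assuming `buildkite-agent` and `gh` are on `PATH` and using stand-in values (this is not a documented invocation):

```sh
# Hypothetical invocation: the assertions only pass for a non-fork build of main.
BUILDKITE_REPO="git@github.com:oven-sh/bun.git" \
BUILDKITE_PULL_REQUEST="false" \
BUILDKITE_BRANCH="main" \
  ./.buildkite/scripts/upload-release.sh
```

On a real agent the script then downloads each `bun-*.zip` artifact and uploads all of them to the `canary` GitHub release with `--clobber`.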
.github/ISSUE_TEMPLATE/2-bug-report.yml (vendored, 4 lines changed)

@@ -1,6 +1,8 @@
name: 🐛 Bug Report
description: Report an issue that should be fixed
labels: [bug]
labels:
  - bug
  - needs triage
body:
  - type: markdown
    attributes:
.github/ISSUE_TEMPLATE/6-crash-report.yml (vendored, 11 lines changed)

@@ -6,17 +6,12 @@ body:
  - type: markdown
    attributes:
      value: |
        Thank you so much for submitting a crash report. You're helping us make Bun more reliable for everyone!
  - type: textarea
    attributes:
      label: How can we reproduce the crash?
      description: Please provide instructions on how to reproduce the crash.
        **Thank you so much** for submitting a crash report. You're helping us make Bun more reliable for everyone!
  - type: textarea
    id: code
    attributes:
      label: JavaScript/TypeScript code that reproduces the crash?
      description: If this crash happened in the Bun runtime, can you paste code we can run to reproduce the crash?
      render: shell
      label: How can we reproduce the crash?
      description: Please provide a [minimal reproduction](https://stackoverflow.com/help/minimal-reproducible-example) using a GitHub repository, [Replit](https://replit.com/@replit/Bun) or [CodeSandbox](https://codesandbox.io/templates/bun)
  - type: textarea
    id: logs
    attributes:
.github/ISSUE_TEMPLATE/7-install-crash-report.yml (new file, vendored, 27 lines)

@@ -0,0 +1,27 @@
name: bun install crash report
description: Report a crash in bun install
labels:
  - npm
body:
  - type: markdown
    attributes:
      value: |
        **Thank you so much** for submitting a crash report. You're helping us make Bun more reliable for everyone!
  - type: textarea
    id: repro
    attributes:
      label: How can we reproduce the crash?
      description: Please provide a [minimal reproduction](https://stackoverflow.com/help/minimal-reproducible-example) using a GitHub repository, [Replit](https://replit.com/@replit/Bun) or [CodeSandbox](https://codesandbox.io/templates/bun)
  - type: textarea
    id: logs
    attributes:
      label: Relevant log output
      description: Please copy and paste any relevant log output. This will be
        automatically formatted into code, so no need for backticks.
      render: shell
  - type: textarea
    id: remapped_trace
    attributes:
      label: Stack Trace (bun.report)
    validations:
      required: true
.github/actions/bump/action.yml (new file, vendored, 43 lines)

@@ -0,0 +1,43 @@
name: Bump version
description: Bump the version of Bun

inputs:
  version:
    description: The most recent version of Bun.
    required: true
    type: string
  token:
    description: The GitHub token to use for creating a pull request.
    required: true
    type: string
    default: ${{ github.token }}

runs:
  using: composite
  steps:
    - name: Run Bump
      shell: bash
      id: bump
      run: |
        set -euo pipefail
        MESSAGE=$(bun ./scripts/bump.ts patch --last-version=${{ inputs.version }})
        LATEST=$(cat LATEST)
        echo "version=$LATEST" >> $GITHUB_OUTPUT
        echo "message=$MESSAGE" >> $GITHUB_OUTPUT
    - name: Create Pull Request
      uses: peter-evans/create-pull-request@v4
      with:
        add-paths: |
          CMakeLists.txt
          LATEST
        token: ${{ inputs.token }}
        commit-message: Bump version to ${{ steps.bump.outputs.version }}
        title: Bump to ${{ steps.bump.outputs.version }}
        delete-branch: true
        branch: github-actions/bump-version-${{ steps.bump.outputs.version }}--${{ github.run_id }}
        body: |
          ## What does this PR do?

          ${{ steps.bump.outputs.message }}

          Auto-bumped by [this workflow](https://github.com/oven-sh/bun/actions/workflows/release.yml)
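Outside of GitHub Actions, the core of this composite step is just the bump script plus the `LATEST` file it reads back. A minimal sketch, assuming `bun` is installed and using a made-up `--last-version` value (the PR-creation step itself is Actions-only):

```sh
# Hypothetical local run of the version bump.
MESSAGE=$(bun ./scripts/bump.ts patch --last-version=1.1.20)
LATEST=$(cat LATEST)   # the action reads the new version back from the LATEST file
echo "bumping to $LATEST: $MESSAGE"
```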
.github/workflows/build-darwin.yml (vendored, 52 lines changed)
@@ -9,7 +9,7 @@ on:
|
||||
inputs:
|
||||
runs-on:
|
||||
type: string
|
||||
default: macos-12-large
|
||||
default: macos-13-large
|
||||
tag:
|
||||
type: string
|
||||
required: true
|
||||
@@ -27,10 +27,12 @@ on:
|
||||
type: boolean
|
||||
|
||||
env:
|
||||
LLVM_VERSION: 16
|
||||
LLVM_VERSION: 18
|
||||
BUN_VERSION: 1.1.8
|
||||
LC_CTYPE: "en_US.UTF-8"
|
||||
LC_ALL: "en_US.UTF-8"
|
||||
# LTO is disabled because we cannot use lld on macOS currently
|
||||
BUN_ENABLE_LTO: "0"
|
||||
|
||||
jobs:
|
||||
build-submodules:
|
||||
@@ -53,16 +55,7 @@ jobs:
|
||||
cat $(echo scripts/build*.sh scripts/all-dependencies.sh | tr " " "\n" | sort)
|
||||
}
|
||||
echo "hash=$(print_versions | shasum)" >> $GITHUB_OUTPUT
|
||||
- if: ${{ !inputs.no-cache }}
|
||||
name: Restore Cache
|
||||
id: cache
|
||||
uses: actions/cache/restore@v4
|
||||
with:
|
||||
path: ${{ runner.temp }}/bun-deps
|
||||
key: bun-${{ inputs.tag }}-deps-${{ steps.hash.outputs.hash }}
|
||||
# TODO: Figure out how to cache homebrew dependencies
|
||||
- if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
|
||||
name: Install Dependencies
|
||||
- name: Install Dependencies
|
||||
env:
|
||||
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
|
||||
HOMEBREW_NO_AUTO_UPDATE: 1
|
||||
@@ -86,24 +79,16 @@ jobs:
|
||||
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix llvm@$LLVM_VERSION)/bin" >> $GITHUB_PATH
|
||||
brew link --overwrite llvm@$LLVM_VERSION
|
||||
- if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
|
||||
name: Clone Submodules
|
||||
- name: Clone Submodules
|
||||
run: |
|
||||
./scripts/update-submodules.sh
|
||||
- name: Build Submodules
|
||||
if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
|
||||
env:
|
||||
CPU_TARGET: ${{ inputs.cpu }}
|
||||
BUN_DEPS_OUT_DIR: ${{ runner.temp }}/bun-deps
|
||||
run: |
|
||||
mkdir -p $BUN_DEPS_OUT_DIR
|
||||
./scripts/all-dependencies.sh
|
||||
- name: Save Cache
|
||||
if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
|
||||
uses: actions/cache/save@v4
|
||||
with:
|
||||
path: ${{ runner.temp }}/bun-deps
|
||||
key: ${{ steps.cache.outputs.cache-primary-key }}
|
||||
- name: Upload bun-${{ inputs.tag }}-deps
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
@@ -147,14 +132,6 @@ jobs:
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: ${{ env.BUN_VERSION }}
|
||||
- if: ${{ !inputs.no-cache }}
|
||||
name: Restore Cache
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ${{ runner.temp }}/ccache
|
||||
key: bun-${{ inputs.tag }}-cpp-${{ hashFiles('Dockerfile', 'Makefile', 'CMakeLists.txt', 'build.zig', 'scripts/**', 'src/**', 'packages/bun-usockets/src/**', 'packages/bun-uws/src/**') }}
|
||||
restore-keys: |
|
||||
bun-${{ inputs.tag }}-cpp-
|
||||
- name: Compile
|
||||
env:
|
||||
CPU_TARGET: ${{ inputs.cpu }}
|
||||
@@ -244,18 +221,9 @@ jobs:
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}-zig
|
||||
path: ${{ runner.temp }}/release
|
||||
- if: ${{ !inputs.no-cache }}
|
||||
name: Restore Cache
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ${{ runner.temp }}/ccache
|
||||
key: bun-${{ inputs.tag }}-cpp-${{ hashFiles('Dockerfile', 'Makefile', 'CMakeLists.txt', 'build.zig', 'scripts/**', 'src/**', 'packages/bun-usockets/src/**', 'packages/bun-uws/src/**') }}
|
||||
restore-keys: |
|
||||
bun-${{ inputs.tag }}-cpp-
|
||||
- name: Link
|
||||
env:
|
||||
CPU_TARGET: ${{ inputs.cpu }}
|
||||
CCACHE_DIR: ${{ runner.temp }}/ccache
|
||||
run: |
|
||||
SRC_DIR=$PWD
|
||||
mkdir ${{ runner.temp }}/link-build
|
||||
@@ -265,7 +233,7 @@ jobs:
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DUSE_LTO=ON \
|
||||
-DBUN_LINK_ONLY=1 \
|
||||
-DBUN_ZIG_OBJ="${{ runner.temp }}/release/bun-zig.o" \
|
||||
-DBUN_ZIG_OBJ_DIR="${{ runner.temp }}/release" \
|
||||
-DBUN_CPP_ARCHIVE="${{ runner.temp }}/bun-cpp-obj/bun-cpp-objects.a" \
|
||||
-DBUN_DEPS_OUT_DIR="${{ runner.temp }}/bun-deps" \
|
||||
-DNO_CONFIGURE_DEPENDS=1
|
||||
@@ -276,6 +244,12 @@ jobs:
|
||||
chmod +x bun-profile bun
|
||||
mkdir -p bun-${{ inputs.tag }}-profile/ bun-${{ inputs.tag }}/
|
||||
mv bun-profile bun-${{ inputs.tag }}-profile/bun-profile
|
||||
if [ -f bun-profile.dSYM || -d bun-profile.dSYM ]; then
|
||||
mv bun-profile.dSYM bun-${{ inputs.tag }}-profile/bun-profile.dSYM
|
||||
fi
|
||||
if [ -f bun.dSYM || -d bun.dSYM ]; then
|
||||
mv bun.dSYM bun-${{ inputs.tag }}-profile/bun-profile.dSYM
|
||||
fi
|
||||
mv bun bun-${{ inputs.tag }}/bun
|
||||
zip -r bun-${{ inputs.tag }}-profile.zip bun-${{ inputs.tag }}-profile
|
||||
zip -r bun-${{ inputs.tag }}.zip bun-${{ inputs.tag }}
|
||||
|
||||
.github/workflows/build-windows.yml (vendored, 55 lines changed)
@@ -31,17 +31,23 @@ on:
|
||||
|
||||
env:
|
||||
# Must specify exact version of LLVM for Windows
|
||||
LLVM_VERSION: 16.0.6
|
||||
LLVM_VERSION: 18.1.8
|
||||
BUN_VERSION: ${{ inputs.bun-version }}
|
||||
BUN_GARBAGE_COLLECTOR_LEVEL: 1
|
||||
BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING: 1
|
||||
CI: true
|
||||
USE_LTO: 1
|
||||
|
||||
jobs:
|
||||
build-submodules:
|
||||
name: Build Submodules
|
||||
runs-on: ${{ inputs.runs-on }}
|
||||
steps:
|
||||
- name: Install Scoop
|
||||
run: |
|
||||
Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
|
||||
Invoke-RestMethod -Uri https://get.scoop.sh | Invoke-Expression
|
||||
Join-Path (Resolve-Path ~).Path "scoop\shims" >> $Env:GITHUB_PATH
|
||||
- name: Setup Git
|
||||
run: |
|
||||
git config --global core.autocrlf false
|
||||
@@ -72,14 +78,11 @@ jobs:
|
||||
path: bun-deps
|
||||
key: bun-${{ inputs.tag }}-deps-${{ steps.hash.outputs.hash }}
|
||||
- if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
|
||||
name: Install LLVM
|
||||
uses: KyleMayes/install-llvm-action@8b37482c5a2997a3ab5dbf6561f8109e2eaa7d3b
|
||||
with:
|
||||
version: ${{ env.LLVM_VERSION }}
|
||||
- if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
|
||||
name: Install Ninja
|
||||
name: Install LLVM and Ninja
|
||||
run: |
|
||||
choco install -y ninja
|
||||
scoop install ninja
|
||||
scoop install llvm@${{ env.LLVM_VERSION }}
|
||||
scoop install nasm@2.16.01
|
||||
- if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
|
||||
name: Clone Submodules
|
||||
run: |
|
||||
@@ -89,12 +92,9 @@ jobs:
|
||||
env:
|
||||
CPU_TARGET: ${{ inputs.cpu }}
|
||||
CCACHE_DIR: ccache
|
||||
USE_LTO: 1
|
||||
run: |
|
||||
.\scripts\env.ps1 ${{ contains(inputs.tag, '-baseline') && '-Baseline' || '' }}
|
||||
Invoke-WebRequest -Uri "https://www.nasm.us/pub/nasm/releasebuilds/2.16.01/win64/nasm-2.16.01-win64.zip" -OutFile nasm.zip
|
||||
Expand-Archive nasm.zip (mkdir -Force "nasm")
|
||||
$Nasm = (Get-ChildItem "nasm")
|
||||
$env:Path += ";${Nasm}"
|
||||
$env:BUN_DEPS_OUT_DIR = (mkdir -Force "./bun-deps")
|
||||
.\scripts\all-dependencies.ps1
|
||||
- name: Save Cache
|
||||
@@ -142,6 +142,11 @@ jobs:
|
||||
needs: codegen
|
||||
runs-on: ${{ inputs.runs-on }}
|
||||
steps:
|
||||
- name: Install Scoop
|
||||
run: |
|
||||
Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
|
||||
Invoke-RestMethod -Uri https://get.scoop.sh | Invoke-Expression
|
||||
Join-Path (Resolve-Path ~).Path "scoop\shims" >> $Env:GITHUB_PATH
|
||||
- name: Setup Git
|
||||
run: |
|
||||
git config --global core.autocrlf false
|
||||
@@ -150,13 +155,10 @@ jobs:
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: recursive
|
||||
- name: Install LLVM
|
||||
uses: KyleMayes/install-llvm-action@8b37482c5a2997a3ab5dbf6561f8109e2eaa7d3b
|
||||
with:
|
||||
version: ${{ env.LLVM_VERSION }}
|
||||
- name: Install Ninja
|
||||
- name: Install LLVM and Ninja
|
||||
run: |
|
||||
choco install -y ninja
|
||||
scoop install ninja
|
||||
scoop install llvm@${{ env.LLVM_VERSION }}
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
@@ -178,6 +180,7 @@ jobs:
|
||||
env:
|
||||
CPU_TARGET: ${{ inputs.cpu }}
|
||||
CCACHE_DIR: ccache
|
||||
USE_LTO: 1
|
||||
run: |
|
||||
# $CANARY_REVISION = if (Test-Path build/.canary_revision) { Get-Content build/.canary_revision } else { "0" }
|
||||
$CANARY_REVISION = 0
|
||||
@@ -187,6 +190,7 @@ jobs:
|
||||
cd build
|
||||
cmake .. -G Ninja -DCMAKE_BUILD_TYPE=Release `
|
||||
-DNO_CODEGEN=1 `
|
||||
-DUSE_LTO=1 `
|
||||
-DNO_CONFIGURE_DEPENDS=1 `
|
||||
"-DCANARY=${CANARY_REVISION}" `
|
||||
-DBUN_CPP_ONLY=1 ${{ contains(inputs.tag, '-baseline') && '-DUSE_BASELINE_BUILD=1' || '' }}
|
||||
@@ -221,6 +225,11 @@ jobs:
|
||||
- build-zig
|
||||
- codegen
|
||||
steps:
|
||||
- name: Install Scoop
|
||||
run: |
|
||||
Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
|
||||
Invoke-RestMethod -Uri https://get.scoop.sh | Invoke-Expression
|
||||
Join-Path (Resolve-Path ~).Path "scoop\shims" >> $Env:GITHUB_PATH
|
||||
- name: Setup Git
|
||||
run: |
|
||||
git config --global core.autocrlf false
|
||||
@@ -229,13 +238,10 @@ jobs:
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: recursive
|
||||
- name: Install LLVM
|
||||
uses: KyleMayes/install-llvm-action@8b37482c5a2997a3ab5dbf6561f8109e2eaa7d3b
|
||||
with:
|
||||
version: ${{ env.LLVM_VERSION }}
|
||||
- name: Install Ninja
|
||||
run: |
|
||||
choco install -y ninja
|
||||
scoop install ninja
|
||||
scoop install llvm@${{ env.LLVM_VERSION }}
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
@@ -283,9 +289,10 @@ jobs:
|
||||
-DNO_CONFIGURE_DEPENDS=1 `
|
||||
"-DCANARY=${CANARY_REVISION}" `
|
||||
-DBUN_LINK_ONLY=1 `
|
||||
-DUSE_LTO=1 `
|
||||
"-DBUN_DEPS_OUT_DIR=$(Resolve-Path ../bun-deps)" `
|
||||
"-DBUN_CPP_ARCHIVE=$(Resolve-Path ../bun-cpp/bun-cpp-objects.a)" `
|
||||
"-DBUN_ZIG_OBJ=$(Resolve-Path ../bun-zig/bun-zig.o)" `
|
||||
"-DBUN_ZIG_OBJ_DIR=$(Resolve-Path ../bun-zig)" `
|
||||
${{ contains(inputs.tag, '-baseline') && '-DUSE_BASELINE_BUILD=1' || '' }}
|
||||
if ($LASTEXITCODE -ne 0) { throw "CMake configuration failed" }
|
||||
ninja -v
|
||||
|
||||
.github/workflows/ci.yml (vendored, 14 lines changed)
@@ -34,7 +34,7 @@ jobs:
|
||||
uses: ./.github/workflows/run-format.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
zig-version: 0.12.0-dev.1828+225fe6ddb
|
||||
zig-version: 0.13.0
|
||||
permissions:
|
||||
contents: write
|
||||
lint:
|
||||
@@ -84,7 +84,7 @@ jobs:
|
||||
uses: ./.github/workflows/build-darwin.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-12-large' || 'macos-12' }}
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-13-large' || 'macos-13' }}
|
||||
tag: darwin-x64
|
||||
arch: x64
|
||||
cpu: haswell
|
||||
@@ -95,7 +95,7 @@ jobs:
|
||||
uses: ./.github/workflows/build-darwin.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-12-large' || 'macos-12' }}
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-13-large' || 'macos-13' }}
|
||||
tag: darwin-x64-baseline
|
||||
arch: x64
|
||||
cpu: nehalem
|
||||
@@ -106,7 +106,7 @@ jobs:
|
||||
uses: ./.github/workflows/build-darwin.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-darwin-aarch64' || 'macos-12' }}
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-darwin-aarch64' || 'macos-13' }}
|
||||
tag: darwin-aarch64
|
||||
arch: aarch64
|
||||
cpu: native
|
||||
@@ -175,7 +175,7 @@ jobs:
|
||||
with:
|
||||
run-id: ${{ inputs.run-id }}
|
||||
pr-number: ${{ github.event.number }}
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-12-large' || 'macos-12' }}
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-13-large' || 'macos-13' }}
|
||||
tag: darwin-x64
|
||||
darwin-x64-baseline-test:
|
||||
if: ${{ inputs.run-id || github.event_name == 'pull_request' }}
|
||||
@@ -186,7 +186,7 @@ jobs:
|
||||
with:
|
||||
run-id: ${{ inputs.run-id }}
|
||||
pr-number: ${{ github.event.number }}
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-12-large' || 'macos-12' }}
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-13-large' || 'macos-13' }}
|
||||
tag: darwin-x64-baseline
|
||||
darwin-aarch64-test:
|
||||
if: ${{ inputs.run-id || github.event_name == 'pull_request' }}
|
||||
@@ -197,7 +197,7 @@ jobs:
|
||||
with:
|
||||
run-id: ${{ inputs.run-id }}
|
||||
pr-number: ${{ github.event.number }}
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-darwin-aarch64' || 'macos-12' }}
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-darwin-aarch64' || 'macos-13' }}
|
||||
tag: darwin-aarch64
|
||||
windows-x64-test:
|
||||
if: ${{ inputs.run-id || github.event_name == 'pull_request' }}
|
||||
|
||||
.github/workflows/create-release-build.yml (vendored, 6 lines changed)

@@ -90,7 +90,7 @@
    uses: ./.github/workflows/build-darwin.yml
    secrets: inherit
    with:
      runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-12-large' || 'macos-12' }}
      runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-13-large' || 'macos-13' }}
      tag: darwin-x64
      arch: x64
      cpu: haswell
@@ -100,7 +100,7 @@
    uses: ./.github/workflows/build-darwin.yml
    secrets: inherit
    with:
      runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-12-large' || 'macos-12' }}
      runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-13-large' || 'macos-13' }}
      tag: darwin-x64-baseline
      arch: x64
      cpu: nehalem
@@ -110,7 +110,7 @@
    uses: ./.github/workflows/build-darwin.yml
    secrets: inherit
    with:
      runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-darwin-aarch64' || 'macos-12' }}
      runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-darwin-aarch64' || 'macos-13' }}
      tag: darwin-aarch64
      arch: aarch64
      cpu: native
.github/workflows/labeled.yml (new file, vendored, 81 lines)

@@ -0,0 +1,81 @@
name: Issue Labeled
env:
  BUN_VERSION: 1.1.13

on:
  issues:
    types: [labeled]

jobs:
  on-labeled:
    runs-on: ubuntu-latest
    if: github.event.label.name == 'crash' || github.event.label.name == 'needs repro'
    permissions:
      issues: write
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          sparse-checkout: |
            scripts
            .github
            CMakeLists.txt
      - name: Setup Bun
        uses: ./.github/actions/setup-bun
        with:
          bun-version: "1.1.13"
      - name: "add platform and command label"
        id: add-labels
        if: github.event.label.name == 'crash'
        env:
          GITHUB_ISSUE_BODY: ${{ github.event.issue.body }}
          GITHUB_ISSUE_TITLE: ${{ github.event.issue.title }}
        shell: bash
        run: |
          LABELS=$(bun scripts/read-issue.ts)
          echo "labels=$LABELS" >> $GITHUB_OUTPUT
          bun scripts/is-outdated.ts

          if [[ -f "is-outdated.txt" ]]; then
            echo "is-outdated=true" >> $GITHUB_OUTPUT
          fi

          if [[ -f "outdated.txt" ]]; then
            echo "oudated=$(cat outdated.txt)" >> $GITHUB_OUTPUT
          fi

          echo "latest=$(cat LATEST)" >> $GITHUB_OUTPUT

          rm -rf is-outdated.txt outdated.txt latest.txt
      - name: Add labels
        uses: actions-cool/issues-helper@v3
        if: github.event.label.name == 'crash'
        with:
          actions: "add-labels"
          token: ${{ secrets.GITHUB_TOKEN }}
          issue-number: ${{ github.event.issue.number }}
          labels: ${{ steps.add-labels.outputs.labels }}
      - name: Comment outdated
        if: steps.add-labels.outputs.is-outdated == 'true' && github.event.label.name == 'crash'
        uses: actions-cool/issues-helper@v3
        with:
          actions: "create-comment"
          token: ${{ secrets.GITHUB_TOKEN }}
          issue-number: ${{ github.event.issue.number }}
          body: |
            @${{ github.event.issue.user.login }}, the latest version of Bun is v${{ steps.add-labels.outputs.latest }}, but this crash was reported on Bun v${{ steps.add-labels.outputs.oudated }}.

            Are you able to reproduce this crash on the latest version of Bun?

            ```sh
            bun upgrade
            ```
      - name: Comment needs repro
        if: github.event.label.name == 'needs repro'
        uses: actions-cool/issues-helper@v3
        with:
          actions: "create-comment"
          token: ${{ secrets.GITHUB_TOKEN }}
          issue-number: ${{ github.event.issue.number }}
          body: |
            Hello @${{ github.event.issue.user.login }}. Please provide a [minimal reproduction](https://stackoverflow.com/help/minimal-reproducible-example) using a GitHub repository, [Replit](https://replit.com/@replit/Bun), or [CodeSandbox](https://codesandbox.io/templates/bun). Issues marked with `needs repro` will be closed if they have no activity within 3 days.
.github/workflows/lint-cpp.yml (vendored, 9 lines changed)

@@ -14,10 +14,11 @@
      type: string
      description: The workflow ID to download artifacts (skips the build step)
  pull_request:
    paths-ignore:
      - .vscode/**/*
      - docs/**/*
      - examples/**/*
    paths:
      - ".github/workflows/lint-cpp.yml"
      - "**/*.cpp"
      - "src/deps/**/*"
      - "CMakeLists.txt"

jobs:
  lint-cpp:
.github/workflows/on-submodule-update.yml (new file, vendored, 89 lines)

@@ -0,0 +1,89 @@
name: Comment on updated submodule

on:
  pull_request_target:
    paths:
      - "src/generated_versions_list.zig"
      - ".github/workflows/on-submodule-update.yml"

jobs:
  comment:
    name: Comment
    runs-on: ubuntu-latest
    if: ${{ github.repository_owner == 'oven-sh' }}
    permissions:
      contents: read
      pull-requests: write
      issues: write
    steps:
      - name: Checkout current
        uses: actions/checkout@v4
        with:
          sparse-checkout: |
            src
      - name: Hash generated versions list
        id: hash
        run: |
          echo "hash=$(sha256sum src/generated_versions_list.zig | cut -d ' ' -f 1)" >> $GITHUB_OUTPUT
      - name: Checkout base
        uses: actions/checkout@v4
        with:
          ref: ${{ github.base_ref }}
          sparse-checkout: |
            src
      - name: Hash base
        id: base
        run: |
          echo "base=$(sha256sum src/generated_versions_list.zig | cut -d ' ' -f 1)" >> $GITHUB_OUTPUT
      - name: Compare
        id: compare
        run: |
          if [ "${{ steps.hash.outputs.hash }}" != "${{ steps.base.outputs.base }}" ]; then
            echo "changed=true" >> $GITHUB_OUTPUT
          else
            echo "changed=false" >> $GITHUB_OUTPUT
          fi
      - name: Find Comment
        id: comment
        uses: peter-evans/find-comment@v3
        with:
          issue-number: ${{ github.event.pull_request.number }}
          comment-author: github-actions[bot]
          body-includes: <!-- generated-comment submodule-updated -->
      - name: Write Warning Comment
        uses: peter-evans/create-or-update-comment@v4
        if: steps.compare.outputs.changed == 'true'
        with:
          comment-id: ${{ steps.comment.outputs.comment-id }}
          issue-number: ${{ github.event.pull_request.number }}
          edit-mode: replace
          body: |
            ⚠️ **Warning:** @${{ github.actor }}, this PR has changes to submodule versions.

            If this change was intentional, please ignore this message. If not, please undo changes to submodules and rebase your branch.

            <!-- generated-comment submodule-updated -->
      - name: Add labels
        uses: actions-cool/issues-helper@v3
        if: steps.compare.outputs.changed == 'true'
        with:
          actions: "add-labels"
          token: ${{ secrets.GITHUB_TOKEN }}
          issue-number: ${{ github.event.pull_request.number }}
          labels: "changed-submodules"
      - name: Remove labels
        uses: actions-cool/issues-helper@v3
        if: steps.compare.outputs.changed == 'false'
        with:
          actions: "remove-labels"
          token: ${{ secrets.GITHUB_TOKEN }}
          issue-number: ${{ github.event.pull_request.number }}
          labels: "changed-submodules"
      - name: Delete outdated comment
        uses: actions-cool/issues-helper@v3
        if: steps.compare.outputs.changed == 'false' && steps.comment.outputs.comment-id != ''
        with:
          actions: "delete-comment"
          token: ${{ secrets.GITHUB_TOKEN }}
          issue-number: ${{ github.event.pull_request.number }}
          comment-id: ${{ steps.comment.outputs.comment-id }}
.github/workflows/release.yml (vendored, 48 lines changed)
@@ -63,7 +63,7 @@ jobs:
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: "1.0.21"
|
||||
bun-version: "1.1.20"
|
||||
- name: Install Dependencies
|
||||
run: bun install
|
||||
- name: Sign Release
|
||||
@@ -88,7 +88,7 @@ jobs:
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: "1.0.21"
|
||||
bun-version: "1.1.20"
|
||||
- name: Install Dependencies
|
||||
run: bun install
|
||||
- name: Release
|
||||
@@ -117,7 +117,7 @@ jobs:
|
||||
if: ${{ env.BUN_VERSION != 'canary' }}
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: "1.0.21"
|
||||
bun-version: "1.1.20"
|
||||
- name: Setup Bun
|
||||
if: ${{ env.BUN_VERSION == 'canary' }}
|
||||
uses: ./.github/actions/setup-bun
|
||||
@@ -259,7 +259,7 @@ jobs:
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: "1.0.21"
|
||||
bun-version: "1.1.20"
|
||||
- name: Install Dependencies
|
||||
run: bun install
|
||||
- name: Release
|
||||
@@ -270,3 +270,43 @@ jobs:
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY}}
|
||||
AWS_ENDPOINT: ${{ secrets.AWS_ENDPOINT }}
|
||||
AWS_BUCKET: bun
|
||||
|
||||
notify-sentry:
|
||||
name: Notify Sentry
|
||||
runs-on: ubuntu-latest
|
||||
needs: s3
|
||||
steps:
|
||||
- name: Notify Sentry
|
||||
uses: getsentry/action-release@v1.7.0
|
||||
env:
|
||||
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
|
||||
SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
|
||||
with:
|
||||
ignore_missing: true
|
||||
ignore_empty: true
|
||||
version: ${{ env.BUN_VERSION }}
|
||||
environment: production
|
||||
|
||||
bump:
|
||||
name: "Bump version"
|
||||
runs-on: ubuntu-latest
|
||||
if: ${{ github.event_name != 'schedule' }}
|
||||
permissions:
|
||||
pull-requests: write
|
||||
contents: write
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
if: ${{ env.BUN_LATEST == 'true' }}
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
if: ${{ env.BUN_LATEST == 'true' }}
|
||||
with:
|
||||
bun-version: "1.1.12"
|
||||
- name: Bump version
|
||||
uses: ./.github/actions/bump
|
||||
if: ${{ env.BUN_LATEST == 'true' }}
|
||||
with:
|
||||
version: ${{ env.BUN_VERSION }}
|
||||
token: ${{ github.token }}
|
||||
|
||||
.github/workflows/run-format.yml (vendored, 6 lines changed)

@@ -22,13 +22,14 @@
          sparse-checkout: |
            .github
            src
            scripts
            packages
            test
            bench
      - name: Setup Bun
        uses: ./.github/actions/setup-bun
        with:
          bun-version: "1.1.8"
          bun-version: "1.1.20"
      - name: Setup Zig
        uses: goto-bus-stop/setup-zig@c7b6cdd3adba8f8b96984640ff172c37c93f73ee
        with:
@@ -42,6 +43,9 @@
      - name: Format Zig
        run: |
          bun fmt:zig
      - name: Generate submodule versions
        run: |
          bash ./scripts/write-versions.sh
      - name: Commit
        uses: stefanzweifel/git-auto-commit-action@v5
        with:
.github/workflows/run-lint-cpp.yml (vendored, 2 lines changed)

@@ -17,7 +17,7 @@ on:
jobs:
  lint-cpp:
    name: Lint C++
    runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-13-xlarge' || 'macos-12' }}
    runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-13-xlarge' || 'macos-13' }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
.github/workflows/run-test.yml (vendored, 8 lines changed)

@@ -78,14 +78,20 @@
          node-version: 20
      - name: Install Dependencies
        timeout-minutes: 5
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          bun install
      - name: Install Dependencies (test)
        timeout-minutes: 5
        run: |
          bun install --cwd test
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      - name: Install Dependencies (runner)
        timeout-minutes: 5
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          bun install --cwd packages/bun-internal-test
      - name: Run Tests
@@ -93,6 +99,7 @@
        timeout-minutes: 90
        shell: bash
        env:
          IS_BUN_CI: 1
          TMPDIR: ${{ runner.temp }}
          BUN_TAG: ${{ inputs.tag }}
          BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING: "true"
@@ -102,6 +109,7 @@
          TEST_INFO_STRIPE: ${{ secrets.TEST_INFO_STRIPE }}
          TEST_INFO_AZURE_SERVICE_BUS: ${{ secrets.TEST_INFO_AZURE_SERVICE_BUS }}
          SHELLOPTS: igncr
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          node packages/bun-internal-test/src/runner.node.mjs $(which bun)
      - if: ${{ always() }}
.github/workflows/test-bump.yml (new file, vendored, 29 lines)

@@ -0,0 +1,29 @@
name: Test Bump version

on:
  workflow_dispatch:
    inputs:
      version:
        type: string
        description: What is the release tag? (e.g. "1.0.2", "canary")
        required: true

jobs:
  bump:
    name: "Bump version"
    runs-on: ubuntu-latest
    permissions:
      pull-requests: write
      contents: write
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Setup Bun
        uses: ./.github/actions/setup-bun
        with:
          bun-version: "1.1.12"
      - name: Bump version
        uses: ./.github/actions/bump
        with:
          version: ${{ inputs.version }}
          token: ${{ github.token }}
.github/workflows/upload.yml (vendored, 14 lines changed)

@@ -56,7 +56,7 @@
          aws_secret_access_key: ${{ secrets.AWS_SECRET_ACCESS_KEY}}
          aws_bucket: ${{ secrets.AWS_BUCKET }}
          source_dir: bun
          destination_dir: releases/${{ github.event.workflow_run.head_sha || github.sha }}
          destination_dir: releases/${{ github.event.workflow_run.head_sha || github.sha }}-canary
      - name: Upload to S3 (using tag)
        uses: shallwefootball/s3-upload-action@4350529f410221787ccf424e50133cbc1b52704e
        with:
@@ -80,3 +80,15 @@
            bun upgrade --canary
            # bun upgrade --stable <- to downgrade
            ```
      # If notifying sentry fails, don't fail the rest of the build.
      - name: Notify Sentry
        uses: getsentry/action-release@v1.7.0
        env:
          SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
          SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
          SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
        with:
          ignore_missing: true
          ignore_empty: true
          version: ${{ github.event.workflow_run.head_sha || github.sha }}-canary
          environment: canary
.gitignore (vendored, 4 lines changed)

@@ -15,6 +15,7 @@
.vs
.vscode/clang*
.vscode/cpp*
.zig-cache
*.a
*.bc
*.big
@@ -54,6 +55,7 @@
/test.js
/test.ts
/testdir
/test.zig
build
build.ninja
bun-binary
@@ -142,3 +144,5 @@ yarn.lock
zig-cache
zig-out
test/node.js/upstream
.zig-cache
scripts/env.local
.gitmodules (vendored, 22 lines changed)

@@ -69,13 +69,6 @@ ignore = dirty
  depth = 1
  shallow = true
  fetchRecurseSubmodules = false
[submodule "src/deps/base64"]
  path = src/deps/base64
  url = https://github.com/aklomp/base64.git
  ignore = dirty
  depth = 1
  shallow = true
  fetchRecurseSubmodules = false
[submodule "src/deps/ls-hpack"]
  path = src/deps/ls-hpack
  url = https://github.com/litespeedtech/ls-hpack.git
@@ -83,10 +76,17 @@ ignore = dirty
  depth = 1
  shallow = true
  fetchRecurseSubmodules = false
[submodule "zig"]
  path = src/deps/zig
  url = https://github.com/oven-sh/zig
  branch = bun
[submodule "src/deps/libuv"]
  path = src/deps/libuv
  url = https://github.com/libuv/libuv.git
  ignore = dirty
  depth = 1
  shallow = true
  fetchRecurseSubmodules = false
  branch = v1.48.0
[submodule "zig"]
  path = src/deps/zig
  url = https://github.com/oven-sh/zig
  depth = 1
  shallow = true
  fetchRecurseSubmodules = false
.vscode/launch.json (generated, vendored, 91 lines changed)
@@ -17,8 +17,7 @@
|
||||
"cwd": "${workspaceFolder}/test",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "1",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
},
|
||||
@@ -34,9 +33,16 @@
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "1",
|
||||
"BUN_DEBUG_FileReader": "1",
|
||||
"BUN_DEBUG_jest": "1",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"name": "Attach",
|
||||
"request": "attach",
|
||||
"pid": "${command:pickMyProcess}",
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
@@ -139,12 +145,14 @@
|
||||
"request": "launch",
|
||||
"name": "bun run [file]",
|
||||
"program": "${workspaceFolder}/build/bun-debug",
|
||||
"args": ["run", "${fileBasename}"],
|
||||
"args": ["run", "${file}"],
|
||||
"cwd": "${fileDirname}",
|
||||
"env": {
|
||||
"FORCE_COLOR": "0",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_DEBUG_EventLoop": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
"BUN_DEBUG_ALL": "1",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
},
|
||||
@@ -438,13 +446,16 @@
|
||||
"request": "launch",
|
||||
"name": "bun test [*] (ci)",
|
||||
"program": "node",
|
||||
"args": ["src/runner.node.mjs"],
|
||||
"cwd": "${workspaceFolder}/packages/bun-internal-test",
|
||||
"args": ["test/runner.node.mjs"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"console": "internalConsole",
|
||||
},
|
||||
// Windows: bun test [file]
|
||||
{
|
||||
"type": "cppvsdbg",
|
||||
"sourceFileMap": {
|
||||
"D:\\a\\WebKit\\WebKit\\Source": "${workspaceFolder}\\src\\bun.js\\WebKit\\Source",
|
||||
},
|
||||
"request": "launch",
|
||||
"name": "Windows: bun test [file]",
|
||||
"program": "${workspaceFolder}/build/bun-debug.exe",
|
||||
@@ -472,6 +483,9 @@
|
||||
},
|
||||
{
|
||||
"type": "cppvsdbg",
|
||||
"sourceFileMap": {
|
||||
"D:\\a\\WebKit\\WebKit\\Source": "${workspaceFolder}\\src\\bun.js\\WebKit\\Source",
|
||||
},
|
||||
"request": "launch",
|
||||
"name": "Windows: bun test --only [file]",
|
||||
"program": "${workspaceFolder}/build/bun-debug.exe",
|
||||
@@ -510,6 +524,9 @@
|
||||
},
|
||||
{
|
||||
"type": "cppvsdbg",
|
||||
"sourceFileMap": {
|
||||
"D:\\a\\WebKit\\WebKit\\Source": "${workspaceFolder}\\src\\bun.js\\WebKit\\Source",
|
||||
},
|
||||
"request": "launch",
|
||||
"name": "Windows: bun test [file] (fast)",
|
||||
"program": "${workspaceFolder}/build/bun-debug.exe",
|
||||
@@ -532,6 +549,9 @@
|
||||
},
|
||||
{
|
||||
"type": "cppvsdbg",
|
||||
"sourceFileMap": {
|
||||
"D:\\a\\WebKit\\WebKit\\Source": "${workspaceFolder}\\src\\bun.js\\WebKit\\Source",
|
||||
},
|
||||
"request": "launch",
|
||||
"name": "Windows: bun test [file] (verbose)",
|
||||
"program": "${workspaceFolder}/build/bun-debug.exe",
|
||||
@@ -554,6 +574,9 @@
|
||||
},
|
||||
{
|
||||
"type": "cppvsdbg",
|
||||
"sourceFileMap": {
|
||||
"D:\\a\\WebKit\\WebKit\\Source": "${workspaceFolder}\\src\\bun.js\\WebKit\\Source",
|
||||
},
|
||||
"request": "launch",
|
||||
"name": "Windows: bun test [file] --inspect",
|
||||
"program": "${workspaceFolder}/build/bun-debug.exe",
|
||||
@@ -585,6 +608,9 @@
|
||||
},
|
||||
{
|
||||
"type": "cppvsdbg",
|
||||
"sourceFileMap": {
|
||||
"D:\\a\\WebKit\\WebKit\\Source": "${workspaceFolder}\\src\\bun.js\\WebKit\\Source",
|
||||
},
|
||||
"request": "launch",
|
||||
"name": "Windows: bun test [file] --inspect-brk",
|
||||
"program": "${workspaceFolder}/build/bun-debug.exe",
|
||||
@@ -617,6 +643,9 @@
|
||||
// Windows: bun run [file]
|
||||
{
|
||||
"type": "cppvsdbg",
|
||||
"sourceFileMap": {
|
||||
"D:\\a\\WebKit\\WebKit\\Source": "${workspaceFolder}\\src\\bun.js\\WebKit\\Source",
|
||||
},
|
||||
"request": "launch",
|
||||
"name": "Windows: bun run [file]",
|
||||
"program": "${workspaceFolder}/build/bun-debug.exe",
|
||||
@@ -639,6 +668,9 @@
|
||||
},
|
||||
{
|
||||
"type": "cppvsdbg",
|
||||
"sourceFileMap": {
|
||||
"D:\\a\\WebKit\\WebKit\\Source": "${workspaceFolder}\\src\\bun.js\\WebKit\\Source",
|
||||
},
|
||||
"request": "launch",
|
||||
"name": "Windows: bun install",
|
||||
"program": "${workspaceFolder}/build/bun-debug.exe",
|
||||
@@ -658,6 +690,9 @@
|
||||
},
|
||||
{
|
||||
"type": "cppvsdbg",
|
||||
"sourceFileMap": {
|
||||
"D:\\a\\WebKit\\WebKit\\Source": "${workspaceFolder}\\src\\bun.js\\WebKit\\Source",
|
||||
},
|
||||
"request": "launch",
|
||||
"name": "Windows: bun run [file] (verbose)",
|
||||
"program": "${workspaceFolder}/build/bun-debug.exe",
|
||||
@@ -680,6 +715,9 @@
|
||||
},
|
||||
{
|
||||
"type": "cppvsdbg",
|
||||
"sourceFileMap": {
|
||||
"D:\\a\\WebKit\\WebKit\\Source": "${workspaceFolder}\\src\\bun.js\\WebKit\\Source",
|
||||
},
|
||||
"request": "launch",
|
||||
"name": "Windows: bun run [file] --inspect",
|
||||
"program": "${workspaceFolder}/build/bun-debug.exe",
|
||||
@@ -711,6 +749,9 @@
|
||||
},
|
||||
{
|
||||
"type": "cppvsdbg",
|
||||
"sourceFileMap": {
|
||||
"D:\\a\\WebKit\\WebKit\\Source": "${workspaceFolder}\\src\\bun.js\\WebKit\\Source",
|
||||
},
|
||||
"request": "launch",
|
||||
"name": "Windows: bun run [file] --inspect-brk",
|
||||
"program": "${workspaceFolder}/build/bun-debug.exe",
|
||||
@@ -743,6 +784,9 @@
|
||||
// Windows: bun test [...]
|
||||
{
|
||||
"type": "cppvsdbg",
|
||||
"sourceFileMap": {
|
||||
"D:\\a\\WebKit\\WebKit\\Source": "${workspaceFolder}\\src\\bun.js\\WebKit\\Source",
|
||||
},
|
||||
"request": "launch",
|
||||
"name": "Windows: bun test [...]",
|
||||
"program": "${workspaceFolder}/build/bun-debug.exe",
|
||||
@@ -765,6 +809,9 @@
|
||||
},
|
||||
{
|
||||
"type": "cppvsdbg",
|
||||
"sourceFileMap": {
|
||||
"D:\\a\\WebKit\\WebKit\\Source": "${workspaceFolder}\\src\\bun.js\\WebKit\\Source",
|
||||
},
|
||||
"request": "launch",
|
||||
"name": "Windows: bun test [...] (fast)",
|
||||
"program": "${workspaceFolder}/build/bun-debug.exe",
|
||||
@@ -787,6 +834,9 @@
|
||||
},
|
||||
{
|
||||
"type": "cppvsdbg",
|
||||
"sourceFileMap": {
|
||||
"D:\\a\\WebKit\\WebKit\\Source": "${workspaceFolder}\\src\\bun.js\\WebKit\\Source",
|
||||
},
|
||||
"request": "launch",
|
||||
"name": "Windows: bun test [...] (verbose)",
|
||||
"program": "${workspaceFolder}/build/bun-debug.exe",
|
||||
@@ -809,6 +859,9 @@
|
||||
},
|
||||
{
|
||||
"type": "cppvsdbg",
|
||||
"sourceFileMap": {
|
||||
"D:\\a\\WebKit\\WebKit\\Source": "${workspaceFolder}\\src\\bun.js\\WebKit\\Source",
|
||||
},
|
||||
"request": "launch",
|
||||
"name": "Windows: bun test [...] --watch",
|
||||
"program": "${workspaceFolder}/build/bun-debug.exe",
|
||||
@@ -831,6 +884,9 @@
|
||||
},
|
||||
{
|
||||
"type": "cppvsdbg",
|
||||
"sourceFileMap": {
|
||||
"D:\\a\\WebKit\\WebKit\\Source": "${workspaceFolder}\\src\\bun.js\\WebKit\\Source",
|
||||
},
|
||||
"request": "launch",
|
||||
"name": "Windows: bun test [...] --hot",
|
||||
"program": "${workspaceFolder}/build/bun-debug.exe",
|
||||
@@ -853,6 +909,9 @@
|
||||
},
|
||||
{
|
||||
"type": "cppvsdbg",
|
||||
"sourceFileMap": {
|
||||
"D:\\a\\WebKit\\WebKit\\Source": "${workspaceFolder}\\src\\bun.js\\WebKit\\Source",
|
||||
},
|
||||
"request": "launch",
|
||||
"name": "Windows: bun test [...] --inspect",
|
||||
"program": "${workspaceFolder}/build/bun-debug.exe",
|
||||
@@ -884,6 +943,9 @@
|
||||
},
|
||||
{
|
||||
"type": "cppvsdbg",
|
||||
"sourceFileMap": {
|
||||
"D:\\a\\WebKit\\WebKit\\Source": "${workspaceFolder}\\src\\bun.js\\WebKit\\Source",
|
||||
},
|
||||
"request": "launch",
|
||||
"name": "Windows: bun test [...] --inspect-brk",
|
||||
"program": "${workspaceFolder}/build/bun-debug.exe",
|
||||
@@ -916,6 +978,9 @@
|
||||
// Windows: bun exec [...]
|
||||
{
|
||||
"type": "cppvsdbg",
|
||||
"sourceFileMap": {
|
||||
"D:\\a\\WebKit\\WebKit\\Source": "${workspaceFolder}\\src\\bun.js\\WebKit\\Source",
|
||||
},
|
||||
"request": "launch",
|
||||
"name": "Windows: bun exec [...]",
|
||||
"program": "${workspaceFolder}/build/bun-debug.exe",
|
||||
@@ -939,6 +1004,9 @@
|
||||
// Windows: bun test [*]
|
||||
{
|
||||
"type": "cppvsdbg",
|
||||
"sourceFileMap": {
|
||||
"D:\\a\\WebKit\\WebKit\\Source": "${workspaceFolder}\\src\\bun.js\\WebKit\\Source",
|
||||
},
|
||||
"request": "launch",
|
||||
"name": "Windows: bun test [*]",
|
||||
"program": "${workspaceFolder}/build/bun-debug.exe",
|
||||
@@ -961,6 +1029,9 @@
|
||||
},
|
||||
{
|
||||
"type": "cppvsdbg",
|
||||
"sourceFileMap": {
|
||||
"D:\\a\\WebKit\\WebKit\\Source": "${workspaceFolder}\\src\\bun.js\\WebKit\\Source",
|
||||
},
|
||||
"request": "launch",
|
||||
"name": "Windows: bun test [*] (fast)",
|
||||
"program": "${workspaceFolder}/build/bun-debug.exe",
|
||||
@@ -983,6 +1054,9 @@
|
||||
},
|
||||
{
|
||||
"type": "cppvsdbg",
|
||||
"sourceFileMap": {
|
||||
"D:\\a\\WebKit\\WebKit\\Source": "${workspaceFolder}\\src\\bun.js\\WebKit\\Source",
|
||||
},
|
||||
"request": "launch",
|
||||
"name": "Windows: bun test [*] --inspect",
|
||||
"program": "${workspaceFolder}/build/bun-debug.exe",
|
||||
@@ -1014,11 +1088,14 @@
},
{
"type": "cppvsdbg",
"sourceFileMap": {
"D:\\a\\WebKit\\WebKit\\Source": "${workspaceFolder}\\src\\bun.js\\WebKit\\Source",
},
"request": "launch",
"name": "Windows: bun test [*] (ci)",
"program": "node",
"args": ["src/runner.node.mjs"],
"cwd": "${workspaceFolder}/packages/bun-internal-test",
"args": ["test/runner.node.mjs"],
"cwd": "${workspaceFolder}",
"console": "internalConsole",
},
],

7 .vscode/settings.json (vendored)
@@ -26,8 +26,12 @@

// Zig
"zig.initialSetupDone": true,
"zig.buildOnSave": false,
"zig.buildOption": "build",
"zig.zls.zigLibPath": "${workspaceFolder}/src/deps/zig/lib",
"zig.buildArgs": ["-Dgenerated-code=./build/codegen"],
"zig.zls.buildOnSaveStep": "check",
// "zig.zls.enableBuildOnSave": true,
// "zig.buildOnSave": true,
"zig.buildFilePath": "${workspaceFolder}/build.zig",
"zig.path": "${workspaceFolder}/.cache/zig/zig.exe",
"zig.formattingProvider": "zls",
@@ -147,4 +151,5 @@
"WebKit/WebKitBuild": true,
"WebKit/WebInspectorUI": true,
},
"git.detectSubmodules": false,
}

244 CMakeLists.txt
@@ -2,8 +2,9 @@ cmake_minimum_required(VERSION 3.22)
|
||||
cmake_policy(SET CMP0091 NEW)
|
||||
cmake_policy(SET CMP0067 NEW)
|
||||
|
||||
set(Bun_VERSION "1.1.9")
|
||||
set(WEBKIT_TAG 2c4f31e10974404bc8316a70d491ec0f400c880d)
|
||||
set(CMAKE_POLICY_DEFAULT_CMP0069 NEW)
|
||||
set(Bun_VERSION "1.1.21")
|
||||
set(WEBKIT_TAG 49907bff8781719bc2ded068b0c934f6d0074d1e)
|
||||
|
||||
set(BUN_WORKDIR "${CMAKE_CURRENT_BINARY_DIR}")
|
||||
message(STATUS "Configuring Bun ${Bun_VERSION} in ${BUN_WORKDIR}")
|
||||
@@ -14,10 +15,14 @@ set(CMAKE_C_STANDARD 17)
|
||||
set(CMAKE_CXX_STANDARD_REQUIRED ON)
|
||||
set(CMAKE_C_STANDARD_REQUIRED ON)
|
||||
|
||||
# Used in process.version, process.versions.node, napi, and elsewhere
|
||||
set(REPORTED_NODEJS_VERSION "22.3.0")
|
||||
|
||||
# WebKit uses -std=gnu++20 on non-macOS non-Windows
|
||||
# If we do not set this, it will crash at startup on the first memory allocation.
|
||||
if(NOT WIN32 AND NOT APPLE)
|
||||
set(CMAKE_CXX_EXTENSIONS ON)
|
||||
set(CMAKE_POSITION_INDEPENDENT_CODE FALSE)
|
||||
endif()
|
||||
|
||||
# --- Build Type ---
|
||||
@@ -34,6 +39,13 @@ else()
|
||||
message(STATUS "The CMake build type is: ${CMAKE_BUILD_TYPE}")
|
||||
endif()
|
||||
|
||||
if(WIN32 AND NOT CMAKE_CL_SHOWINCLUDES_PREFIX)
|
||||
# workaround until cmake fix is shipped https://github.com/ninja-build/ninja/issues/2280
|
||||
# './build/.ninja_deps' may need to be deleted; the bug is that "Note: including file: ..." is saved
# as part of some file paths
|
||||
set(CMAKE_CL_SHOWINCLUDES_PREFIX "Note: including file:")
|
||||
endif()
|
||||
|
||||
if(CMAKE_BUILD_TYPE STREQUAL "Debug")
|
||||
set(DEBUG ON)
|
||||
set(DEFAULT_ZIG_OPTIMIZE "Debug")
|
||||
@@ -46,11 +58,8 @@ elseif(CMAKE_BUILD_TYPE STREQUAL "Release")
|
||||
set(DEFAULT_ZIG_OPTIMIZE "ReleaseFast")
|
||||
|
||||
if(WIN32)
|
||||
# lld-link will strip it for you, so we can build directly to bun.exe
|
||||
# Debug symbols are in a separate file: bun.pdb
|
||||
set(bun "bun")
|
||||
|
||||
# TODO(@paperdave): Remove this
|
||||
# it is enabled for the time being to make sure to catch more bugs in the experimental windows builds
|
||||
set(DEFAULT_ZIG_OPTIMIZE "ReleaseSafe")
|
||||
else()
|
||||
if(ZIG_OPTIMIZE STREQUAL "Debug")
|
||||
@@ -63,7 +72,7 @@ endif()
|
||||
|
||||
# --- MacOS SDK ---
|
||||
if(APPLE AND DEFINED ENV{CI})
|
||||
set(CMAKE_OSX_DEPLOYMENT_TARGET "12.0")
|
||||
set(CMAKE_OSX_DEPLOYMENT_TARGET "13.0")
|
||||
endif()
|
||||
|
||||
if(APPLE AND NOT CMAKE_OSX_DEPLOYMENT_TARGET)
|
||||
@@ -107,7 +116,11 @@ endif()
|
||||
# we do some extra work afterwards to double-check, and we will rerun BUN_FIND_LLVM if the compiler did not match.
|
||||
#
|
||||
# If the user passes -DLLVM_PREFIX, most of this logic is skipped, but we still warn if invalid.
|
||||
set(LLVM_VERSION 16)
|
||||
if(WIN32 OR APPLE)
|
||||
set(LLVM_VERSION 18)
|
||||
else()
|
||||
set(LLVM_VERSION 16)
|
||||
endif()
|
||||
|
||||
macro(BUN_FIND_LLVM)
|
||||
find_program(
|
||||
@@ -141,11 +154,12 @@ macro(BUN_FIND_LLVM)
|
||||
PATHS ENV PATH ${PLATFORM_LLVM_SEARCH_PATHS}
|
||||
DOC "Path to LLVM ${LLVM_VERSION}'s llvm-strip binary"
|
||||
)
|
||||
|
||||
find_program(
|
||||
STRIP
|
||||
NAMES strip
|
||||
PATHS ENV PATH ${PLATFORM_LLVM_SEARCH_PATHS}
|
||||
DOC "Path to LLVM ${LLVM_VERSION}'s llvm-strip binary"
|
||||
DOC "Path to strip binary"
|
||||
)
|
||||
find_program(
|
||||
DSYMUTIL
|
||||
@@ -301,12 +315,12 @@ option(USE_CUSTOM_LIBARCHIVE "Use Bun's recommended version of libarchive" ON)
|
||||
option(USE_CUSTOM_MIMALLOC "Use Bun's recommended version of Mimalloc" ON)
|
||||
option(USE_CUSTOM_ZSTD "Use Bun's recommended version of zstd" ON)
|
||||
option(USE_CUSTOM_CARES "Use Bun's recommended version of c-ares" ON)
|
||||
option(USE_CUSTOM_BASE64 "Use Bun's recommended version of libbase64" ON)
|
||||
option(USE_CUSTOM_LOLHTML "Use Bun's recommended version of lolhtml" ON)
|
||||
option(USE_CUSTOM_TINYCC "Use Bun's recommended version of tinycc" ON)
|
||||
option(USE_CUSTOM_LIBUV "Use Bun's recommended version of libuv (Windows only)" ON)
|
||||
option(USE_CUSTOM_LSHPACK "Use Bun's recommended version of ls-hpack" ON)
|
||||
option(USE_BASELINE_BUILD "Build Bun for baseline (older) CPUs" OFF)
|
||||
option(USE_SYSTEM_ICU "Use the system-provided libicu. May fix startup crashes when building WebKit yourself." OFF)
|
||||
|
||||
option(USE_VALGRIND "Build Bun with Valgrind support (Linux only)" OFF)
|
||||
|
||||
@@ -317,10 +331,19 @@ option(USE_STATIC_LIBATOMIC "Statically link libatomic, requires the presence of
|
||||
|
||||
option(USE_LTO "Enable Link-Time Optimization" ${DEFAULT_LTO})
|
||||
|
||||
if(APPLE AND USE_LTO)
|
||||
set(USE_LTO OFF)
|
||||
message(WARNING "Link-Time Optimization is not supported on macOS because it requires -fuse-ld=lld and lld causes many segfaults on macOS (likely related to stack size)")
|
||||
endif()
|
||||
|
||||
if(WIN32 AND USE_LTO)
|
||||
set(CMAKE_LINKER_TYPE LLD)
|
||||
set(CMAKE_INTERPROCEDURAL_OPTIMIZATION OFF)
|
||||
endif()
|
||||
|
||||
option(BUN_TIDY_ONLY "Only run clang-tidy" OFF)
|
||||
option(BUN_TIDY_ONLY_EXTRA " Only run clang-tidy, with extra checks for local development" OFF)
|
||||
|
||||
|
||||
if(NOT ZIG_LIB_DIR)
|
||||
cmake_path(SET ZIG_LIB_DIR NORMALIZE "${CMAKE_CURRENT_SOURCE_DIR}/src/deps/zig/lib")
|
||||
endif()
|
||||
@@ -340,6 +363,10 @@ if(NOT CANARY)
|
||||
set(CANARY 0)
|
||||
endif()
|
||||
|
||||
if(NOT ENABLE_LOGS)
|
||||
set(ENABLE_LOGS false)
|
||||
endif()
|
||||
|
||||
if(NOT ZIG_OPTIMIZE)
|
||||
set(ZIG_OPTIMIZE ${DEFAULT_ZIG_OPTIMIZE})
|
||||
endif()
|
||||
@@ -607,7 +634,7 @@ set(BUN_DEPS_DIR "${BUN_SRC}/deps")
|
||||
set(BUN_CODEGEN_SRC "${BUN_SRC}/codegen")
|
||||
|
||||
if(NOT BUN_DEPS_OUT_DIR)
|
||||
set(BUN_DEPS_OUT_DIR "${BUN_DEPS_DIR}")
|
||||
set(BUN_DEPS_OUT_DIR "${CMAKE_CURRENT_SOURCE_DIR}/build/bun-deps")
|
||||
endif()
|
||||
|
||||
set(BUN_RAW_SOURCES, "")
|
||||
@@ -625,16 +652,6 @@ file(GLOB BUN_CPP ${CONFIGURE_DEPENDS}
|
||||
)
|
||||
list(APPEND BUN_RAW_SOURCES ${BUN_CPP})
|
||||
|
||||
# -- Brotli --
|
||||
set(BROTLI_SRC "${CMAKE_CURRENT_SOURCE_DIR}/src/deps/brotli")
|
||||
file(GLOB BROTLI_FILES ${CONFIGURE_DEPENDS}
|
||||
"${BROTLI_SRC}/common/*.c"
|
||||
"${BROTLI_SRC}/enc/*.c"
|
||||
"${BROTLI_SRC}/dec/*.c"
|
||||
)
|
||||
list(APPEND BUN_RAW_SOURCES ${BROTLI_FILES})
|
||||
include_directories("${BUN_DEPS_DIR}/brotli/include")
|
||||
|
||||
# -- uSockets --
|
||||
set(USOCKETS_SRC "${CMAKE_CURRENT_SOURCE_DIR}/packages/bun-usockets/src")
|
||||
file(GLOB USOCKETS_FILES ${CONFIGURE_DEPENDS}
|
||||
@@ -765,6 +782,8 @@ if(NOT NO_CODEGEN)
|
||||
"${BUN_SRC}/js/thirdparty/*.ts"
|
||||
"${BUN_SRC}/js/internal/*.js"
|
||||
"${BUN_SRC}/js/internal/*.ts"
|
||||
"${BUN_SRC}/js/internal/util/*.js"
|
||||
"${BUN_SRC}/js/internal/fs/*.ts"
|
||||
"${BUN_SRC}/js/node/*.js"
|
||||
"${BUN_SRC}/js/node/*.ts"
|
||||
"${BUN_SRC}/js/thirdparty/*.js"
|
||||
@@ -848,11 +867,24 @@ file(GLOB ZIG_FILES
|
||||
"${BUN_SRC}/*/*/*/*/*.zig"
|
||||
)
|
||||
|
||||
if(NOT BUN_ZIG_OBJ)
|
||||
set(BUN_ZIG_OBJ "${BUN_WORKDIR}/CMakeFiles/bun-zig.o")
|
||||
if(NOT BUN_ZIG_OBJ_FORMAT)
|
||||
# To use LLVM bitcode from Zig, more work needs to be done. Currently, an install of
# LLVM 18.1.7 is not compatible with the bitcode that Zig 0.13 (which bundles LLVM 18.1.7) outputs.
# Change to "bc" to experiment; "Invalid record" means the output is not valid.
|
||||
set(BUN_ZIG_OBJ_FORMAT "obj")
|
||||
endif()
|
||||
|
||||
get_filename_component(BUN_ZIG_OBJ "${BUN_ZIG_OBJ}" REALPATH BASE_DIR "${CMAKE_BINARY_DIR}")
|
||||
if(NOT BUN_ZIG_OBJ_DIR)
|
||||
set(BUN_ZIG_OBJ_DIR "${BUN_WORKDIR}/CMakeFiles")
|
||||
endif()
|
||||
|
||||
get_filename_component(BUN_ZIG_OBJ_DIR "${BUN_ZIG_OBJ_DIR}" REALPATH BASE_DIR "${CMAKE_BINARY_DIR}")
|
||||
|
||||
if(WIN32)
|
||||
set(BUN_ZIG_OBJ "${BUN_ZIG_OBJ_DIR}/bun-zig.o")
|
||||
else()
|
||||
set(BUN_ZIG_OBJ "${BUN_ZIG_OBJ_DIR}/bun-zig.o")
|
||||
endif()
|
||||
|
||||
set(USES_TERMINAL_NOT_IN_CI "")
|
||||
|
||||
@@ -866,7 +898,8 @@ if(NOT BUN_LINK_ONLY AND NOT BUN_CPP_ONLY)
|
||||
COMMAND
|
||||
"${ZIG_COMPILER}" "build" "obj"
|
||||
"--zig-lib-dir" "${ZIG_LIB_DIR}"
|
||||
"-Doutput-file=${BUN_ZIG_OBJ}"
|
||||
"--prefix" "${BUN_ZIG_OBJ_DIR}"
|
||||
"--verbose"
|
||||
"-Dgenerated-code=${BUN_WORKDIR}/codegen"
|
||||
"-freference-trace=10"
|
||||
"-Dversion=${Bun_VERSION}"
|
||||
@@ -874,6 +907,9 @@ if(NOT BUN_LINK_ONLY AND NOT BUN_CPP_ONLY)
|
||||
"-Doptimize=${ZIG_OPTIMIZE}"
|
||||
"-Dcpu=${CPU_TARGET}"
|
||||
"-Dtarget=${ZIG_TARGET}"
|
||||
"-Denable_logs=${ENABLE_LOGS}"
|
||||
"-Dreported_nodejs_version=${REPORTED_NODEJS_VERSION}"
|
||||
"-Dobj_format=${BUN_ZIG_OBJ_FORMAT}"
|
||||
DEPENDS
|
||||
"${CMAKE_CURRENT_SOURCE_DIR}/build.zig"
|
||||
"${ZIG_FILES}"
|
||||
@@ -937,12 +973,15 @@ set_target_properties(${bun} PROPERTIES
|
||||
VISIBILITY_INLINES_HIDDEN YES
|
||||
)
|
||||
|
||||
if(APPLE)
|
||||
add_compile_definitions("__DARWIN_NON_CANCELABLE=1")
|
||||
endif()
|
||||
|
||||
add_compile_definitions(
|
||||
|
||||
# TODO: are all of these variables strictly necessary?
|
||||
"_HAS_EXCEPTIONS=0"
|
||||
"LIBUS_USE_OPENSSL=1"
|
||||
"UWS_HTTPRESPONSE_NO_WRITEMARK=1"
|
||||
"LIBUS_USE_BORINGSSL=1"
|
||||
"WITH_BORINGSSL=1"
|
||||
"STATICALLY_LINKED_WITH_JavaScriptCore=1"
|
||||
@@ -956,6 +995,7 @@ add_compile_definitions(
|
||||
"IS_BUILD"
|
||||
"BUILDING_JSCONLY__"
|
||||
"BUN_DYNAMIC_JS_LOAD_PATH=\"${BUN_WORKDIR}/js\""
|
||||
"REPORTED_NODEJS_VERSION=\"${REPORTED_NODEJS_VERSION}\""
|
||||
)
|
||||
|
||||
if(NOT ASSERT_ENABLED)
|
||||
@@ -1046,7 +1086,7 @@ elseif(CMAKE_BUILD_TYPE STREQUAL "Release")
|
||||
|
||||
if(NOT WIN32)
|
||||
if(USE_LTO)
|
||||
list(APPEND LTO_FLAG "-flto=full" "-emit-llvm")
|
||||
list(APPEND LTO_FLAG "-flto=full" "-emit-llvm" "-fwhole-program-vtables" "-fforce-emit-vtables")
|
||||
endif()
|
||||
|
||||
# Leave -Werror=unused off in release builds so we avoid errors from being used in ASSERT
|
||||
@@ -1065,13 +1105,38 @@ elseif(CMAKE_BUILD_TYPE STREQUAL "Release")
|
||||
set(LTO_LINK_FLAG "")
|
||||
|
||||
if(USE_LTO)
|
||||
# -emit-llvm seems to not be supported or under a different name on Windows.
|
||||
target_compile_options(${bun} PUBLIC -Xclang -emit-llvm-bc)
|
||||
|
||||
list(APPEND LTO_FLAG "-flto=full")
|
||||
list(APPEND LTO_LINK_FLAG "-flto=full")
|
||||
list(APPEND LTO_LINK_FLAG "/LTCG")
|
||||
list(APPEND LTO_LINK_FLAG "/OPT:REF")
|
||||
list(APPEND LTO_LINK_FLAG "/OPT:NOICF")
|
||||
endif()
|
||||
|
||||
target_compile_options(${bun} PUBLIC /O2 ${LTO_FLAG} /DEBUG:FULL)
|
||||
target_link_options(${bun} PUBLIC ${LTO_LINK_FLAG} /DEBUG:FULL)
|
||||
target_compile_options(${bun} PUBLIC
|
||||
/O2
|
||||
${LTO_FLAG}
|
||||
/Gy
|
||||
/Gw
|
||||
/GF
|
||||
/GA
|
||||
)
|
||||
target_link_options(${bun} PUBLIC
|
||||
${LTO_LINK_FLAG}
|
||||
/DEBUG:FULL
|
||||
|
||||
/delayload:ole32.dll
|
||||
/delayload:WINMM.dll
|
||||
/delayload:dbghelp.dll
|
||||
/delayload:VCRUNTIME140_1.dll
|
||||
|
||||
# libuv loads these two immediately, but for some reason it seems to still be slightly faster to delayload them
|
||||
/delayload:WS2_32.dll
|
||||
/delayload:WSOCK32.dll
|
||||
/delayload:ADVAPI32.dll
|
||||
/delayload:IPHLPAPI.dll
|
||||
)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
@@ -1089,6 +1154,11 @@ else()
|
||||
# On arm macOS, we can set it to a minimum of the M1 cpu set. this might be the default already.
|
||||
target_compile_options(${bun} PUBLIC "-mcpu=apple-m1")
|
||||
endif()
|
||||
|
||||
if(NOT WIN32 AND NOT APPLE AND ARCH STREQUAL "aarch64")
|
||||
# on arm64 linux, we set a minimum of armv8
|
||||
target_compile_options(${bun} PUBLIC -march=armv8-a+crc -mtune=ampere1)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
target_compile_options(${bun} PUBLIC -ferror-limit=${ERROR_LIMIT})
|
||||
@@ -1102,23 +1172,29 @@ if(WIN32)
|
||||
"BORINGSSL_NO_CXX=1" # lol
|
||||
)
|
||||
|
||||
# set_property(TARGET ${bun} PROPERTY MSVC_RUNTIME_LIBRARY "MultiThreaded")
|
||||
set_property(TARGET ${bun} PROPERTY MSVC_RUNTIME_LIBRARY "MultiThreadedDLL")
|
||||
# set_property(TARGET ${bun} PROPERTY MSVC_RUNTIME_LIBRARY "MultiThreaded$<$<CONFIG:Debug>:Debug>")
|
||||
set_property(TARGET ${bun} PROPERTY MSVC_RUNTIME_LIBRARY "MultiThreaded")
|
||||
|
||||
target_compile_options(${bun} PUBLIC "/EHsc" "/GR-")
|
||||
target_link_options(${bun} PUBLIC "/STACK:0x1200000,0x100000")
|
||||
target_compile_options(${bun} PUBLIC "/EHsc" "/GR-" -Xclang -fno-c++-static-destructors)
|
||||
|
||||
target_link_options(${bun} PUBLIC "/STACK:0x1200000,0x100000" "/DEF:${BUN_SRC}/symbols.def" "/errorlimit:0")
|
||||
else()
|
||||
target_compile_options(${bun} PUBLIC
|
||||
-fPIC
|
||||
-mtune=${CPU_TARGET}
|
||||
-fconstexpr-steps=2542484
|
||||
-fconstexpr-depth=54
|
||||
-fno-exceptions
|
||||
-fno-asynchronous-unwind-tables
|
||||
-fno-unwind-tables
|
||||
-fno-c++-static-destructors
|
||||
-fvisibility=hidden
|
||||
-fvisibility-inlines-hidden
|
||||
-fno-rtti
|
||||
-fno-omit-frame-pointer
|
||||
-mno-omit-leaf-frame-pointer
|
||||
-fno-pic
|
||||
-fno-pie
|
||||
-faddrsig
|
||||
)
|
||||
endif()
|
||||
|
||||
@@ -1128,17 +1204,18 @@ if(APPLE)
|
||||
target_link_options(${bun} PUBLIC "-Wl,-stack_size,0x1200000")
|
||||
target_link_options(${bun} PUBLIC "-exported_symbols_list" "${BUN_SRC}/symbols.txt")
|
||||
set_target_properties(${bun} PROPERTIES LINK_DEPENDS "${BUN_SRC}/symbols.txt")
|
||||
|
||||
target_link_options(${bun} PUBLIC "-fno-keep-static-consts")
|
||||
target_link_libraries(${bun} PRIVATE "resolv")
|
||||
endif()
|
||||
|
||||
if(UNIX AND NOT APPLE)
|
||||
target_link_options(${bun} PUBLIC
|
||||
"-fuse-ld=lld"
|
||||
"-static-libstdc++"
|
||||
"-static-libgcc"
|
||||
"-Wl,-z,now"
|
||||
-fuse-ld=lld
|
||||
-fno-pic
|
||||
-static-libstdc++
|
||||
-static-libgcc
|
||||
"-Wl,-no-pie"
|
||||
"-Wl,-icf=safe"
|
||||
"-Wl,--as-needed"
|
||||
"-Wl,--gc-sections"
|
||||
"-Wl,-z,stack-size=12800000"
|
||||
@@ -1167,6 +1244,8 @@ if(UNIX AND NOT APPLE)
|
||||
"-rdynamic"
|
||||
"-Wl,--dynamic-list=${BUN_SRC}/symbols.dyn"
|
||||
"-Wl,--version-script=${BUN_SRC}/linker.lds"
|
||||
-Wl,-z,lazy
|
||||
-Wl,-z,norelro
|
||||
)
|
||||
|
||||
target_link_libraries(${bun} PRIVATE "c")
|
||||
@@ -1179,9 +1258,15 @@ if(UNIX AND NOT APPLE)
|
||||
target_link_libraries(${bun} PRIVATE "libatomic.a")
|
||||
endif()
|
||||
|
||||
target_link_libraries(${bun} PRIVATE "${WEBKIT_LIB_DIR}/libicudata.a")
|
||||
target_link_libraries(${bun} PRIVATE "${WEBKIT_LIB_DIR}/libicui18n.a")
|
||||
target_link_libraries(${bun} PRIVATE "${WEBKIT_LIB_DIR}/libicuuc.a")
|
||||
if(USE_SYSTEM_ICU)
|
||||
target_link_libraries(${bun} PRIVATE "libicudata.a")
|
||||
target_link_libraries(${bun} PRIVATE "libicui18n.a")
|
||||
target_link_libraries(${bun} PRIVATE "libicuuc.a")
|
||||
else()
|
||||
target_link_libraries(${bun} PRIVATE "${WEBKIT_LIB_DIR}/libicudata.a")
|
||||
target_link_libraries(${bun} PRIVATE "${WEBKIT_LIB_DIR}/libicui18n.a")
|
||||
target_link_libraries(${bun} PRIVATE "${WEBKIT_LIB_DIR}/libicuuc.a")
|
||||
endif()
|
||||
|
||||
set_target_properties(${bun} PROPERTIES LINK_DEPENDS "${BUN_SRC}/linker.lds")
|
||||
set_target_properties(${bun} PROPERTIES LINK_DEPENDS "${BUN_SRC}/symbols.dyn")
|
||||
@@ -1194,12 +1279,16 @@ endif()
|
||||
|
||||
# --- Stripped Binary "bun"
|
||||
if(CMAKE_BUILD_TYPE STREQUAL "Release" AND NOT WIN32 AND NOT ASSERT_ENABLED)
|
||||
# add_custom_command(
|
||||
# TARGET ${bun}
|
||||
# POST_BUILD
|
||||
# COMMAND ${DSYMUTIL} -o ${BUN_WORKDIR}/bun.dSYM ${BUN_WORKDIR}/${bun}
|
||||
# COMMENT "Stripping Symbols"
|
||||
# )
|
||||
# if(CI AND APPLE)
|
||||
if(APPLE)
|
||||
add_custom_command(
|
||||
TARGET ${bun}
|
||||
POST_BUILD
|
||||
COMMAND ${DSYMUTIL} -o ${BUN_WORKDIR}/${bun}.dSYM ${BUN_WORKDIR}/${bun}
|
||||
COMMENT "Generating .dSYM"
|
||||
)
|
||||
endif()
|
||||
|
||||
add_custom_command(
|
||||
TARGET ${bun}
|
||||
POST_BUILD
|
||||
@@ -1277,11 +1366,11 @@ if(USE_CUSTOM_MIMALLOC)
|
||||
elseif(APPLE)
|
||||
if(USE_DEBUG_JSC OR CMAKE_BUILD_TYPE STREQUAL "Debug")
|
||||
message(STATUS "Using debug mimalloc")
|
||||
target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/libmimalloc-debug.a")
|
||||
target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/libmimalloc-debug.o")
|
||||
else()
|
||||
# https://github.com/microsoft/mimalloc/issues/512
|
||||
# Linking mimalloc via object file on macOS x64 can cause heap corruption
|
||||
target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/libmimalloc.a")
|
||||
# Note: https://github.com/microsoft/mimalloc/issues/512
|
||||
# It may have been a bug in our code at the time.
|
||||
target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/libmimalloc.o")
|
||||
endif()
|
||||
else()
|
||||
if(USE_DEBUG_JSC OR CMAKE_BUILD_TYPE STREQUAL "Debug")
|
||||
@@ -1322,19 +1411,6 @@ else()
|
||||
target_link_libraries(${bun} PRIVATE c-ares::cares)
|
||||
endif()
|
||||
|
||||
if(USE_CUSTOM_BASE64)
|
||||
include_directories(${BUN_DEPS_DIR}/base64/include)
|
||||
|
||||
if(WIN32)
|
||||
target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/base64.lib")
|
||||
else()
|
||||
target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/libbase64.a")
|
||||
endif()
|
||||
else()
|
||||
find_package(base64 REQUIRED)
|
||||
target_link_libraries(${bun} PRIVATE base64::base64)
|
||||
endif()
|
||||
|
||||
if(USE_CUSTOM_TINYCC)
|
||||
if(WIN32)
|
||||
target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/tcc.lib")
|
||||
@@ -1380,6 +1456,11 @@ if(USE_STATIC_SQLITE)
|
||||
"SQLITE_ENABLE_JSON1=1"
|
||||
"SQLITE_ENABLE_MATH_FUNCTIONS=1"
|
||||
)
|
||||
|
||||
if(WIN32)
|
||||
target_compile_options(sqlite3 PRIVATE /MT /U_DLL)
|
||||
endif()
|
||||
|
||||
target_link_libraries(${bun} PRIVATE sqlite3)
|
||||
message(STATUS "Using static sqlite3")
|
||||
target_compile_definitions(${bun} PRIVATE "LAZY_LOAD_SQLITE=0")
|
||||
@@ -1388,6 +1469,24 @@ else()
|
||||
target_compile_definitions(${bun} PRIVATE "LAZY_LOAD_SQLITE=1")
|
||||
endif()
|
||||
|
||||
# -- Brotli --
|
||||
set(BROTLI_SRC "${CMAKE_CURRENT_SOURCE_DIR}/src/deps/brotli")
|
||||
file(GLOB BROTLI_FILES ${CONFIGURE_DEPENDS}
|
||||
"${BROTLI_SRC}/common/*.c"
|
||||
"${BROTLI_SRC}/enc/*.c"
|
||||
"${BROTLI_SRC}/dec/*.c"
|
||||
)
|
||||
add_library(brotli STATIC ${BROTLI_FILES})
|
||||
target_include_directories(brotli PRIVATE "${BROTLI_SRC}/include")
|
||||
target_compile_definitions(brotli PRIVATE "BROTLI_STATIC")
|
||||
|
||||
if(WIN32)
|
||||
target_compile_options(brotli PRIVATE /MT /U_DLL)
|
||||
endif()
|
||||
|
||||
target_link_libraries(${bun} PRIVATE brotli)
|
||||
include_directories("${BUN_DEPS_DIR}/brotli/include")
|
||||
|
||||
if(USE_CUSTOM_LSHPACK)
|
||||
include_directories(${BUN_DEPS_DIR}/ls-hpack)
|
||||
|
||||
@@ -1407,7 +1506,6 @@ if(NOT WIN32)
|
||||
target_link_libraries(${bun} PRIVATE "${WEBKIT_LIB_DIR}/libJavaScriptCore.a")
|
||||
target_link_libraries(${bun} PRIVATE "${WEBKIT_LIB_DIR}/libbmalloc.a")
|
||||
else()
|
||||
target_link_options(${bun} PRIVATE "-static")
|
||||
target_link_libraries(${bun} PRIVATE
|
||||
"${WEBKIT_LIB_DIR}/WTF.lib"
|
||||
"${WEBKIT_LIB_DIR}/JavaScriptCore.lib"
|
||||
@@ -1417,17 +1515,13 @@ else()
|
||||
winmm
|
||||
bcrypt
|
||||
ntdll
|
||||
ucrt
|
||||
userenv
|
||||
dbghelp
|
||||
wsock32 # ws2_32 required by TransmitFile aka sendfile on windows
|
||||
delayimp.lib
|
||||
)
|
||||
endif()
|
||||
|
||||
if(WIN32)
|
||||
# delayimp -delayload:shell32.dll -delayload:ole32.dll
|
||||
endif()
|
||||
|
||||
if(BUN_LINK_ONLY)
|
||||
message(STATUS "NOTE: BUN_LINK_ONLY is ON, this build config will only link the Bun executable")
|
||||
endif()
|
||||
@@ -1446,8 +1540,8 @@ if(BUN_TIDY_ONLY)
|
||||
set_target_properties(${bun} PROPERTIES CXX_CLANG_TIDY "${CLANG_TIDY_COMMAND}")
|
||||
endif()
|
||||
|
||||
if (BUN_TIDY_ONLY_EXTRA)
|
||||
if(BUN_TIDY_ONLY_EXTRA)
|
||||
find_program(CLANG_TIDY_EXE NAMES "clang-tidy")
|
||||
set(CLANG_TIDY_COMMAND "${CLANG_TIDY_EXE}" "-checks=-*,clang-analyzer-*,performance-*,-clang-analyzer-webkit.UncountedLambdaCapturesChecker" "--fix" "--fix-errors" "--format-style=webkit" "--warnings-as-errors=*")
|
||||
set_target_properties(${bun} PROPERTIES CXX_CLANG_TIDY "${CLANG_TIDY_COMMAND}")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
@@ -2,6 +2,11 @@ Configuring a development environment for Bun can take 10-30 minutes depending o

If you are using Windows, please refer to [this guide](/docs/project/building-windows).

{% details summary="For Ubuntu users" %}
TL;DR: Ubuntu 22.04 is suggested.
Bun currently requires `glibc >= 2.32` for development, which means that on Ubuntu 20.04 (glibc 2.31) you will likely hit `error: undefined symbol: __libc_single_threaded` and need extra configuration. Also, according to this [issue](https://github.com/llvm/llvm-project/issues/97314), LLVM 16 is no longer maintained for Ubuntu 24.04 (noble), so on 24.04 you may want to install LLVM 16 via `brew` instead. A quick way to check your glibc version is shown right after this note.
{% /details %}
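Not part of the original guide: as a quick sanity check before building, you can print the glibc version your system ships. The `ldd` utility is part of glibc, so its version line reflects it.

```bash
# Should report glibc >= 2.32 for Bun development
ldd --version | head -n 1
```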
## Install Dependencies

Using your system's package manager, install Bun's dependencies:
@@ -107,7 +112,7 @@ $ export PATH="$PATH:/usr/lib/llvm16/bin"

{% /codetabs %}

> ⚠️ Ubuntu distributions may require installation of the C++ standard library independently. See the [troubleshooting section](#span-file-not-found-on-ubuntu) for more information.
> ⚠️ Ubuntu distributions (<= 20.04) may require installation of the C++ standard library independently. See the [troubleshooting section](#span-file-not-found-on-ubuntu) for more information.

## Building Bun
@@ -311,3 +316,12 @@ $ bun setup -DUSE_STATIC_LIBATOMIC=OFF
```

The built version of Bun may not work on other systems if compiled this way.

## ccache conflicts with building TinyCC on macOS

If you run into issues with `ccache` when building TinyCC, try reinstalling ccache:

```bash
brew uninstall ccache
brew install ccache
```
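If a reinstall alone does not help, clearing ccache's local cache is another thing worth trying (this step is not in the original guide; `ccache -C` simply empties the cache):

```bash
# Clear the entire ccache cache, then retry the build
ccache -C
```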

78 Dockerfile
@@ -25,7 +25,9 @@ ARG CMAKE_BUILD_TYPE=Release
|
||||
|
||||
ARG NODE_VERSION="20"
|
||||
ARG LLVM_VERSION="16"
|
||||
ARG ZIG_VERSION="0.12.0-dev.1828+225fe6ddb"
|
||||
|
||||
ARG ZIG_VERSION="0.13.0"
|
||||
ARG ZIG_VERSION_SHORT="0.13.0"
|
||||
|
||||
ARG SCCACHE_BUCKET
|
||||
ARG SCCACHE_REGION
|
||||
@@ -50,11 +52,8 @@ ENV CI 1
|
||||
ENV CPU_TARGET=${CPU_TARGET}
|
||||
ENV BUILDARCH=${BUILDARCH}
|
||||
ENV BUN_DEPS_OUT_DIR=${BUN_DEPS_OUT_DIR}
|
||||
ENV BUN_ENABLE_LTO 1
|
||||
|
||||
ENV CXX=clang++-${LLVM_VERSION}
|
||||
ENV CC=clang-${LLVM_VERSION}
|
||||
ENV AR=/usr/bin/llvm-ar-${LLVM_VERSION}
|
||||
ENV LD=lld-${LLVM_VERSION}
|
||||
ENV LC_CTYPE=en_US.UTF-8
|
||||
ENV LC_ALL=en_US.UTF-8
|
||||
|
||||
@@ -91,6 +90,8 @@ RUN install_packages \
|
||||
clangd-${LLVM_VERSION} \
|
||||
libc++-${LLVM_VERSION}-dev \
|
||||
libc++abi-${LLVM_VERSION}-dev \
|
||||
llvm-${LLVM_VERSION}-runtime \
|
||||
llvm-${LLVM_VERSION}-dev \
|
||||
make \
|
||||
cmake \
|
||||
ninja-build \
|
||||
@@ -117,6 +118,15 @@ RUN install_packages \
|
||||
&& ln -sf /usr/bin/lldb-${LLVM_VERSION} /usr/bin/lldb \
|
||||
&& ln -sf /usr/bin/clangd-${LLVM_VERSION} /usr/bin/clangd \
|
||||
&& ln -sf /usr/bin/llvm-ar-${LLVM_VERSION} /usr/bin/llvm-ar \
|
||||
&& ln -sf /usr/bin/ld.lld /usr/bin/ld \
|
||||
&& ln -sf /usr/bin/llvm-ranlib-${LLVM_VERSION} /usr/bin/ranlib \
|
||||
&& ln -sf /usr/bin/clang /usr/bin/cc \
|
||||
&& ln -sf /usr/bin/clang /usr/bin/c89 \
|
||||
&& ln -sf /usr/bin/clang /usr/bin/c99 \
|
||||
&& ln -sf /usr/bin/clang++ /usr/bin/c++ \
|
||||
&& ln -sf /usr/bin/clang++ /usr/bin/g++ \
|
||||
&& ln -sf /usr/bin/llvm-ar /usr/bin/ar \
|
||||
&& ln -sf /usr/bin/clang /usr/bin/gcc \
|
||||
&& arch="$(dpkg --print-architecture)" \
|
||||
&& case "${arch##*-}" in \
|
||||
amd64) variant="x64";; \
|
||||
@@ -129,6 +139,7 @@ RUN install_packages \
|
||||
&& ln -s /usr/bin/bun /usr/bin/bunx \
|
||||
&& rm -rf bun-linux-${variant} bun-linux-${variant}.zip \
|
||||
&& mkdir -p ${BUN_DIR} ${BUN_DEPS_OUT_DIR}
|
||||
|
||||
# && if [ -n "${SCCACHE_BUCKET}" ]; then \
|
||||
# echo "Setting up sccache" \
|
||||
# && wget https://github.com/mozilla/sccache/releases/download/v0.5.4/sccache-v0.5.4-${BUILD_MACHINE_ARCH}-unknown-linux-musl.tar.gz \
|
||||
@@ -139,6 +150,7 @@ RUN install_packages \
|
||||
FROM bun-base as bun-base-with-zig
|
||||
|
||||
ARG ZIG_VERSION
|
||||
ARG ZIG_VERSION_SHORT
|
||||
ARG BUILD_MACHINE_ARCH
|
||||
ARG ZIG_FOLDERNAME=zig-linux-${BUILD_MACHINE_ARCH}-${ZIG_VERSION}
|
||||
ARG ZIG_FILENAME=${ZIG_FOLDERNAME}.tar.xz
|
||||
@@ -164,13 +176,14 @@ ENV CCACHE_DIR=${CCACHE_DIR}
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY src/deps/c-ares ${BUN_DIR}/src/deps/c-ares
|
||||
COPY scripts ${BUN_DIR}/scripts
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
RUN --mount=type=cache,target=${CCACHE_DIR} \
|
||||
cd $BUN_DIR \
|
||||
&& make c-ares \
|
||||
&& rm -rf ${BUN_DIR}/src/deps/c-ares ${BUN_DIR}/Makefile
|
||||
&& bash ./scripts/build-cares.sh \
|
||||
&& rm -rf ${BUN_DIR}/src/deps/c-ares ${BUN_DIR}/Makefile ${BUN_DIR}/scripts
|
||||
|
||||
FROM bun-base as lolhtml
|
||||
|
||||
@@ -201,13 +214,14 @@ ENV CPU_TARGET=${CPU_TARGET}
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY src/deps/mimalloc ${BUN_DIR}/src/deps/mimalloc
|
||||
COPY scripts ${BUN_DIR}/scripts
|
||||
|
||||
ARG CCACHE_DIR=/ccache
|
||||
ENV CCACHE_DIR=${CCACHE_DIR}
|
||||
|
||||
RUN --mount=type=cache,target=${CCACHE_DIR} \
|
||||
cd ${BUN_DIR} \
|
||||
&& make mimalloc \
|
||||
&& bash ./scripts/build-mimalloc.sh \
|
||||
&& rm -rf src/deps/mimalloc Makefile
|
||||
|
||||
FROM bun-base as mimalloc-debug
|
||||
@@ -237,14 +251,17 @@ ARG CCACHE_DIR=/ccache
|
||||
ENV CCACHE_DIR=${CCACHE_DIR}
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY CMakeLists.txt ${BUN_DIR}/CMakeLists.txt
|
||||
COPY scripts ${BUN_DIR}/scripts
|
||||
COPY src/deps/zlib ${BUN_DIR}/src/deps/zlib
|
||||
COPY package.json bun.lockb Makefile .gitmodules ${BUN_DIR}/
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
RUN --mount=type=cache,target=${CCACHE_DIR} \
|
||||
cd $BUN_DIR \
|
||||
&& make zlib \
|
||||
&& rm -rf src/deps/zlib Makefile
|
||||
&& bash ./scripts/build-zlib.sh && rm -rf src/deps/zlib scripts
|
||||
|
||||
|
||||
FROM bun-base as libarchive
|
||||
|
||||
@@ -256,15 +273,14 @@ ENV CCACHE_DIR=${CCACHE_DIR}
|
||||
|
||||
RUN install_packages autoconf automake libtool pkg-config
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY scripts ${BUN_DIR}/scripts
|
||||
COPY src/deps/libarchive ${BUN_DIR}/src/deps/libarchive
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
RUN --mount=type=cache,target=${CCACHE_DIR} \
|
||||
cd $BUN_DIR \
|
||||
&& make libarchive \
|
||||
&& rm -rf src/deps/libarchive Makefile
|
||||
&& bash ./scripts/build-libarchive.sh && rm -rf src/deps/libarchive .scripts
|
||||
|
||||
FROM bun-base as tinycc
|
||||
|
||||
@@ -284,6 +300,7 @@ ARG CPU_TARGET
|
||||
ENV CPU_TARGET=${CPU_TARGET}
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY scripts ${BUN_DIR}/scripts
|
||||
COPY src/deps/boringssl ${BUN_DIR}/src/deps/boringssl
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
@@ -293,22 +310,9 @@ ENV CCACHE_DIR=${CCACHE_DIR}
|
||||
|
||||
RUN --mount=type=cache,target=${CCACHE_DIR} \
|
||||
cd ${BUN_DIR} \
|
||||
&& make boringssl \
|
||||
&& bash ./scripts/build-boringssl.sh \
|
||||
&& rm -rf src/deps/boringssl Makefile
|
||||
|
||||
FROM bun-base as base64
|
||||
|
||||
ARG BUN_DIR
|
||||
ARG CPU_TARGET
|
||||
ENV CPU_TARGET=${CPU_TARGET}
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY src/deps/base64 ${BUN_DIR}/src/deps/base64
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
RUN cd $BUN_DIR && \
|
||||
make base64 && rm -rf src/deps/base64 Makefile
|
||||
|
||||
FROM bun-base as zstd
|
||||
|
||||
@@ -322,12 +326,14 @@ ENV CCACHE_DIR=${CCACHE_DIR}
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY src/deps/zstd ${BUN_DIR}/src/deps/zstd
|
||||
COPY scripts ${BUN_DIR}/scripts
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
RUN --mount=type=cache,target=${CCACHE_DIR} \
|
||||
cd $BUN_DIR \
|
||||
&& make zstd
|
||||
&& bash ./scripts/build-zstd.sh \
|
||||
&& rm -rf src/deps/zstd scripts
|
||||
|
||||
FROM bun-base as ls-hpack
|
||||
|
||||
@@ -341,12 +347,14 @@ ENV CCACHE_DIR=${CCACHE_DIR}
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY src/deps/ls-hpack ${BUN_DIR}/src/deps/ls-hpack
|
||||
COPY scripts ${BUN_DIR}/scripts
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
RUN --mount=type=cache,target=${CCACHE_DIR} \
|
||||
cd $BUN_DIR \
|
||||
&& make lshpack
|
||||
&& bash ./scripts/build-lshpack.sh \
|
||||
&& rm -rf src/deps/ls-hpack scripts
|
||||
|
||||
FROM bun-base-with-zig as bun-identifier-cache
|
||||
|
||||
@@ -471,7 +479,7 @@ RUN --mount=type=cache,target=${CCACHE_DIR} \
|
||||
-DWEBKIT_DIR="omit" \
|
||||
-DNO_CONFIGURE_DEPENDS=1 \
|
||||
-DNO_CODEGEN=1 \
|
||||
-DBUN_ZIG_OBJ="/tmp/bun-zig.o" \
|
||||
-DBUN_ZIG_OBJ_DIR="/tmp" \
|
||||
-DCANARY="${CANARY}" \
|
||||
-DZIG_COMPILER=system \
|
||||
-DZIG_LIB_DIR=$BUN_DIR/src/deps/zig/lib \
|
||||
@@ -502,12 +510,12 @@ RUN mkdir -p build bun-webkit
|
||||
|
||||
# lol
|
||||
COPY src/bun.js/bindings/sqlite/sqlite3.c ${BUN_DIR}/src/bun.js/bindings/sqlite/sqlite3.c
|
||||
COPY src/deps/brotli ${BUN_DIR}/src/deps/brotli
|
||||
|
||||
COPY src/symbols.dyn src/linker.lds ${BUN_DIR}/src/
|
||||
|
||||
COPY CMakeLists.txt ${BUN_DIR}/CMakeLists.txt
|
||||
COPY --from=zlib ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=base64 ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=libarchive ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=boringssl ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=lolhtml ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
@@ -517,7 +525,8 @@ COPY --from=tinycc ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=c-ares ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=ls-hpack ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=bun-compile-zig-obj /tmp/bun-zig.o ${BUN_DIR}/build/bun-zig.o
|
||||
COPY --from=bun-cpp-objects ${BUN_DIR}/build/bun-cpp-objects.a ${BUN_DIR}/build/bun-cpp-objects.a
|
||||
COPY --from=bun-cpp-objects ${BUN_DIR}/build/*.a ${BUN_DIR}/build/
|
||||
COPY --from=bun-cpp-objects ${BUN_DIR}/build/*.o ${BUN_DIR}/build/
|
||||
COPY --from=bun-cpp-objects ${BUN_DIR}/bun-webkit/lib ${BUN_DIR}/bun-webkit/lib
|
||||
|
||||
WORKDIR $BUN_DIR/build
|
||||
@@ -528,7 +537,7 @@ RUN --mount=type=cache,target=${CCACHE_DIR} \
|
||||
-G Ninja \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DBUN_LINK_ONLY=1 \
|
||||
-DBUN_ZIG_OBJ="${BUN_DIR}/build/bun-zig.o" \
|
||||
-DBUN_ZIG_OBJ_DIR="${BUN_DIR}/build" \
|
||||
-DUSE_LTO=ON \
|
||||
-DUSE_DEBUG_JSC=${ASSERTIONS} \
|
||||
-DBUN_CPP_ARCHIVE="${BUN_DIR}/build/bun-cpp-objects.a" \
|
||||
@@ -571,7 +580,6 @@ COPY src/symbols.dyn src/linker.lds ${BUN_DIR}/src/
|
||||
|
||||
COPY CMakeLists.txt ${BUN_DIR}/CMakeLists.txt
|
||||
COPY --from=zlib ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=base64 ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=libarchive ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=boringssl ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=lolhtml ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
@@ -591,7 +599,7 @@ RUN --mount=type=cache,target=${CCACHE_DIR} \
|
||||
-G Ninja \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DBUN_LINK_ONLY=1 \
|
||||
-DBUN_ZIG_OBJ="${BUN_DIR}/build/bun-zig.o" \
|
||||
-DBUN_ZIG_OBJ_DIR="${BUN_DIR}/build" \
|
||||
-DUSE_DEBUG_JSC=ON \
|
||||
-DBUN_CPP_ARCHIVE="${BUN_DIR}/build/bun-cpp-objects.a" \
|
||||
-DWEBKIT_DIR="${BUN_DIR}/bun-webkit" \
|
||||
|
||||

229 LICENSE
@@ -1,229 +0,0 @@
|
||||
Bun itself is MIT-licensed.
|
||||
|
||||
## JavaScriptCore
|
||||
|
||||
Bun statically links JavaScriptCore (and WebKit) which is LGPL-2 licensed. WebCore files from WebKit are also licensed under LGPL2. Per LGPL2:
|
||||
|
||||
> (1) If you statically link against an LGPL’d library, you must also provide your application in an object (not necessarily source) format, so that a user has the opportunity to modify the library and relink the application.
|
||||
|
||||
You can find the patched version of WebKit used by Bun here: <https://github.com/oven-sh/webkit>. If you would like to relink Bun with changes:
|
||||
|
||||
- `git submodule update --init --recursive`
|
||||
- `make jsc`
|
||||
- `zig build`
|
||||
|
||||
This compiles JavaScriptCore, compiles Bun’s `.cpp` bindings for JavaScriptCore (which are the object files using JavaScriptCore) and outputs a new `bun` binary with your changes.
|
||||
|
||||
## Linked libraries
|
||||
|
||||
Bun statically links these libraries:
|
||||
|
||||
{% table %}
|
||||
|
||||
- Library
|
||||
- License
|
||||
|
||||
---
|
||||
|
||||
- [`boringssl`](https://boringssl.googlesource.com/boringssl/)
|
||||
- [several licenses](https://boringssl.googlesource.com/boringssl/+/refs/heads/master/LICENSE)
|
||||
|
||||
---
|
||||
|
||||
---
|
||||
|
||||
- [`brotli`](https://github.com/google/brotli)
|
||||
- MIT
|
||||
|
||||
---
|
||||
|
||||
- [`libarchive`](https://github.com/libarchive/libarchive)
|
||||
- [several licenses](https://github.com/libarchive/libarchive/blob/master/COPYING)
|
||||
|
||||
---
|
||||
|
||||
- [`lol-html`](https://github.com/cloudflare/lol-html/tree/master/c-api)
|
||||
- BSD 3-Clause
|
||||
|
||||
---
|
||||
|
||||
- [`mimalloc`](https://github.com/microsoft/mimalloc)
|
||||
- MIT
|
||||
|
||||
---
|
||||
|
||||
- [`picohttp`](https://github.com/h2o/picohttpparser)
|
||||
- dual-licensed under the Perl License or the MIT License
|
||||
|
||||
---
|
||||
|
||||
- [`zstd`](https://github.com/facebook/zstd)
|
||||
- dual-licensed under the BSD License or GPLv2 license
|
||||
|
||||
---
|
||||
|
||||
- [`simdutf`](https://github.com/simdutf/simdutf)
|
||||
- Apache 2.0
|
||||
|
||||
---
|
||||
|
||||
- [`tinycc`](https://github.com/tinycc/tinycc)
|
||||
- LGPL v2.1
|
||||
|
||||
---
|
||||
|
||||
- [`uSockets`](https://github.com/uNetworking/uSockets)
|
||||
- Apache 2.0
|
||||
|
||||
---
|
||||
|
||||
- [`zlib-cloudflare`](https://github.com/cloudflare/zlib)
|
||||
- zlib
|
||||
|
||||
---
|
||||
|
||||
- [`c-ares`](https://github.com/c-ares/c-ares)
|
||||
- MIT licensed
|
||||
|
||||
---
|
||||
|
||||
- [`libicu`](https://github.com/unicode-org/icu) 72
|
||||
- [license here](https://github.com/unicode-org/icu/blob/main/icu4c/LICENSE)
|
||||
|
||||
---
|
||||
|
||||
- [`libbase64`](https://github.com/aklomp/base64/blob/master/LICENSE)
|
||||
- BSD 2-Clause
|
||||
|
||||
---
|
||||
|
||||
- A fork of [`uWebsockets`](https://github.com/jarred-sumner/uwebsockets)
|
||||
- Apache 2.0 licensed
|
||||
|
||||
---
|
||||
|
||||
- Parts of [Tigerbeetle's IO code](https://github.com/tigerbeetle/tigerbeetle/blob/532c8b70b9142c17e07737ab6d3da68d7500cbca/src/io/windows.zig#L1)
|
||||
- Apache 2.0 licensed
|
||||
|
||||
{% /table %}
|
||||
|
||||
## Polyfills
|
||||
|
||||
For compatibility reasons, the following packages are embedded into Bun's binary and injected if imported.
|
||||
|
||||
{% table %}
|
||||
|
||||
- Package
|
||||
- License
|
||||
|
||||
---
|
||||
|
||||
- [`assert`](https://npmjs.com/package/assert)
|
||||
- MIT
|
||||
|
||||
---
|
||||
|
||||
- [`browserify-zlib`](https://npmjs.com/package/browserify-zlib)
|
||||
- MIT
|
||||
|
||||
---
|
||||
|
||||
- [`buffer`](https://npmjs.com/package/buffer)
|
||||
- MIT
|
||||
|
||||
---
|
||||
|
||||
- [`constants-browserify`](https://npmjs.com/package/constants-browserify)
|
||||
- MIT
|
||||
|
||||
---
|
||||
|
||||
- [`crypto-browserify`](https://npmjs.com/package/crypto-browserify)
|
||||
- MIT
|
||||
|
||||
---
|
||||
|
||||
- [`domain-browser`](https://npmjs.com/package/domain-browser)
|
||||
- MIT
|
||||
|
||||
---
|
||||
|
||||
- [`events`](https://npmjs.com/package/events)
|
||||
- MIT
|
||||
|
||||
---
|
||||
|
||||
- [`https-browserify`](https://npmjs.com/package/https-browserify)
|
||||
- MIT
|
||||
|
||||
---
|
||||
|
||||
- [`os-browserify`](https://npmjs.com/package/os-browserify)
|
||||
- MIT
|
||||
|
||||
---
|
||||
|
||||
- [`path-browserify`](https://npmjs.com/package/path-browserify)
|
||||
- MIT
|
||||
|
||||
---
|
||||
|
||||
- [`process`](https://npmjs.com/package/process)
|
||||
- MIT
|
||||
|
||||
---
|
||||
|
||||
- [`punycode`](https://npmjs.com/package/punycode)
|
||||
- MIT
|
||||
|
||||
---
|
||||
|
||||
- [`querystring-es3`](https://npmjs.com/package/querystring-es3)
|
||||
- MIT
|
||||
|
||||
---
|
||||
|
||||
- [`stream-browserify`](https://npmjs.com/package/stream-browserify)
|
||||
- MIT
|
||||
|
||||
---
|
||||
|
||||
- [`stream-http`](https://npmjs.com/package/stream-http)
|
||||
- MIT
|
||||
|
||||
---
|
||||
|
||||
- [`string_decoder`](https://npmjs.com/package/string_decoder)
|
||||
- MIT
|
||||
|
||||
---
|
||||
|
||||
- [`timers-browserify`](https://npmjs.com/package/timers-browserify)
|
||||
- MIT
|
||||
|
||||
---
|
||||
|
||||
- [`tty-browserify`](https://npmjs.com/package/tty-browserify)
|
||||
- MIT
|
||||
|
||||
---
|
||||
|
||||
- [`url`](https://npmjs.com/package/url)
|
||||
- MIT
|
||||
|
||||
---
|
||||
|
||||
- [`util`](https://npmjs.com/package/util)
|
||||
- MIT
|
||||
|
||||
---
|
||||
|
||||
- [`vm-browserify`](https://npmjs.com/package/vm-browserify)
|
||||
- MIT
|
||||
|
||||
{% /table %}
|
||||
|
||||
## Additional credits
|
||||
|
||||
- Bun's JS transpiler, CSS lexer, and Node.js module resolver source code is a Zig port of [@evanw](https://github.com/evanw)’s [esbuild](https://github.com/evanw/esbuild) project.
|
||||
- Credit to [@kipply](https://github.com/kipply) for the name "Bun"!
|
||||

71 LICENSE.md (Normal file)
@@ -0,0 +1,71 @@
|
||||
Bun itself is MIT-licensed.
|
||||
|
||||
## JavaScriptCore
|
||||
|
||||
Bun statically links JavaScriptCore (and WebKit) which is LGPL-2 licensed. WebCore files from WebKit are also licensed under LGPL2. Per LGPL2:
|
||||
|
||||
> (1) If you statically link against an LGPL’d library, you must also provide your application in an object (not necessarily source) format, so that a user has the opportunity to modify the library and relink the application.
|
||||
|
||||
You can find the patched version of WebKit used by Bun here: <https://github.com/oven-sh/webkit>. If you would like to relink Bun with changes:
|
||||
|
||||
- `git submodule update --init --recursive`
|
||||
- `make jsc`
|
||||
- `zig build`
|
||||
|
||||
This compiles JavaScriptCore, compiles Bun’s `.cpp` bindings for JavaScriptCore (which are the object files using JavaScriptCore) and outputs a new `bun` binary with your changes.
|
||||
|
||||
## Linked libraries
|
||||
|
||||
Bun statically links these libraries:
|
||||
|
||||
| Library | License |
|
||||
|---------|---------|
|
||||
| [`boringssl`](https://boringssl.googlesource.com/boringssl/) | [several licenses](https://boringssl.googlesource.com/boringssl/+/refs/heads/master/LICENSE) |
|
||||
| [`brotli`](https://github.com/google/brotli) | MIT |
|
||||
| [`libarchive`](https://github.com/libarchive/libarchive) | [several licenses](https://github.com/libarchive/libarchive/blob/master/COPYING) |
|
||||
| [`lol-html`](https://github.com/cloudflare/lol-html/tree/master/c-api) | BSD 3-Clause |
|
||||
| [`mimalloc`](https://github.com/microsoft/mimalloc) | MIT |
|
||||
| [`picohttp`](https://github.com/h2o/picohttpparser) | dual-licensed under the Perl License or the MIT License |
|
||||
| [`zstd`](https://github.com/facebook/zstd) | dual-licensed under the BSD License or GPLv2 license |
|
||||
| [`simdutf`](https://github.com/simdutf/simdutf) | Apache 2.0 |
|
||||
| [`tinycc`](https://github.com/tinycc/tinycc) | LGPL v2.1 |
|
||||
| [`uSockets`](https://github.com/uNetworking/uSockets) | Apache 2.0 |
|
||||
| [`zlib-cloudflare`](https://github.com/cloudflare/zlib) | zlib |
|
||||
| [`c-ares`](https://github.com/c-ares/c-ares) | MIT licensed |
|
||||
| [`libicu`](https://github.com/unicode-org/icu) 72 | [license here](https://github.com/unicode-org/icu/blob/main/icu4c/LICENSE) |
|
||||
| [`libbase64`](https://github.com/aklomp/base64/blob/master/LICENSE) | BSD 2-Clause |
|
||||
| A fork of [`uWebsockets`](https://github.com/jarred-sumner/uwebsockets) | Apache 2.0 licensed |
|
||||
| Parts of [Tigerbeetle's IO code](https://github.com/tigerbeetle/tigerbeetle/blob/532c8b70b9142c17e07737ab6d3da68d7500cbca/src/io/windows.zig#L1) | Apache 2.0 licensed |
|
||||
|
||||
## Polyfills
|
||||
|
||||
For compatibility reasons, the following packages are embedded into Bun's binary and injected if imported.
|
||||
|
||||
| Package | License |
|
||||
|---------|---------|
|
||||
| [`assert`](https://npmjs.com/package/assert) | MIT |
|
||||
| [`browserify-zlib`](https://npmjs.com/package/browserify-zlib) | MIT |
|
||||
| [`buffer`](https://npmjs.com/package/buffer) | MIT |
|
||||
| [`constants-browserify`](https://npmjs.com/package/constants-browserify) | MIT |
|
||||
| [`crypto-browserify`](https://npmjs.com/package/crypto-browserify) | MIT |
|
||||
| [`domain-browser`](https://npmjs.com/package/domain-browser) | MIT |
|
||||
| [`events`](https://npmjs.com/package/events) | MIT |
|
||||
| [`https-browserify`](https://npmjs.com/package/https-browserify) | MIT |
|
||||
| [`os-browserify`](https://npmjs.com/package/os-browserify) | MIT |
|
||||
| [`path-browserify`](https://npmjs.com/package/path-browserify) | MIT |
|
||||
| [`process`](https://npmjs.com/package/process) | MIT |
|
||||
| [`punycode`](https://npmjs.com/package/punycode) | MIT |
|
||||
| [`querystring-es3`](https://npmjs.com/package/querystring-es3) | MIT |
|
||||
| [`stream-browserify`](https://npmjs.com/package/stream-browserify) | MIT |
|
||||
| [`stream-http`](https://npmjs.com/package/stream-http) | MIT |
|
||||
| [`string_decoder`](https://npmjs.com/package/string_decoder) | MIT |
|
||||
| [`timers-browserify`](https://npmjs.com/package/timers-browserify) | MIT |
|
||||
| [`tty-browserify`](https://npmjs.com/package/tty-browserify) | MIT |
|
||||
| [`url`](https://npmjs.com/package/url) | MIT |
|
||||
| [`util`](https://npmjs.com/package/util) | MIT |
|
||||
| [`vm-browserify`](https://npmjs.com/package/vm-browserify) | MIT |
|
||||
|
||||
## Additional credits
|
||||
|
||||
- Bun's JS transpiler, CSS lexer, and Node.js module resolver source code is a Zig port of [@evanw](https://github.com/evanw)’s [esbuild](https://github.com/evanw/esbuild) project.
|
||||
- Credit to [@kipply](https://github.com/kipply) for the name "Bun"!
|
||||

28 Makefile
@@ -26,8 +26,11 @@ ifeq ($(ARCH_NAME_RAW),arm64)
|
||||
ARCH_NAME = aarch64
|
||||
DOCKER_BUILDARCH = arm64
|
||||
BREW_PREFIX_PATH = /opt/homebrew
|
||||
DEFAULT_MIN_MACOS_VERSION = 11.0
|
||||
DEFAULT_MIN_MACOS_VERSION = 13.0
|
||||
MARCH_NATIVE = -mtune=$(CPU_TARGET)
|
||||
ifeq ($(OS_NAME),linux)
|
||||
MARCH_NATIVE = -march=armv8-a+crc -mtune=ampere1
|
||||
endif
|
||||
else
|
||||
ARCH_NAME = x64
|
||||
DOCKER_BUILDARCH = amd64
|
||||
@@ -129,7 +132,7 @@ SED = $(shell which gsed 2>/dev/null || which sed 2>/dev/null)
|
||||
|
||||
BUN_DIR ?= $(shell dirname $(realpath $(firstword $(MAKEFILE_LIST))))
|
||||
BUN_DEPS_DIR ?= $(shell pwd)/src/deps
|
||||
BUN_DEPS_OUT_DIR ?= $(BUN_DEPS_DIR)
|
||||
BUN_DEPS_OUT_DIR ?= $(shell pwd)/build/bun-deps
|
||||
CPU_COUNT = 2
|
||||
ifeq ($(OS_NAME),darwin)
|
||||
CPU_COUNT = $(shell sysctl -n hw.logicalcpu)
|
||||
@@ -154,7 +157,12 @@ CMAKE_FLAGS_WITHOUT_RELEASE = -DCMAKE_C_COMPILER=$(CC) \
|
||||
-DCMAKE_OSX_DEPLOYMENT_TARGET=$(MIN_MACOS_VERSION) \
|
||||
$(CMAKE_CXX_COMPILER_LAUNCHER_FLAG) \
|
||||
-DCMAKE_AR=$(AR) \
|
||||
-DCMAKE_RANLIB=$(which llvm-16-ranlib 2>/dev/null || which llvm-ranlib 2>/dev/null)
|
||||
-DCMAKE_RANLIB=$(which llvm-16-ranlib 2>/dev/null || which llvm-ranlib 2>/dev/null) \
|
||||
-DCMAKE_CXX_STANDARD=20 \
|
||||
-DCMAKE_C_STANDARD=17 \
|
||||
-DCMAKE_CXX_STANDARD_REQUIRED=ON \
|
||||
-DCMAKE_C_STANDARD_REQUIRED=ON \
|
||||
-DCMAKE_CXX_EXTENSIONS=ON
|
||||
|
||||
|
||||
|
||||
@@ -181,8 +189,8 @@ endif
|
||||
|
||||
OPTIMIZATION_LEVEL=-O3 $(MARCH_NATIVE)
|
||||
DEBUG_OPTIMIZATION_LEVEL= -O1 $(MARCH_NATIVE) -gdwarf-4
|
||||
CFLAGS_WITHOUT_MARCH = $(MACOS_MIN_FLAG) $(OPTIMIZATION_LEVEL) -fno-exceptions -fvisibility=hidden -fvisibility-inlines-hidden
|
||||
BUN_CFLAGS = $(MACOS_MIN_FLAG) $(MARCH_NATIVE) $(OPTIMIZATION_LEVEL) -fno-exceptions -fvisibility=hidden -fvisibility-inlines-hidden
|
||||
CFLAGS_WITHOUT_MARCH = $(MACOS_MIN_FLAG) $(OPTIMIZATION_LEVEL) -fno-exceptions -fvisibility=hidden -fvisibility-inlines-hidden -mno-omit-leaf-frame-pointer -fno-omit-frame-pointer -fno-asynchronous-unwind-tables -fno-unwind-tables -fno-pie -fno-pic
|
||||
BUN_CFLAGS = $(MACOS_MIN_FLAG) $(MARCH_NATIVE) $(OPTIMIZATION_LEVEL) -fno-exceptions -fvisibility=hidden -fvisibility-inlines-hidden -mno-omit-leaf-frame-pointer -fno-omit-frame-pointer -fno-asynchronous-unwind-tables -fno-unwind-tables -fno-pie -fno-pic
|
||||
BUN_TMP_DIR := /tmp/make-bun
|
||||
CFLAGS=$(CFLAGS_WITHOUT_MARCH) $(MARCH_NATIVE)
|
||||
|
||||
@@ -449,8 +457,7 @@ MINIMUM_ARCHIVE_FILES = -L$(BUN_DEPS_OUT_DIR) \
|
||||
-ldecrepit \
|
||||
-lssl \
|
||||
-lcrypto \
|
||||
-llolhtml \
|
||||
-lbase64
|
||||
-llolhtml
|
||||
|
||||
ARCHIVE_FILES_WITHOUT_LIBCRYPTO = $(MINIMUM_ARCHIVE_FILES) \
|
||||
-larchive \
|
||||
@@ -1971,11 +1978,6 @@ copy-to-bun-release-dir-bin:
|
||||
|
||||
PACKAGE_MAP = --pkg-begin async_io $(BUN_DIR)/src/io/io_darwin.zig --pkg-begin bun $(BUN_DIR)/src/bun_redirect.zig --pkg-end --pkg-end --pkg-begin javascript_core $(BUN_DIR)/src/jsc.zig --pkg-begin bun $(BUN_DIR)/src/bun_redirect.zig --pkg-end --pkg-end --pkg-begin bun $(BUN_DIR)/src/bun_redirect.zig --pkg-end
|
||||
|
||||
.PHONY: base64
|
||||
base64:
|
||||
cd $(BUN_DEPS_DIR)/base64 && make clean && rm -rf CMakeCache.txt CMakeFiles && cmake $(CMAKE_FLAGS) . && make
|
||||
cp $(BUN_DEPS_DIR)/base64/libbase64.a $(BUN_DEPS_OUT_DIR)/libbase64.a
|
||||
|
||||
.PHONY: cold-jsc-start
|
||||
cold-jsc-start:
|
||||
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
|
||||
@@ -1993,7 +1995,7 @@ cold-jsc-start:
|
||||
misctools/cold-jsc-start.cpp -o cold-jsc-start
|
||||
|
||||
.PHONY: vendor-without-npm
|
||||
vendor-without-npm: node-fallbacks runtime_js fallback_decoder bun_error mimalloc picohttp zlib boringssl libarchive lolhtml sqlite usockets uws lshpack tinycc c-ares zstd base64
|
||||
vendor-without-npm: node-fallbacks runtime_js fallback_decoder bun_error mimalloc picohttp zlib boringssl libarchive lolhtml sqlite usockets uws lshpack tinycc c-ares zstd
|
||||
|
||||
|
||||
.PHONY: vendor-without-check
|
||||
|
||||
@@ -224,6 +224,7 @@ bun upgrade --canary
|
||||
- [Build an HTTP server using StricJS and Bun](https://bun.sh/guides/ecosystem/stric)
|
||||
- [Containerize a Bun application with Docker](https://bun.sh/guides/ecosystem/docker)
|
||||
- [Create a Discord bot](https://bun.sh/guides/ecosystem/discordjs)
|
||||
- [Deploy a Bun application on Render](https://bun.sh/guides/ecosystem/render)
|
||||
- [Read and write data to MongoDB using Mongoose and Bun](https://bun.sh/guides/ecosystem/mongoose)
|
||||
- [Run Bun as a daemon with PM2](https://bun.sh/guides/ecosystem/pm2)
|
||||
- [Run Bun as a daemon with systemd](https://bun.sh/guides/ecosystem/systemd)
|
||||
@@ -233,6 +234,7 @@ bun upgrade --canary
|
||||
- [Use Neon's Serverless Postgres with Bun](https://bun.sh/guides/ecosystem/neon-serverless-postgres)
|
||||
- [Use Prisma with Bun](https://bun.sh/guides/ecosystem/prisma)
|
||||
- [Use React and JSX](https://bun.sh/guides/ecosystem/react)
|
||||
- [Add Sentry to a Bun app](https://bun.sh/guides/ecosystem/sentry)
|
||||
|
||||
- HTTP
|
||||
- [Common HTTP server usage](https://bun.sh/guides/http/server)
|
||||
|
||||

BIN bench/bun.lockb (Binary file not shown.)
@@ -3,6 +3,7 @@
|
||||
"dependencies": {
|
||||
"@babel/core": "^7.16.10",
|
||||
"@babel/preset-react": "^7.16.7",
|
||||
"@babel/standalone": "^7.24.7",
|
||||
"@swc/core": "^1.2.133",
|
||||
"benchmark": "^2.1.4",
|
||||
"braces": "^3.0.2",
|
||||
|
||||
@@ -6,6 +6,7 @@ const App = () => (
|
||||
<html>
|
||||
<body>
|
||||
<h1>Hello World</h1>
|
||||
<p>This is an example.</p>
|
||||
</body>
|
||||
</html>
|
||||
);
|
||||
|
||||

21 bench/snippets/buffer-base64.mjs (Normal file)
@@ -0,0 +1,21 @@
|
||||
import { bench, run } from "./runner.mjs";
|
||||
|
||||
function makeBenchmark(size, isToString) {
|
||||
const base64Input = Buffer.alloc(size, "latin1").toString("base64");
|
||||
const base64From = Buffer.from(base64Input, "base64");
|
||||
|
||||
if (!isToString)
|
||||
bench(`Buffer.from(${size} bytes, 'base64')`, () => {
|
||||
Buffer.from(base64Input, "base64");
|
||||
});
|
||||
|
||||
if (isToString)
|
||||
bench(`Buffer(${size}).toString('base64')`, () => {
|
||||
base64From.toString("base64");
|
||||
});
|
||||
}
|
||||
|
||||
[32, 512, 64 * 1024, 512 * 1024, 1024 * 1024 * 8].forEach(s => makeBenchmark(s, true));
|
||||
[32, 512, 64 * 1024, 512 * 1024, 1024 * 1024 * 8].forEach(s => makeBenchmark(s, false));
|
||||
|
||||
await run();
|
||||

29 bench/snippets/pbkdf2.mjs (Normal file)
@@ -0,0 +1,29 @@
|
||||
import { pbkdf2, pbkdf2Sync } from "node:crypto";
|
||||
|
||||
import { bench, run } from "./runner.mjs";
|
||||
|
||||
const password = "password";
|
||||
const salt = "salt";
|
||||
const iterations = 1000;
|
||||
const keylen = 32;
|
||||
const hash = "sha256";
|
||||
|
||||
bench("pbkdf2(iterations = 1000, 'sha256') -> 32", async () => {
|
||||
return new Promise((resolve, reject) => {
|
||||
pbkdf2(password, salt, iterations, keylen, hash, (err, key) => {
|
||||
if (err) return reject(err);
|
||||
resolve(key);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
bench("pbkdf2(iterations = 500_000, 'sha256') -> 32", async () => {
|
||||
return new Promise((resolve, reject) => {
|
||||
pbkdf2(password, salt, 500_000, keylen, hash, (err, key) => {
|
||||
if (err) return reject(err);
|
||||
resolve(key);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
await run();
|
||||
@@ -1,16 +1,42 @@
|
||||
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
|
||||
|
||||
bench("setTimeout(, 4) 100 times", async () => {
|
||||
var i = 100;
|
||||
while (--i >= 0) {
|
||||
await new Promise((resolve, reject) => {
|
||||
setTimeout(() => {
|
||||
resolve();
|
||||
}, 4);
|
||||
});
|
||||
}
|
||||
});
|
||||
let count = 20_000_000;
|
||||
const batchSize = 1_000_000;
|
||||
console.time("Run");
|
||||
|
||||
setTimeout(() => {
|
||||
run({}).then(() => {});
|
||||
}, 1);
|
||||
let { promise, resolve, reject } = Promise.withResolvers();
|
||||
let remaining = count;
|
||||
|
||||
if (batchSize === 0) {
|
||||
for (let i = 0; i < count; i++) {
|
||||
setTimeout(() => {
|
||||
remaining--;
|
||||
if (remaining === 0) {
|
||||
resolve();
|
||||
}
|
||||
}, 0);
|
||||
}
|
||||
await promise;
|
||||
} else {
|
||||
for (let i = 0; i < count; i += batchSize) {
|
||||
let batch = Math.min(batchSize, count - i);
|
||||
console.time("Batch " + i + " - " + (i + batch));
|
||||
let { promise: batchPromise, resolve: batchResolve } = Promise.withResolvers();
|
||||
let remaining = batch;
|
||||
for (let j = 0; j < batch; j++) {
|
||||
setTimeout(() => {
|
||||
remaining--;
|
||||
if (remaining === 0) {
|
||||
batchResolve();
|
||||
}
|
||||
}, 0);
|
||||
}
|
||||
await batchPromise;
|
||||
console.timeEnd("Batch " + i + " - " + (i + batch));
|
||||
}
|
||||
}
|
||||
|
||||
const fmt = new Intl.NumberFormat();
|
||||
console.log("Executed", fmt.format(count), "timers");
|
||||
console.timeEnd("Run");
|
||||
process.exit(0);
|
||||
|
||||
14
bench/snippets/transpiler-2.mjs
Normal file
@@ -0,0 +1,14 @@
|
||||
import { bench, run } from "mitata";
|
||||
import { join } from "path";
|
||||
|
||||
const code = require("fs").readFileSync(
|
||||
process.argv[2] || join(import.meta.dir, "../node_modules/@babel/standalone/babel.min.js"),
|
||||
);
|
||||
|
||||
const transpiler = new Bun.Transpiler({ minify: true });
|
||||
|
||||
bench("transformSync", () => {
|
||||
transpiler.transformSync(code);
|
||||
});
|
||||
|
||||
await run();
|
||||
@@ -39,7 +39,7 @@ _read_scripts_in_package_json() {
|
||||
[[ "${COMP_WORDS[${line}]}" == "--cwd" ]] && working_dir="${COMP_WORDS[$((line + 1))]}";
|
||||
done
|
||||
|
||||
[[ -f "${working_dir}/package.json" ]] && package_json=$(<${working_dir}/package.json);
|
||||
[[ -f "${working_dir}/package.json" ]] && package_json=$(<"${working_dir}/package.json");
|
||||
|
||||
[[ "${package_json}" =~ "\"scripts\""[[:space:]]*":"[[:space:]]*\{(.*)\} ]] && {
|
||||
local package_json_compreply;
|
||||
@@ -82,7 +82,7 @@ _bun_completions() {
|
||||
declare -A PACKAGE_OPTIONS;
|
||||
declare -A PM_OPTIONS;
|
||||
|
||||
local SUBCOMMANDS="dev bun create run install add remove upgrade completions discord help init pm x";
|
||||
local SUBCOMMANDS="dev bun create run install add remove upgrade completions discord help init pm x test repl update link unlink build";
|
||||
|
||||
GLOBAL_OPTIONS[LONG_OPTIONS]="--use --cwd --bunfile --server-bunfile --config --disable-react-fast-refresh --disable-hmr --env-file --extension-order --jsx-factory --jsx-fragment --extension-order --jsx-factory --jsx-fragment --jsx-import-source --jsx-production --jsx-runtime --main-fields --no-summary --version --platform --public-dir --tsconfig-override --define --external --help --inject --loader --origin --port --dump-environment-variables --dump-limits --disable-bun-js";
|
||||
GLOBAL_OPTIONS[SHORT_OPTIONS]="-c -v -d -e -h -i -l -u -p";
|
||||
|
||||
@@ -53,7 +53,7 @@ function __bun_complete_bins_scripts --inherit-variable bun_builtin_cmds_without
|
||||
# Scripts have descriptions appended with a tab separator.
|
||||
# Strip off descriptions for the purposes of subcommand testing.
|
||||
set -l scripts (__fish__get_bun_scripts)
|
||||
if __fish_seen_subcommand_from $(string split \t -f 1 -- $scripts)
|
||||
if __fish_seen_subcommand_from (string split \t -f 1 -- $scripts)
|
||||
return
|
||||
end
|
||||
# Emit scripts.
|
||||
|
||||
@@ -425,6 +425,7 @@ _bun_run_completion() {
|
||||
'--external[Exclude module from transpilation (can use * wildcards). ex: -e react]:external' \
|
||||
'-e[Exclude module from transpilation (can use * wildcards). ex: -e react]:external' \
|
||||
'--loader[Parse files with .ext:loader, e.g. --loader .js:jsx. Valid loaders: js, jsx, ts, tsx, json, toml, text, file, wasm, napi]:loader' \
|
||||
'--packages[Exclude dependencies from bundle, e.g. --packages external. Valid options: bundle, external]:packages' \
|
||||
'-l[Parse files with .ext:loader, e.g. --loader .js:jsx. Valid loaders: js, jsx, ts, tsx, json, toml, text, file, wasm, napi]:loader' \
|
||||
'--origin[Rewrite import URLs to start with --origin. Default: ""]:origin' \
|
||||
'-u[Rewrite import URLs to start with --origin. Default: ""]:origin' \
|
||||
|
||||
@@ -61,7 +61,7 @@ To do anything interesting we need a construct known as a "view". A view is a cl
|
||||
|
||||
The `DataView` class is a lower-level interface for reading and manipulating the data in an `ArrayBuffer`.
|
||||
|
||||
Below we create a new `DataView` and set the first byte to 5.
|
||||
Below we create a new `DataView` and set the first byte to 3.
|
||||
|
||||
```ts
|
||||
const buf = new ArrayBuffer(4);
|
||||
@@ -395,7 +395,7 @@ Bun implements `Buffer`, a Node.js API for working with binary data that pre-dat
|
||||
|
||||
```ts
|
||||
const buf = Buffer.from("hello world");
|
||||
// => Buffer(16) [ 116, 104, 105, 115, 32, 105, 115, 32, 97, 32, 115, 116, 114, 105, 110, 103 ]
|
||||
// => Buffer(11) [ 104, 101, 108, 108, 111, 32, 119, 111, 114, 108, 100 ]
|
||||
|
||||
buf.length; // => 11
|
||||
buf[0]; // => 104, ascii for 'h'
|
||||
|
||||
308
docs/api/fetch.md
Normal file
@@ -0,0 +1,308 @@
|
||||
Bun implements the WHATWG `fetch` standard, with some extensions to meet the needs of server-side JavaScript.
|
||||
|
||||
Bun also implements `node:http`, but `fetch` is generally recommended instead.
|
||||
|
||||
## Sending an HTTP request
|
||||
|
||||
To send an HTTP request, use `fetch`:
|
||||
|
||||
```ts
|
||||
const response = await fetch("http://example.com");
|
||||
|
||||
console.log(response.status); // => 200
|
||||
|
||||
const text = await response.text(); // or response.json(), response.formData(), etc.
|
||||
```
|
||||
|
||||
`fetch` also works with HTTPS URLs.
|
||||
|
||||
```ts
|
||||
const response = await fetch("https://example.com");
|
||||
```
|
||||
|
||||
You can also pass `fetch` a [`Request`](https://developer.mozilla.org/en-US/docs/Web/API/Request) object.
|
||||
|
||||
```ts
|
||||
const request = new Request("http://example.com", {
|
||||
method: "POST",
|
||||
body: "Hello, world!",
|
||||
});
|
||||
|
||||
const response = await fetch(request);
|
||||
```
|
||||
|
||||
### Sending a POST request
|
||||
|
||||
To send a POST request, pass an object with the `method` property set to `"POST"`.
|
||||
|
||||
```ts
|
||||
const response = await fetch("http://example.com", {
|
||||
method: "POST",
|
||||
body: "Hello, world!",
|
||||
});
|
||||
```
|
||||
|
||||
`body` can be a string, a `FormData` object, an `ArrayBuffer`, a `Blob`, and more. See the [MDN documentation](https://developer.mozilla.org/en-US/docs/Web/API/Body/body) for more information.
|
||||
|
||||
### Proxying requests
|
||||
|
||||
To proxy a request, pass an object with the `proxy` property set to a URL.
|
||||
|
||||
```ts
|
||||
const response = await fetch("http://example.com", {
|
||||
proxy: "http://proxy.com",
|
||||
});
|
||||
```
|
||||
|
||||
### Custom headers
|
||||
|
||||
To set custom headers, pass an object with the `headers` property set to an object.
|
||||
|
||||
```ts
|
||||
const response = await fetch("http://example.com", {
|
||||
headers: {
|
||||
"X-Custom-Header": "value",
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
You can also set headers using the [Headers](https://developer.mozilla.org/en-US/docs/Web/API/Headers) object.
|
||||
|
||||
```ts
|
||||
const headers = new Headers();
|
||||
headers.append("X-Custom-Header", "value");
|
||||
|
||||
const response = await fetch("http://example.com", {
|
||||
headers,
|
||||
});
|
||||
```
|
||||
|
||||
### Response bodies
|
||||
|
||||
To read the response body, use one of the following methods:
|
||||
|
||||
- `response.text(): Promise<string>`: Returns a promise that resolves with the response body as a string.
|
||||
- `response.json(): Promise<any>`: Returns a promise that resolves with the response body as a JSON object.
|
||||
- `response.formData(): Promise<FormData>`: Returns a promise that resolves with the response body as a `FormData` object.
|
||||
- `response.bytes(): Promise<Uint8Array>`: Returns a promise that resolves with the response body as a `Uint8Array`.
|
||||
- `response.arrayBuffer(): Promise<ArrayBuffer>`: Returns a promise that resolves with the response body as an `ArrayBuffer`.
|
||||
- `response.blob(): Promise<Blob>`: Returns a promise that resolves with the response body as a `Blob`.
|
||||
|
||||
#### Streaming response bodies
|
||||
|
||||
You can use async iterators to stream the response body.
|
||||
|
||||
```ts
|
||||
const response = await fetch("http://example.com");
|
||||
|
||||
for await (const chunk of response.body) {
|
||||
console.log(chunk);
|
||||
}
|
||||
```
|
||||
|
||||
You can also more directly access the `ReadableStream` object.
|
||||
|
||||
```ts
|
||||
const response = await fetch("http://example.com");
|
||||
|
||||
const stream = response.body;
|
||||
|
||||
const reader = stream.getReader();
|
||||
const { value, done } = await reader.read();
|
||||
```
|
||||
|
||||
### Fetching a URL with a timeout
|
||||
|
||||
To fetch a URL with a timeout, use `AbortSignal.timeout`:
|
||||
|
||||
```ts
|
||||
const response = await fetch("http://example.com", {
|
||||
signal: AbortSignal.timeout(1000),
|
||||
});
|
||||
```
|
||||
|
||||
#### Canceling a request
|
||||
|
||||
To cancel a request, use an `AbortController`:
|
||||
|
||||
```ts
|
||||
const controller = new AbortController();
|
||||
|
||||
const response = await fetch("http://example.com", {
|
||||
signal: controller.signal,
|
||||
});
|
||||
|
||||
controller.abort();
|
||||
```
|
||||
|
||||
### Unix domain sockets
|
||||
|
||||
To fetch a URL using a Unix domain socket, use the `unix: string` option:
|
||||
|
||||
```ts
|
||||
const response = await fetch("https://hostname/a/path", {
|
||||
unix: "/var/run/path/to/unix.sock",
|
||||
method: "POST",
|
||||
body: JSON.stringify({ message: "Hello from Bun!" }),
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### TLS
|
||||
|
||||
To use a client certificate, use the `tls` option:
|
||||
|
||||
```ts
|
||||
await fetch("https://example.com", {
|
||||
tls: {
|
||||
key: Bun.file("/path/to/key.pem"),
|
||||
cert: Bun.file("/path/to/cert.pem"),
|
||||
// ca: [Bun.file("/path/to/ca.pem")],
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
#### Custom TLS Validation
|
||||
|
||||
To customize the TLS validation, use the `checkServerIdentity` option in `tls`
|
||||
|
||||
```ts
|
||||
await fetch("https://example.com", {
|
||||
tls: {
|
||||
checkServerIdentity: (hostname, peerCertificate) => {
|
||||
// Return an error if the certificate is invalid
|
||||
},
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
This is similar to how `checkServerIdentity` works in Node's `tls` module.
|
||||
|
||||
## Debugging
|
||||
|
||||
To help with debugging, you can pass `verbose: true` to `fetch`:
|
||||
|
||||
```ts
|
||||
const response = await fetch("http://example.com", {
|
||||
verbose: true,
|
||||
});
|
||||
```
|
||||
|
||||
This will print the request and response headers to your terminal:
|
||||
|
||||
```sh
|
||||
[fetch] > HTTP/1.1 GET http://example.com/
|
||||
[fetch] > Connection: keep-alive
|
||||
[fetch] > User-Agent: Bun/1.1.21
|
||||
[fetch] > Accept: */*
|
||||
[fetch] > Host: example.com
|
||||
[fetch] > Accept-Encoding: gzip, deflate, br
|
||||
|
||||
[fetch] < 200 OK
|
||||
[fetch] < Content-Encoding: gzip
|
||||
[fetch] < Age: 201555
|
||||
[fetch] < Cache-Control: max-age=604800
|
||||
[fetch] < Content-Type: text/html; charset=UTF-8
|
||||
[fetch] < Date: Sun, 21 Jul 2024 02:41:14 GMT
|
||||
[fetch] < Etag: "3147526947+gzip"
|
||||
[fetch] < Expires: Sun, 28 Jul 2024 02:41:14 GMT
|
||||
[fetch] < Last-Modified: Thu, 17 Oct 2019 07:18:26 GMT
|
||||
[fetch] < Server: ECAcc (sac/254F)
|
||||
[fetch] < Vary: Accept-Encoding
|
||||
[fetch] < X-Cache: HIT
|
||||
[fetch] < Content-Length: 648
|
||||
```
|
||||
|
||||
Note: `verbose: boolean` is not part of the Web standard `fetch` API and is specific to Bun.
|
||||
|
||||
## Performance
|
||||
|
||||
Before an HTTP request can be sent, the DNS lookup must be performed. This can take a significant amount of time, especially if the DNS server is slow or the network connection is poor.
|
||||
|
||||
After the DNS lookup, the TCP socket must be connected and the TLS handshake might need to be performed. This can also take a significant amount of time.
|
||||
|
||||
After the request completes, consuming the response body can also take a significant amount of time and memory.
|
||||
|
||||
At every step of the way, Bun provides APIs to help you optimize the performance of your application.
|
||||
|
||||
### DNS prefetching
|
||||
|
||||
To prefetch a DNS entry, you can use the `dns.prefetch` API. This API is useful when you know you'll need to connect to a host soon and want to avoid the initial DNS lookup.
|
||||
|
||||
```ts
|
||||
import { dns } from "bun";
|
||||
|
||||
dns.prefetch("bun.sh", 443);
|
||||
```
|
||||
|
||||
#### DNS caching
|
||||
|
||||
By default, Bun caches and deduplicates DNS queries in-memory for up to 30 seconds. You can see the cache stats by calling `dns.getCacheStats()`:
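For illustration, here is a minimal sketch of reading those stats (the exact counter names may vary between Bun versions):

```ts
import { dns } from "bun";

// Returns an object of counters describing the in-memory DNS cache,
// e.g. completed cache hits, misses, and the current cache size.
console.log(dns.getCacheStats());
```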
|
||||
|
||||
To learn more about DNS caching in Bun, see the [DNS caching](/docs/api/dns) documentation.
|
||||
|
||||
### Preconnect to a host
|
||||
|
||||
To preconnect to a host, you can use the `fetch.preconnect` API. This API is useful when you know you'll need to connect to a host soon and want to start the initial DNS lookup, TCP socket connection, and TLS handshake early.
|
||||
|
||||
```ts
|
||||
import { fetch } from "bun";
|
||||
|
||||
fetch.preconnect("https://bun.sh");
|
||||
```
|
||||
|
||||
Note: calling `fetch` immediately after `fetch.preconnect` will not make your request faster. Preconnecting only helps if you know you'll need to connect to a host soon, but you're not ready to make the request yet.
|
||||
|
||||
#### Preconnect at startup
|
||||
|
||||
To preconnect to a host at startup, you can pass `--fetch-preconnect`:
|
||||
|
||||
```sh
|
||||
$ bun --fetch-preconnect https://bun.sh ./my-script.ts
|
||||
```
|
||||
|
||||
This is sort of like `<link rel="preconnect">` in HTML.
|
||||
|
||||
This feature is not implemented on Windows yet. If you're interested in using it on Windows, please file an issue and we can implement support for it.
|
||||
|
||||
### Connection pooling & HTTP keep-alive
|
||||
|
||||
Bun automatically reuses connections to the same host. This is known as connection pooling. This can significantly reduce the time it takes to establish a connection. You don't need to do anything to enable this; it's automatic.
|
||||
|
||||
#### Simultaneous connection limit
|
||||
|
||||
By default, Bun limits the maximum number of simultaneous `fetch` requests to 256. We do this for several reasons:
|
||||
|
||||
- It improves overall system stability. Operating systems have an upper limit on the number of simultaneous open TCP sockets, usually in the low thousands. Nearing this limit causes your entire computer to behave strangely. Applications hang and crash.
|
||||
- It encourages HTTP Keep-Alive connection reuse. For short-lived HTTP requests, the slowest step is often the initial connection setup. Reusing connections can save a lot of time.
|
||||
|
||||
When the limit is exceeded, the requests are queued and sent as soon as the next request ends.
|
||||
|
||||
You can increase the maximum number of simultaneous connections via the `BUN_CONFIG_MAX_HTTP_REQUESTS` environment variable:
|
||||
|
||||
```sh
|
||||
$ BUN_CONFIG_MAX_HTTP_REQUESTS=512 bun ./my-script.ts
|
||||
```
|
||||
|
||||
The max value for this limit is currently set to 65,336. The maximum port number is 65,535, so it's quite difficult for any one computer to exceed this limit.
|
||||
|
||||
### Response buffering
|
||||
|
||||
Bun goes to great lengths to optimize the performance of reading the response body. The fastest way to read the response body is to use one of these methods:
|
||||
|
||||
- `response.text(): Promise<string>`
|
||||
- `response.json(): Promise<any>`
|
||||
- `response.formData(): Promise<FormData>`
|
||||
- `response.bytes(): Promise<Uint8Array>`
|
||||
- `response.arrayBuffer(): Promise<ArrayBuffer>`
|
||||
- `response.blob(): Promise<Blob>`
|
||||
|
||||
You can also use `Bun.write` to write the response body to a file on disk:
|
||||
|
||||
```ts
|
||||
import { write } from "bun";
|
||||
|
||||
await write("output.txt", response);
|
||||
```
|
||||
@@ -16,7 +16,10 @@ Features include:
|
||||
- Parameters (named & positional)
|
||||
- Prepared statements
|
||||
- Datatype conversions (`BLOB` becomes `Uint8Array`)
|
||||
- Map query results to classes without an ORM - `query.as(MyClass)`
|
||||
- The fastest performance of any SQLite driver for JavaScript
|
||||
- `bigint` support
|
||||
- Multi-query statements (e.g. `SELECT 1; SELECT 2;`) in a single call to database.run(query)
|
||||
|
||||
The `bun:sqlite` module is roughly 3-6x faster than `better-sqlite3` and 8-9x faster than `deno.land/x/sqlite` for read queries. Each driver was benchmarked against the [Northwind Traders](https://github.com/jpwhite3/northwind-SQLite3/blob/46d5f8a64f396f87cd374d1600dbf521523980e8/Northwind_large.sqlite.zip) dataset. View and run the [benchmark source](https://github.com/oven-sh/bun/tree/main/bench/sqlite).
|
||||
|
||||
@@ -57,6 +60,39 @@ import { Database } from "bun:sqlite";
|
||||
const db = new Database("mydb.sqlite", { create: true });
|
||||
```
|
||||
|
||||
### Strict mode
|
||||
|
||||
{% callout %}
|
||||
Added in Bun v1.1.14
|
||||
{% /callout %}
|
||||
|
||||
By default, `bun:sqlite` requires binding parameters to include the `$`, `:`, or `@` prefix, and does not throw an error if a parameter is missing.
|
||||
|
||||
To instead throw an error when a parameter is missing and allow binding without a prefix, set `strict: true` on the `Database` constructor:
|
||||
|
||||
<!-- prettier-ignore -->
|
||||
```ts
|
||||
import { Database } from "bun:sqlite";
|
||||
|
||||
const strict = new Database(
|
||||
":memory:",
|
||||
{ strict: true }
|
||||
);
|
||||
|
||||
// throws error because of the typo:
|
||||
const query = strict
|
||||
.query("SELECT $message;")
|
||||
.all({ messag: "Hello world" });
|
||||
|
||||
const notStrict = new Database(
|
||||
":memory:"
|
||||
);
|
||||
// does not throw error:
|
||||
notStrict
|
||||
.query("SELECT $message;")
|
||||
.all({ messag: "Hello world" });
|
||||
```
|
||||
|
||||
### Load via ES module import
|
||||
|
||||
You can also use an import attribute to load a database.
|
||||
@@ -174,6 +210,47 @@ const query = db.query(`SELECT $param1, $param2;`);
|
||||
|
||||
Values are bound to these parameters when the query is executed. A `Statement` can be executed with several different methods, each returning the results in a different form.
|
||||
|
||||
### Binding values
|
||||
|
||||
To bind values to a statement, pass an object to the `.all()`, `.get()`, `.run()`, or `.values()` method.
|
||||
|
||||
```ts
|
||||
const query = db.query(`select $message;`);
|
||||
query.all({ $message: "Hello world" });
|
||||
```
|
||||
|
||||
You can bind using positional parameters too:
|
||||
|
||||
```ts
|
||||
const query = db.query(`select ?1;`);
|
||||
query.all("Hello world");
|
||||
```
|
||||
|
||||
#### `strict: true` lets you bind values without prefixes
|
||||
|
||||
{% callout %}
|
||||
Added in Bun v1.1.14
|
||||
{% /callout %}
|
||||
|
||||
By default, the `$`, `:`, and `@` prefixes are **included** when binding values to named parameters. To bind without these prefixes, use the `strict` option in the `Database` constructor.
|
||||
|
||||
```ts
|
||||
import { Database } from "bun:sqlite";
|
||||
|
||||
const db = new Database(":memory:", {
|
||||
// bind values without prefixes
|
||||
strict: true,
|
||||
});
|
||||
|
||||
const query = db.query(`select $message;`);
|
||||
|
||||
// strict: true
|
||||
query.all({ message: "Hello world" });
|
||||
|
||||
// strict: false
|
||||
// query.all({ $message: "Hello world" });
|
||||
```
|
||||
|
||||
### `.all()`
|
||||
|
||||
Use `.all()` to run a query and get back the results as an array of objects.
|
||||
@@ -205,11 +282,49 @@ Use `.run()` to run a query and get back `undefined`. This is useful for schema-
|
||||
```ts
|
||||
const query = db.query(`create table foo;`);
|
||||
query.run();
|
||||
// => undefined
|
||||
// {
|
||||
// lastInsertRowid: 0,
|
||||
// changes: 0,
|
||||
// }
|
||||
```
|
||||
|
||||
Internally, this calls [`sqlite3_reset`](https://www.sqlite.org/capi3ref.html#sqlite3_reset) and calls [`sqlite3_step`](https://www.sqlite.org/capi3ref.html#sqlite3_step) once. Stepping through all the rows is not necessary when you don't care about the results.
|
||||
|
||||
{% callout %}
|
||||
Since Bun v1.1.14, `.run()` returns an object with two properties: `lastInsertRowid` and `changes`.
|
||||
{% /callout %}
|
||||
|
||||
The `lastInsertRowid` property returns the ID of the last row inserted into the database. The `changes` property is the number of rows affected by the query.
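As a rough sketch of what this looks like in practice (assuming Bun v1.1.14 or later; the `cats` table here is purely illustrative):

```ts
import { Database } from "bun:sqlite";

const db = new Database(":memory:");
db.run("CREATE TABLE cats (id INTEGER PRIMARY KEY, name TEXT)");

const insert = db.query("INSERT INTO cats (name) VALUES ($name)");
const result = insert.run({ $name: "Milo" });

console.log(result.lastInsertRowid); // => 1, the rowid of the inserted row
console.log(result.changes); // => 1, the number of rows affected
```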
|
||||
|
||||
### `.as(Class)` - Map query results to a class
|
||||
|
||||
{% callout %}
|
||||
Added in Bun v1.1.14
|
||||
{% /callout %}
|
||||
|
||||
Use `.as(Class)` to run a query and get back the results as instances of a class. This lets you attach methods & getters/setters to results.
|
||||
|
||||
```ts
|
||||
class Movie {
|
||||
title: string;
|
||||
year: number;
|
||||
|
||||
get isMarvel() {
|
||||
return this.title.includes("Marvel");
|
||||
}
|
||||
}
|
||||
|
||||
const query = db.query("SELECT title, year FROM movies").as(Movie);
|
||||
const movies = query.all();
|
||||
const first = query.get();
|
||||
console.log(movies[0].isMarvel); // => true
|
||||
console.log(first.isMarvel); // => true
|
||||
```
|
||||
|
||||
As a performance optimization, the class constructor is not called, default initializers are not run, and private fields are not accessible. This is more like using `Object.create` than `new`. The class's prototype is assigned to the object, methods are attached, and getters/setters are set up, but the constructor is not called.
|
||||
|
||||
The database columns are set as properties on the class instance.
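As a hedged sketch of what this means in practice (reusing the hypothetical `movies` table from the example above):

```ts
class Movie {
  title!: string;
  year!: number;

  // Default initializers like this are NOT run by .as(Movie)
  viewed = false;

  constructor() {
    // Never called by .as(Movie)
    console.log("constructing a Movie");
  }
}

const movie = db.query("SELECT title, year FROM movies").as(Movie).get();
// movie.title and movie.year come straight from the row,
// movie.viewed is undefined, and nothing was logged.
```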
|
||||
|
||||
### `.values()`
|
||||
|
||||
Use `values()` to run a query and get back all results as an array of arrays.
|
||||
@@ -300,6 +415,65 @@ const results = query.all("hello", "goodbye");
|
||||
|
||||
{% /codetabs %}
|
||||
|
||||
## Integers
|
||||
|
||||
sqlite supports signed 64-bit integers, but JavaScript's `number` type can only safely represent integers up to 53 bits (2^53 - 1); anything larger requires arbitrary-precision integers with `bigint`.
|
||||
|
||||
`bigint` input is supported everywhere, but by default `bun:sqlite` returns integers as `number` types. If you need to handle integers larger than 2^53, set the `safeIntegers` option to `true` when creating a `Database` instance. This also validates that `bigint` values passed to `bun:sqlite` do not exceed 64 bits.
|
||||
|
||||
By default, `bun:sqlite` returns integers as `number` types. If you need to handle integers larger than 2^53, you can use the `bigint` type.
|
||||
|
||||
### `safeIntegers: true`
|
||||
|
||||
{% callout %}
|
||||
Added in Bun v1.1.14
|
||||
{% /callout %}
|
||||
|
||||
When `safeIntegers` is `true`, `bun:sqlite` will return integers as `bigint` types:
|
||||
|
||||
```ts
|
||||
import { Database } from "bun:sqlite";
|
||||
|
||||
const db = new Database(":memory:", { safeIntegers: true });
|
||||
const query = db.query(
|
||||
`SELECT ${BigInt(Number.MAX_SAFE_INTEGER) + 102n} as max_int`,
|
||||
);
|
||||
const result = query.get();
|
||||
console.log(result.max_int); // => 9007199254741093n
|
||||
```
|
||||
|
||||
When `safeIntegers` is `true`, `bun:sqlite` will throw an error if a `bigint` value in a bound parameter exceeds 64 bits:
|
||||
|
||||
```ts
|
||||
import { Database } from "bun:sqlite";
|
||||
|
||||
const db = new Database(":memory:", { safeIntegers: true });
|
||||
db.run("CREATE TABLE test (id INTEGER PRIMARY KEY, value INTEGER)");
|
||||
|
||||
const query = db.query("INSERT INTO test (value) VALUES ($value)");
|
||||
|
||||
try {
|
||||
query.run({ $value: BigInt(Number.MAX_SAFE_INTEGER) ** 2n });
|
||||
} catch (e) {
|
||||
console.log(e.message); // => BigInt value '81129638414606663681390495662081' is out of range
|
||||
}
|
||||
```
|
||||
|
||||
### `safeIntegers: false` (default)
|
||||
|
||||
When `safeIntegers` is `false`, `bun:sqlite` will return integers as `number` types and truncate any bits beyond 53:
|
||||
|
||||
```ts
|
||||
import { Database } from "bun:sqlite";
|
||||
|
||||
const db = new Database(":memory:", { safeIntegers: false });
|
||||
const query = db.query(
|
||||
`SELECT ${BigInt(Number.MAX_SAFE_INTEGER) + 102n} as max_int`,
|
||||
);
|
||||
const result = query.get();
|
||||
console.log(result.max_int); // => 9007199254741092
|
||||
```
|
||||
|
||||
## Transactions
|
||||
|
||||
Transactions are a mechanism for executing multiple queries in an _atomic_ way; that is, either all of the queries succeed or none of them do. Create a transaction with the `db.transaction()` method:
|
||||
@@ -447,12 +621,20 @@ class Database {
|
||||
);
|
||||
|
||||
query<Params, ReturnType>(sql: string): Statement<Params, ReturnType>;
|
||||
run(
|
||||
sql: string,
|
||||
params?: SQLQueryBindings,
|
||||
): { lastInsertRowid: number; changes: number };
|
||||
exec = this.run;
|
||||
}
|
||||
|
||||
class Statement<Params, ReturnType> {
|
||||
all(params: Params): ReturnType[];
|
||||
get(params: Params): ReturnType | undefined;
|
||||
run(params: Params): void;
|
||||
run(params: Params): {
|
||||
lastInsertRowid: number;
|
||||
changes: number;
|
||||
};
|
||||
values(params: Params): unknown[][];
|
||||
|
||||
finalize(): void; // destroy statement and clean up resources
|
||||
@@ -461,6 +643,8 @@ class Statement<Params, ReturnType> {
|
||||
columnNames: string[]; // the column names of the result set
|
||||
paramsCount: number; // the number of parameters expected by the statement
|
||||
native: any; // the native object representing the statement
|
||||
|
||||
as(Class: new () => ReturnType): this;
|
||||
}
|
||||
|
||||
type SQLQueryBindings =
|
||||
|
||||
@@ -183,7 +183,7 @@ const currentFile = import.meta.url;
|
||||
Bun.openInEditor(currentFile);
|
||||
```
|
||||
|
||||
You can override this via the `debug.editor` setting in your [`bunfig.toml`](/docs/runtime/bunfig)
|
||||
You can override this via the `debug.editor` setting in your [`bunfig.toml`](/docs/runtime/bunfig).
|
||||
|
||||
```toml-diff#bunfig.toml
|
||||
+ [debug]
|
||||
@@ -200,8 +200,6 @@ Bun.openInEditor(import.meta.url, {
|
||||
});
|
||||
```
|
||||
|
||||
Bun.ArrayBufferSink;
|
||||
|
||||
## `Bun.deepEquals()`
|
||||
|
||||
Recursively checks if two objects are equivalent. This is used internally by `expect().toEqual()` in `bun:test`.
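A quick sketch of typical usage:

```ts
Bun.deepEquals({ a: [1, 2, 3] }, { a: [1, 2, 3] }); // => true
Bun.deepEquals({ a: 1 }, { a: "1" }); // => false (values have different types)
```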
|
||||
@@ -251,11 +249,11 @@ Bun.deepEquals(new Foo(), { a: 1 }, true); // false
|
||||
|
||||
Escapes the following characters from an input string:
|
||||
|
||||
- `"` becomes `"&quot;"`
- `&` becomes `"&amp;"`
- `'` becomes `"&#x27;"`
- `<` becomes `"&lt;"`
- `>` becomes `"&gt;"`
- `"` becomes `&quot;`
- `&` becomes `&amp;`
- `'` becomes `&#x27;`
- `<` becomes `&lt;`
- `>` becomes `&gt;`
|
||||
|
||||
This function is optimized for large input. On an M1X, it processes 480 MB/s -
|
||||
20 GB/s, depending on how much data is being escaped and whether there is non-ascii
|
||||
|
||||
@@ -13,8 +13,7 @@ Like in browsers, [`Worker`](https://developer.mozilla.org/en-US/docs/Web/API/Wo
|
||||
### From the main thread
|
||||
|
||||
```js#Main_thread
|
||||
const workerURL = new URL("worker.ts", import.meta.url).href;
|
||||
const worker = new Worker(workerURL);
|
||||
const worker = new Worker("./worker.ts");
|
||||
|
||||
worker.postMessage("hello");
|
||||
worker.onmessage = event => {
|
||||
@@ -51,6 +50,38 @@ const worker = new Worker("/not-found.js");
|
||||
|
||||
The specifier passed to `Worker` is resolved relative to the project root (like typing `bun ./path/to/file.js`).
|
||||
|
||||
### `blob:` URLs
|
||||
|
||||
As of Bun v1.1.13, you can also pass a `blob:` URL to `Worker`. This is useful for creating workers from strings or other sources.
|
||||
|
||||
```js
|
||||
const blob = new Blob(
|
||||
[
|
||||
`
|
||||
self.onmessage = (event: MessageEvent) => postMessage(event.data)`,
|
||||
],
|
||||
{
|
||||
type: "application/typescript",
|
||||
},
|
||||
);
|
||||
const url = URL.createObjectURL(blob);
|
||||
const worker = new Worker(url);
|
||||
```
|
||||
|
||||
Like the rest of Bun, workers created from `blob:` URLs support TypeScript, JSX, and other file types out of the box. You can indicate that the source should be treated as TypeScript either via the `type` option on the `Blob` or by passing a filename like `worker.ts` to the `File` constructor.
|
||||
|
||||
```js
|
||||
const file = new File(
|
||||
[
|
||||
`
|
||||
self.onmessage = (event: MessageEvent) => postMessage(event.data)`,
|
||||
],
|
||||
"worker.ts",
|
||||
);
|
||||
const url = URL.createObjectURL(file);
|
||||
const worker = new Worker(url);
|
||||
```
|
||||
|
||||
### `"open"`
|
||||
|
||||
The `"open"` event is emitted when a worker is created and ready to receive messages. This can be used to send an initial message to a worker once it's ready. (This event does not exist in browsers.)
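A minimal sketch (assuming a `worker.ts` file next to the current module):

```ts
const worker = new Worker(new URL("worker.ts", import.meta.url).href);

worker.addEventListener("open", () => {
  // The worker thread is ready; safe to send it an initial message now.
  worker.postMessage("hello from the main thread");
});
```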
|
||||
|
||||
@@ -563,12 +563,12 @@ Specifies the type of sourcemap to generate.
|
||||
await Bun.build({
|
||||
entrypoints: ['./index.tsx'],
|
||||
outdir: './out',
|
||||
sourcemap: "external", // default "none"
|
||||
sourcemap: 'linked', // default 'none'
|
||||
})
|
||||
```
|
||||
|
||||
```bash#CLI
|
||||
$ bun build ./index.tsx --outdir ./out --sourcemap=external
|
||||
$ bun build ./index.tsx --outdir ./out --sourcemap=linked
|
||||
```
|
||||
|
||||
{% /codetabs %}
|
||||
@@ -582,19 +582,19 @@ $ bun build ./index.tsx --outdir ./out --sourcemap=external
|
||||
|
||||
---
|
||||
|
||||
- `"inline"`
|
||||
- A sourcemap is generated and appended to the end of the generated bundle as a base64 payload.
|
||||
- `"linked"`
|
||||
- A separate `*.js.map` file is created alongside each `*.js` bundle using a `//# sourceMappingURL` comment to link the two. Requires `--outdir` to be set. The base URL of this can be customized with `--public-path`.
|
||||
|
||||
```ts
|
||||
// <bundled code here>
|
||||
|
||||
//# sourceMappingURL=data:application/json;base64,<encoded sourcemap here>
|
||||
//# sourceMappingURL=bundle.js.map
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
- `"external"`
|
||||
- A separate `*.js.map` file is created alongside each `*.js` bundle.
|
||||
- A separate `*.js.map` file is created alongside each `*.js` bundle without inserting a `//# sourceMappingURL` comment.
|
||||
|
||||
{% /table %}
|
||||
|
||||
@@ -608,7 +608,18 @@ Generated bundles contain a [debug id](https://sentry.engineering/blog/the-case-
|
||||
//# debugId=<DEBUG ID>
|
||||
```
|
||||
|
||||
The associated `*.js.map` sourcemap will be a JSON file containing an equivalent `debugId` property.
|
||||
---
|
||||
|
||||
- `"inline"`
|
||||
- A sourcemap is generated and appended to the end of the generated bundle as a base64 payload.
|
||||
|
||||
```ts
|
||||
// <bundled code here>
|
||||
|
||||
//# sourceMappingURL=data:application/json;base64,<encoded sourcemap here>
|
||||
```
|
||||
|
||||
The associated `*.js.map` sourcemap will be a JSON file containing an equivalent `debugId` property.
|
||||
|
||||
{% /callout %}
|
||||
|
||||
@@ -745,6 +756,25 @@ $ bun build ./index.tsx --outdir ./out --external '*'
|
||||
|
||||
{% /codetabs %}
|
||||
|
||||
### `packages`
|
||||
|
||||
Controls whether package dependencies are included in the bundle or not. Possible values: `bundle` (default), `external`. Bun treats any import whose path does not start with `.`, `..`, or `/` as a package.
|
||||
|
||||
{% codetabs group="a" %}
|
||||
|
||||
```ts#JavaScript
|
||||
await Bun.build({
|
||||
entrypoints: ['./index.ts'],
|
||||
packages: 'external',
|
||||
})
|
||||
```
|
||||
|
||||
```bash#CLI
|
||||
$ bun build ./index.ts --packages external
|
||||
```
|
||||
|
||||
{% /codetabs %}
|
||||
|
||||
### `naming`
|
||||
|
||||
Customizes the generated file names. Defaults to `./[dir]/[name].[ext]`.
|
||||
@@ -1246,7 +1276,7 @@ interface BuildOptions {
|
||||
loader?: { [k in string]: Loader }; // See https://bun.sh/docs/bundler/loaders
|
||||
manifest?: boolean; // false
|
||||
external?: string[]; // []
|
||||
sourcemap?: "none" | "inline" | "external"; // "none"
|
||||
sourcemap?: "none" | "inline" | "linked" | "external" | boolean; // "none"
|
||||
root?: string; // computed from entrypoints
|
||||
naming?:
|
||||
| string
|
||||
|
||||
@@ -94,8 +94,8 @@ In Bun's CLI, simple boolean flags like `--minify` do not accept an argument. Ot
|
||||
---
|
||||
|
||||
- `--packages`
|
||||
- n/a
|
||||
- Not supported
|
||||
- `--packages`
|
||||
- No differences
|
||||
|
||||
---
|
||||
|
||||
|
||||
@@ -35,6 +35,10 @@ $ bun add --optional lodash
|
||||
|
||||
## `--exact`
|
||||
|
||||
{% callout %}
|
||||
**Alias** — `-E`
|
||||
{% /callout %}
|
||||
|
||||
To add a package and pin to the resolved version, use `--exact`. This will resolve the version of the package and add it to your `package.json` with an exact version number instead of a version range.
|
||||
|
||||
```bash
|
||||
@@ -117,12 +121,16 @@ Bun reads this field and will run lifecycle scripts for `my-trusted-package`.
|
||||
|
||||
## Git dependencies
|
||||
|
||||
To add a dependency from a git repository:
|
||||
To add a dependency from a public or private git repository:
|
||||
|
||||
```bash
|
||||
$ bun add git@github.com:moment/moment.git
|
||||
```
|
||||
|
||||
{% callout %}
|
||||
**Note** — To install private repositories, your system needs the appropriate SSH credentials to access the repository.
|
||||
{% /callout %}
|
||||
|
||||
Bun supports a variety of protocols, including [`github`](https://docs.npmjs.com/cli/v9/configuring-npm/package-json#github-urls), [`git`](https://docs.npmjs.com/cli/v9/configuring-npm/package-json#git-urls-as-dependencies), `git+ssh`, `git+https`, and many more.
|
||||
|
||||
```json
|
||||
|
||||
9
docs/cli/patch-commit.md
Normal file
@@ -0,0 +1,9 @@
|
||||
An alias for `bun patch --commit` to maintain compatibility with pnpm.
|
||||
|
||||
You must prepare the package for patching with [`bun patch <pkg>`](/docs/cli/patch) first.
|
||||
|
||||
### `--patches-dir`
|
||||
|
||||
By default, `bun patch-commit` will use the `patches` directory in the temporary directory.
|
||||
|
||||
You can specify a different directory with the `--patches-dir` flag.
|
||||
@@ -56,3 +56,17 @@ To clear Bun's global module cache:
|
||||
```bash
|
||||
$ bun pm cache rm
|
||||
```
|
||||
|
||||
## List global installs
|
||||
|
||||
To list all globally installed packages:
|
||||
|
||||
```bash
|
||||
$ bun pm ls -g
|
||||
```
|
||||
|
||||
To list all globally installed packages, including nth-order dependencies:
|
||||
|
||||
```bash
|
||||
$ bun pm ls -g --all
|
||||
```
|
||||
|
||||
@@ -1,17 +1,34 @@
|
||||
To update all dependencies to the latest version _that's compatible with the version range specified in your `package.json`_:
|
||||
To update all dependencies to the latest version:
|
||||
|
||||
```sh
|
||||
$ bun update
|
||||
```
|
||||
|
||||
## `--force`
|
||||
|
||||
{% callout %}
|
||||
**Alias** — `-f`
|
||||
{% /callout %}
|
||||
|
||||
By default, Bun respects the version range defined in your package.json. To ignore this and update to the latest version, you can pass in the `force` flag.
|
||||
To update a specific dependency to the latest version:
|
||||
|
||||
```sh
|
||||
$ bun update --force
|
||||
$ bun update [package]
|
||||
```
|
||||
|
||||
## `--latest`
|
||||
|
||||
By default, `bun update` will update to the latest version of a dependency that satisfies the version range specified in your `package.json`.
|
||||
|
||||
To update to the latest version, regardless of if it's compatible with the current version range, use the `--latest` flag:
|
||||
|
||||
```sh
|
||||
$ bun update --latest
|
||||
```
|
||||
|
||||
For example, with the following `package.json`:
|
||||
|
||||
```json
|
||||
{
|
||||
"dependencies": {
|
||||
"react": "^17.0.2"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
- `bun update` would update to a version that matches `17.x`.
|
||||
- `bun update --latest` would update to a version that matches `18.x` or later.
|
||||
|
||||
@@ -15,7 +15,7 @@ To _containerize_ our application, we define a `Dockerfile`. This file contains
|
||||
```docker#Dockerfile
|
||||
# use the official Bun image
|
||||
# see all versions at https://hub.docker.com/r/oven/bun/tags
|
||||
FROM oven/bun:1 as base
|
||||
FROM oven/bun:1 AS base
|
||||
WORKDIR /usr/src/app
|
||||
|
||||
# install dependencies into temp directory
|
||||
|
||||
@@ -69,7 +69,7 @@ export const movies = sqliteTable("movies", {
|
||||
We can use the `drizzle-kit` CLI to generate an initial SQL migration.
|
||||
|
||||
```sh
|
||||
$ bunx drizzle-kit generate:sqlite --schema ./schema.ts
|
||||
$ bunx drizzle-kit generate --dialect sqlite --schema ./schema.ts
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
@@ -20,7 +20,7 @@ $ bun add @neondatabase/serverless
|
||||
Create a `.env.local` file and add your [Neon Postgres connection string](https://neon.tech/docs/connect/connect-from-any-app) to it.
|
||||
|
||||
```sh
|
||||
DATBASE_URL=postgresql://username:password@ep-adj-noun-guid.us-east-1.aws.neon.tech/neondb?sslmode=require
|
||||
DATABASE_URL=postgresql://username:password@ep-adj-noun-guid.us-east-1.aws.neon.tech/neondb?sslmode=require
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
79
docs/guides/ecosystem/render.md
Normal file
@@ -0,0 +1,79 @@
|
||||
---
|
||||
name: Deploy a Bun application on Render
|
||||
---
|
||||
|
||||
[Render](https://render.com/) is a cloud platform that lets you flexibly build, deploy, and scale your apps.
|
||||
|
||||
It offers features like auto deploys from GitHub, a global CDN, private networks, automatic HTTPS setup, and managed PostgreSQL and Redis.
|
||||
|
||||
Render supports Bun natively. You can deploy Bun apps as web services, background workers, cron jobs, and more.
|
||||
|
||||
---
|
||||
|
||||
As an example, let's deploy a simple Express HTTP server to Render.
|
||||
|
||||
---
|
||||
|
||||
Create a new GitHub repo named `myapp`. Git clone it locally.
|
||||
|
||||
```bash
|
||||
git clone git@github.com:my-github-username/myapp.git
|
||||
cd myapp
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
Add the Express library.
|
||||
|
||||
```bash
|
||||
bun add express
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
Define a simple server with Express:
|
||||
|
||||
```ts#app.ts
|
||||
import express from "express";
|
||||
|
||||
const app = express();
|
||||
const port = process.env.PORT || 3001;
|
||||
|
||||
app.get("/", (req, res) => {
|
||||
res.send("Hello World!");
|
||||
});
|
||||
|
||||
app.listen(port, () => {
|
||||
console.log(`Listening on port ${port}...`);
|
||||
});
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
Commit your changes and push to GitHub.
|
||||
|
||||
```bash
|
||||
git add app.ts bun.lockb package.json
|
||||
git commit -m "Create simple Express app"
|
||||
git push origin main
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
In your [Render Dashboard](https://dashboard.render.com/), click `New` > `Web Service` and connect your `myapp` repo.
|
||||
|
||||
---
|
||||
|
||||
In the Render UI, provide the following values during web service creation:
|
||||
|
||||
| | |
|
||||
| ----------------- | ------------- |
|
||||
| **Runtime** | `Node` |
|
||||
| **Build Command** | `bun install` |
|
||||
| **Start Command** | `bun app.ts`  |
|
||||
|
||||
---
|
||||
|
||||
That's it! Your web service will be live at its assigned `onrender.com` URL as soon as the build finishes.
|
||||
|
||||
You can view the [deploy logs](https://docs.render.com/logging#logs-for-an-individual-deploy-or-job) for details. Refer to [Render's documentation](https://docs.render.com/deploys) for a complete overview of deploying on Render.
|
||||
52
docs/guides/ecosystem/sentry.md
Normal file
@@ -0,0 +1,52 @@
|
||||
---
|
||||
name: Add Sentry to a Bun app
|
||||
---
|
||||
|
||||
[Sentry](https://sentry.io) is a developer-first error tracking and performance monitoring platform. Sentry has a first-class SDK for Bun, `@sentry/bun`, that instruments your Bun application to automatically collect error and performance data.
|
||||
|
||||
Don't already have an account and Sentry project established? Head over to [sentry.io](https://sentry.io/signup/), then return to this page.
|
||||
|
||||
---
|
||||
|
||||
To start using Sentry with Bun, first install the Sentry Bun SDK.
|
||||
|
||||
```bash
|
||||
bun add @sentry/bun
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
Then, initialize the Sentry SDK with your Sentry DSN in your app's entry file. You can find your DSN in your Sentry project settings.
|
||||
|
||||
```js
|
||||
import * as Sentry from "@sentry/bun";
|
||||
|
||||
// Ensure to call this before importing any other modules!
|
||||
Sentry.init({
|
||||
dsn: "__SENTRY_DSN__",
|
||||
|
||||
// Add Performance Monitoring by setting tracesSampleRate
|
||||
// We recommend adjusting this value in production
|
||||
tracesSampleRate: 1.0,
|
||||
});
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
You can verify that Sentry is working by capturing a test error:
|
||||
|
||||
```js
|
||||
setTimeout(() => {
|
||||
try {
|
||||
foo();
|
||||
} catch (e) {
|
||||
Sentry.captureException(e);
|
||||
}
|
||||
}, 99);
|
||||
```
|
||||
|
||||
To view and resolve the recorded error, log into [sentry.io](https://sentry.io/) and open your project. Clicking on the error's title will open a page where you can see detailed information and mark it as resolved.
|
||||
|
||||
---
|
||||
|
||||
To learn more about Sentry and using the Sentry Bun SDK, view the [Sentry documentation](https://docs.sentry.io/platforms/javascript/guides/bun).
|
||||
@@ -13,7 +13,7 @@ console.log(Bun.argv);
|
||||
Running this file with arguments results in the following:
|
||||
|
||||
```sh
|
||||
$ bun run cli.tsx --flag1 --flag2 value
|
||||
$ bun run cli.ts --flag1 --flag2 value
|
||||
[ '/path/to/bun', '/path/to/cli.ts', '--flag1', '--flag2', 'value' ]
|
||||
```
|
||||
|
||||
@@ -47,7 +47,7 @@ console.log(positionals);
|
||||
then it outputs
|
||||
|
||||
```
|
||||
$ bun run cli.tsx --flag1 --flag2 value
|
||||
$ bun run cli.ts --flag1 --flag2 value
|
||||
{
|
||||
flag1: true,
|
||||
flag2: "value",
|
||||
|
||||
@@ -16,7 +16,7 @@ await proc.exited;
|
||||
The second argument accepts a configuration object.
|
||||
|
||||
```ts
|
||||
const proc = Bun.spawn("echo", ["Hello, world!"], {
|
||||
const proc = Bun.spawn(["echo", "Hello, world!"], {
|
||||
cwd: "/tmp",
|
||||
env: { FOO: "bar" },
|
||||
onExit(proc, exitCode, signalCode, error) {
|
||||
|
||||
@@ -13,7 +13,7 @@ jobs:
|
||||
steps:
|
||||
# ...
|
||||
- uses: actions/checkout@v4
|
||||
+ - uses: oven-sh/setup-bun@v1
|
||||
+ - uses: oven-sh/setup-bun@v2
|
||||
|
||||
# run any `bun` or `bunx` command
|
||||
+ - run: bun install
|
||||
@@ -33,7 +33,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
# ...
|
||||
- uses: oven-sh/setup-bun@v1
|
||||
- uses: oven-sh/setup-bun@v2
|
||||
+ with:
|
||||
+ bun-version: 1.0.11 # or "latest", "canary", <sha>
|
||||
```
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
name: Import HTML file as text
|
||||
name: Import a HTML file as text
|
||||
---
|
||||
|
||||
To import a `.html` file in Bun as a text file, use the `type: "text"` attribute in the import statement.
|
||||
|
||||
@@ -52,7 +52,7 @@ Different thresholds can be set for line-level and function-level coverage.
|
||||
```toml
|
||||
[test]
|
||||
# to set different thresholds for lines and functions
|
||||
coverageThreshold = { line = 0.5, function = 0.7 }
|
||||
coverageThreshold = { lines = 0.5, functions = 0.7 }
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
@@ -22,7 +22,7 @@ Bun.serve<{ socketId: number }>({
|
||||
websocket: {
|
||||
// define websocket handlers
|
||||
async message(ws, message) {
|
||||
// the contextual dta is available as the `data` property
|
||||
// the contextual data is available as the `data` property
|
||||
// on the WebSocket instance
|
||||
console.log(`Received ${message} from ${ws.data.socketId}}`);
|
||||
},
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
All packages downloaded from the registry are stored in a global cache at `~/.bun/install/cache`. They are stored in subdirectories named like `${name}@${version}`, so multiple versions of a package can be cached.
|
||||
|
||||
{% details summary="Configuring cache behavior" (bunfig.toml) %}
|
||||
{% details summary="Configuring cache behavior (bunfig.toml)" %}
|
||||
|
||||
```toml
|
||||
[install.cache]
|
||||
@@ -15,8 +15,6 @@ disable = false
|
||||
disableManifest = false
|
||||
```
|
||||
|
||||
{% /details %}
|
||||
|
||||
## Minimizing re-downloads
|
||||
|
||||
Bun strives to avoid re-downloading packages multiple times. When installing a package, if the cache already contains a version in the range specified by `package.json`, Bun will use the cached package instead of downloading it again.
|
||||
|
||||
75
docs/install/npmrc.md
Normal file
@@ -0,0 +1,75 @@
|
||||
Bun supports loading configuration options from [`.npmrc`](https://docs.npmjs.com/cli/v10/configuring-npm/npmrc) files, allowing you to reuse existing registry/scope configurations.
|
||||
|
||||
{% callout %}
|
||||
|
||||
**NOTE**: We recommend migrating your `.npmrc` file to Bun's [`bunfig.toml`](/docs/runtime/bunfig) format, as it provides more flexible options and can let you configure Bun-specific options.
|
||||
|
||||
{% /callout %}
|
||||
|
||||
# Supported options
|
||||
|
||||
### `registry`: Set the default registry
|
||||
|
||||
The default registry is used to resolve packages. Its default value is npm's official registry (`https://registry.npmjs.org/`).
|
||||
|
||||
To change it, you can set the `registry` option in `.npmrc`:
|
||||
|
||||
```ini
|
||||
registry=http://localhost:4873/
|
||||
```
|
||||
|
||||
The equivalent `bunfig.toml` option is [`install.registry`](/docs/runtime/bunfig#install-registry):
|
||||
|
||||
```toml
|
||||
install.registry = "http://localhost:4873/"
|
||||
```
|
||||
|
||||
### `@<scope>:registry`: Set the registry for a specific scope
|
||||
|
||||
Allows you to set the registry for a specific scope:
|
||||
|
||||
```ini
|
||||
@myorg:registry=http://localhost:4873/
|
||||
```
|
||||
|
||||
The equivalent `bunfig.toml` option is to add a key in [`install.scopes`](/docs/runtime/bunfig#install-registry):
|
||||
|
||||
```toml
|
||||
[install.scopes]
|
||||
myorg = "http://localhost:4873/"
|
||||
```
|
||||
|
||||
### `//<registry_url>/:<key>=<value>`: Configure options for a specific registry
|
||||
|
||||
Allows you to set options for a specific registry:
|
||||
|
||||
```ini
|
||||
# set an auth token for the registry
|
||||
# ${...} is a placeholder for environment variables
|
||||
//http://localhost:4873/:_authToken=${NPM_TOKEN}
|
||||
|
||||
|
||||
# or you could set a username and password
|
||||
# note that the password is base64 encoded
|
||||
//http://localhost:4873/:username=myusername
|
||||
|
||||
//http://localhost:4873/:_password=${NPM_PASSWORD}
|
||||
|
||||
# or use _auth, which is your username and password
|
||||
# combined into a single string, which is then base 64 encoded
|
||||
//http://localhost:4873/:_auth=${NPM_AUTH}
|
||||
```
|
||||
|
||||
The following options are supported:
|
||||
|
||||
- `_authToken`
|
||||
- `username`
|
||||
- `_password` (base64 encoded password)
|
||||
- `_auth` (base64 encoded username:password, e.g. `btoa(username + ":" + password)`)
|
||||
|
||||
The equivalent `bunfig.toml` option is to add a key in [`install.scopes`](/docs/runtime/bunfig#install-registry):
|
||||
|
||||
```toml
|
||||
[install.scopes]
|
||||
myorg = { url = "http://localhost:4873/", username = "myusername", password = "$NPM_PASSWORD" }
|
||||
```
|
||||
57
docs/install/patch.md
Normal file
@@ -0,0 +1,57 @@
|
||||
`bun patch` lets you persistently patch node_modules in a maintainable, git-friendly way.
|
||||
|
||||
Sometimes, you need to make a small change to a package in `node_modules/` to fix a bug or add a feature. `bun patch` makes it easy to do this without vendoring the entire package, and lets you reuse the patch across multiple installs, projects, and machines.
|
||||
|
||||
Features:
|
||||
|
||||
- Generates `.patch` files applied to dependencies in `node_modules` on install
|
||||
- `.patch` files can be committed to your repository, reused across multiple installs, projects, and machines
|
||||
- `"patchedDependencies"` in `package.json` keeps track of patched packages
|
||||
- `bun patch` lets you patch packages in `node_modules/` while preserving the integrity of Bun's [Global Cache](https://bun.sh/docs/install/cache)
|
||||
- Test your changes locally before committing them with `bun patch --commit <pkg>`
|
||||
- To preserve disk space and keep `bun install` fast, patched packages are committed to the Global Cache and shared across projects where possible
|
||||
|
||||
#### Step 1. Prepare the package for patching
|
||||
|
||||
To get started, use `bun patch <pkg>` to prepare the package for patching:
|
||||
|
||||
```bash
|
||||
# you can supply the package name
|
||||
$ bun patch react
|
||||
|
||||
# ...and a precise version in case multiple versions are installed
|
||||
$ bun patch react@17.0.2
|
||||
|
||||
# or the path to the package
|
||||
$ bun patch node_modules/react
|
||||
```
|
||||
|
||||
{% callout %}
|
||||
**Note** — Don't forget to call `bun patch <pkg>`! This ensures the package folder in `node_modules/` contains a fresh copy of the package with no symlinks/hardlinks to Bun's cache.
|
||||
|
||||
If you forget to do this, you might end up editing the package globally in the cache!
|
||||
{% /callout %}
|
||||
|
||||
#### Step 2. Test your changes locally
|
||||
|
||||
`bun patch <pkg>` makes it safe to edit the `<pkg>` in `node_modules/` directly, while preserving the integrity of Bun's [Global Cache](https://bun.sh/docs/install/cache). This works by re-creating an unlinked clone of the package in `node_modules/` and diffing it against the original package in the Global Cache.
|
||||
|
||||
#### Step 3. Commit your changes
|
||||
|
||||
Once you're happy with your changes, run `bun patch --commit <path or pkg>`.
|
||||
|
||||
Bun will generate a patch file in `patches/`, update your `package.json` and lockfile, and Bun will start using the patched package:
|
||||
|
||||
```bash
|
||||
# you can supply the path to the patched package
|
||||
$ bun patch --commit node_modules/react
|
||||
|
||||
# ... or the package name and optionally the version
|
||||
$ bun patch --commit react@17.0.2
|
||||
|
||||
# choose the directory to store the patch files
|
||||
$ bun patch --commit react --patches-dir=mypatches
|
||||
|
||||
# `patch-commit` is available for compatibility with pnpm
|
||||
$ bun patch-commit react
|
||||
```
|
||||
@@ -30,10 +30,6 @@ $ docker pull oven/bun
|
||||
$ docker run --rm --init --ulimit memlock=-1:-1 oven/bun
|
||||
```
|
||||
|
||||
```bash#Proto
|
||||
$ proto install bun
|
||||
```
|
||||
|
||||
{% /codetabs %}
|
||||
|
||||
### Windows
|
||||
@@ -144,9 +140,8 @@ $ bun upgrade
|
||||
{% callout %}
|
||||
**Homebrew users** — To avoid conflicts with Homebrew, use `brew upgrade bun` instead.
|
||||
|
||||
**Scoop users** — To avoid conflicts with Scoop, use `scoop upgrade bun` instead.
|
||||
**Scoop users** — To avoid conflicts with Scoop, use `scoop update bun` instead.
|
||||
|
||||
**proto users** - Use `proto install bun --pin` instead.
|
||||
{% /callout %}
|
||||
|
||||
## Canary builds
|
||||
@@ -291,8 +286,4 @@ $ npm uninstall -g bun
|
||||
$ brew uninstall bun
|
||||
```
|
||||
|
||||
```bash#Proto
|
||||
$ proto uninstall bun
|
||||
```
|
||||
|
||||
{% /codetabs %}
|
||||
|
||||
12
docs/nav.ts
@@ -193,6 +193,13 @@ export default {
|
||||
page("install/overrides", "Overrides and resolutions", {
|
||||
description: "Specify version ranges for nested dependencies",
|
||||
}),
|
||||
page("install/patch", "Patch dependencies", {
|
||||
description:
|
||||
"Patch dependencies in your project to fix bugs or add features without vendoring the entire package.",
|
||||
}),
|
||||
page("install/npmrc", ".npmrc support", {
|
||||
description: "Bun supports loading some configuration options from .npmrc",
|
||||
}),
|
||||
// page("install/utilities", "Utilities", {
|
||||
// description: "Use `bun pm` to introspect your global module cache or project dependency tree.",
|
||||
// }),
|
||||
@@ -280,8 +287,11 @@ export default {
|
||||
|
||||
divider("API"),
|
||||
page("api/http", "HTTP server", {
|
||||
description: `Bun implements Web-standard fetch, plus a Bun-native API for building fast HTTP servers.`,
|
||||
description: `Bun implements a fast HTTP server built on Request/Response objects, along with supporting node:http APIs.`,
|
||||
}), // "`Bun.serve`"),
|
||||
page("api/fetch", "HTTP client", {
|
||||
description: `Bun implements Web-standard fetch with some Bun-native extensions.`,
|
||||
}), // "fetch"),
|
||||
page("api/websockets", "WebSockets", {
|
||||
description: `Bun supports server-side WebSockets with on-the-fly compression, TLS support, and a Bun-native pubsub API.`,
|
||||
}), // "`Bun.serve`"),
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
This document describes the build process for Windows. If you run into problems, please join the [#windows channel on our Discord](http://bun.sh/discord) for help.
|
||||
This document describes the build process for Windows. If you run into problems, please join the [#contributing channel on our Discord](http://bun.sh/discord) for help.
|
||||
|
||||
It is strongly recommended to use [PowerShell 7 (`pwsh.exe`)](https://learn.microsoft.com/en-us/powershell/scripting/install/installing-powershell-on-windows?view=powershell-7.4) instead of the default `powershell.exe`.
|
||||
|
||||
@@ -44,18 +44,23 @@ By default, running unverified scripts are blocked.
|
||||
|
||||
### System Dependencies
|
||||
|
||||
- Bun 1.1 or later. We use Bun to run it's own code generators.
|
||||
Bun v1.1 or later. We use Bun to run its own code generators.
|
||||
|
||||
```ps1
|
||||
> irm bun.sh/install.ps1 | iex
|
||||
```
|
||||
|
||||
- [Visual Studio](https://visualstudio.microsoft.com) with the "Desktop Development with C++" workload.
|
||||
- Install Git and CMake from this installer, if not already installed.
|
||||
[Visual Studio](https://visualstudio.microsoft.com) with the "Desktop Development with C++" workload. While installing, make sure to install Git as well, if Git for Windows is not already installed.
|
||||
|
||||
Visual Studio can be installed graphically using the wizard or through WinGet:
|
||||
|
||||
```ps1
|
||||
> winget install "Visual Studio Community 2022" --override "--add Microsoft.VisualStudio.Workload.NativeDesktop Microsoft.VisualStudio.Component.Git " -s msstore
|
||||
```
|
||||
|
||||
After Visual Studio, you need the following:
|
||||
|
||||
- LLVM 16
|
||||
- LLVM 18.1.8
|
||||
- Go
|
||||
- Rust
|
||||
- NASM
|
||||
@@ -64,24 +69,41 @@ After Visual Studio, you need the following:
|
||||
- Node.js
|
||||
|
||||
{% callout %}
|
||||
The Zig compiler is automatically downloaded, installed, and updated by the building process.
|
||||
**Note** – The Zig compiler is automatically downloaded, installed, and updated by the building process.
|
||||
{% /callout %}
|
||||
|
||||
[Scoop](https://scoop.sh) can be used to install these remaining tools easily:
|
||||
[WinGet](https://learn.microsoft.com/windows/package-manager/winget) or [Scoop](https://scoop.sh) can be used to install these remaining tools easily:
|
||||
|
||||
```ps1
|
||||
{% codetabs group="a" %}
|
||||
|
||||
```ps1#WinGet
|
||||
## Select "Add LLVM to the system PATH for all users" in the LLVM installer
|
||||
> winget install -i LLVM.LLVM -v 18.1.8 && winget install GoLang.Go Rustlang.Rustup NASM.NASM StrawberryPerl.StrawberryPerl RubyInstallerTeam.Ruby.3.2 OpenJS.NodeJS.LTS
|
||||
```
|
||||
|
||||
```ps1#Scoop
|
||||
> irm https://get.scoop.sh | iex
|
||||
> scoop install nodejs-lts go rust nasm ruby perl
|
||||
# scoop seems to be buggy if you install llvm and the rest at the same time
|
||||
> scoop install llvm@16.0.6
|
||||
> scoop install llvm@18.1.8
|
||||
```
|
||||
|
||||
{% /codetabs %}
|
||||
|
||||
If you intend on building WebKit locally (optional), you should install these packages:
|
||||
|
||||
```ps1
|
||||
{% codetabs group="a" %}
|
||||
|
||||
```ps1#WinGet
|
||||
> winget install ezwinports.make Cygwin.Cygwin Python.Python.3.12
|
||||
```
|
||||
|
||||
```ps1#Scoop
|
||||
> scoop install make cygwin python
|
||||
```
|
||||
|
||||
{% /codetabs %}
|
||||
|
||||
From here on out, it is **expected that you use a PowerShell terminal with `.\scripts\env.ps1` sourced**. This script is available in the Bun repository and can be loaded by executing it:
|
||||
|
||||
```ps1
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
There are four parts to the CI build:
|
||||
|
||||
- Dependencies: should be cached across builds as much as possible; this step depends on the git submodule hashes
|
||||
- Zig Object: depends on \*.zig and potentially src/js
|
||||
- Zig Object: depends on \*.zig and src/js
|
||||
- C++ Object: depends on \*.cpp and src/js
|
||||
- Linking: depends on the above three
|
||||
|
||||
@@ -15,7 +15,7 @@ BUN_DEPS_OUT_DIR="/optional/out/dir" bash ./scripts/all-dependencies.sh
|
||||
|
||||
## Zig Object
|
||||
|
||||
This does not have a dependency on WebKit or any of the dependencies at all. It can be compiled without checking out submodules, but you will need to have bun install run. It can be very easily cross compiled.
|
||||
This does not depend on WebKit or on any of the dependencies at all. It can be compiled without checking out submodules, but you will need to have run `bun install`. It can be very easily cross-compiled. Note that the zig object is always `bun-zig.o`.
|
||||
|
||||
```sh
|
||||
BUN_REPO=/path/to/oven-sh/bun
|
||||
@@ -27,9 +27,9 @@ cmake $BUN_REPO \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DCPU_TARGET="native" \
|
||||
-DZIG_TARGET="native" \
|
||||
-DBUN_ZIG_OBJ="./bun-zig.o"
|
||||
-DBUN_ZIG_OBJ_DIR="./build"
|
||||
|
||||
ninja ./bun-zig.o
|
||||
ninja ./build/bun-zig.o
|
||||
# -> bun-zig.o
|
||||
```
|
||||
|
||||
@@ -60,12 +60,12 @@ cmake $BUN_REPO \
|
||||
-G Ninja \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DBUN_LINK_ONLY=1 \
|
||||
-DBUN_ZIG_OBJ="/path/to/bun-zig.o" \
|
||||
-DBUN_ZIG_OBJ_DIR="/path/to/bun-zig-dir" \
|
||||
-DBUN_CPP_ARCHIVE="/path/to/bun-cpp-objects.a"
|
||||
|
||||
ninja
|
||||
|
||||
# optiona:
|
||||
# optional:
|
||||
# -DBUN_DEPS_OUT_DIR=... custom deps dir, use this to cache the built deps between rebuilds
|
||||
# -DWEBKIT_DIR=... same thing, but it's probably fast enough to pull from github releases
|
||||
|
||||
|
||||
@@ -1 +1 @@
|
||||
../../LICENSE
|
||||
../../LICENSE.md
|
||||
@@ -1,8 +1,10 @@
|
||||
---
|
||||
name: Debugger
|
||||
name: Debugging
|
||||
---
|
||||
|
||||
Bun speaks the [WebKit Inspector Protocol](https://github.com/oven-sh/bun/blob/main/packages/bun-vscode/types/jsc.d.ts), so you can debug your code with an interactive debugger. For demonstration purposes, consider the following simple web server.
|
||||
Bun speaks the [WebKit Inspector Protocol](https://github.com/oven-sh/bun/blob/main/packages/bun-types/jsc.d.ts), so you can debug your code with an interactive debugger. For demonstration purposes, consider the following simple web server.
|
||||
|
||||
## Debugging JavaScript and TypeScript
|
||||
|
||||
```ts#server.ts
|
||||
Bun.serve({
|
||||
@@ -88,3 +90,236 @@ Here's a cheat sheet explaining the functions of the control flow buttons.
|
||||
- _Step out_ — If the current statement is a function call, the debugger will finish executing the call, then "step out" of the function to the location where it was called.
|
||||
|
||||
{% image src="https://github-production-user-asset-6210df.s3.amazonaws.com/3084745/261510346-6a94441c-75d3-413a-99a7-efa62365f83d.png" /%}
|
||||
|
||||
### Visual Studio Code Debugger
|
||||
|
||||
Experimental support for debugging Bun scripts is available in Visual Studio Code. To use it, you'll need to install the [Bun VSCode extension](https://bun.sh/guides/runtime/vscode-debugger).
|
||||
|
||||
## Debugging Network Requests
|
||||
|
||||
The `BUN_CONFIG_VERBOSE_FETCH` environment variable lets you log network requests made with `fetch()` or `node:http` automatically.
|
||||
|
||||
| Value | Description |
|
||||
| ------- | ---------------------------------- |
|
||||
| `curl` | Print requests as `curl` commands. |
|
||||
| `true` | Print request & response info |
|
||||
| `false` | Don't print anything (default). |
|
||||
|
||||
### Print fetch & node:http requests as curl commands
|
||||
|
||||
Bun also supports printing `fetch()` and `node:http` network requests as `curl` commands by setting the environment variable `BUN_CONFIG_VERBOSE_FETCH` to `curl`.
|
||||
|
||||
```ts
|
||||
process.env.BUN_CONFIG_VERBOSE_FETCH = "curl";
|
||||
|
||||
await fetch("https://example.com", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: JSON.stringify({ foo: "bar" }),
|
||||
});
|
||||
```
|
||||
|
||||
This prints the `fetch` request as a single-line `curl` command that you can copy-paste into your terminal to replicate the request.
|
||||
|
||||
```sh
|
||||
[fetch] $ curl --http1.1 "https://example.com/" -X POST -H "content-type: application/json" -H "Connection: keep-alive" -H "User-Agent: Bun/1.1.14" -H "Accept: */*" -H "Host: example.com" -H "Accept-Encoding: gzip, deflate, br" --compressed -H "Content-Length: 13" --data-raw "{\"foo\":\"bar\"}"
|
||||
[fetch] > HTTP/1.1 POST https://example.com/
|
||||
[fetch] > content-type: application/json
|
||||
[fetch] > Connection: keep-alive
|
||||
[fetch] > User-Agent: Bun/1.1.14
|
||||
[fetch] > Accept: */*
|
||||
[fetch] > Host: example.com
|
||||
[fetch] > Accept-Encoding: gzip, deflate, br
|
||||
[fetch] > Content-Length: 13
|
||||
|
||||
[fetch] < 200 OK
|
||||
[fetch] < Accept-Ranges: bytes
|
||||
[fetch] < Cache-Control: max-age=604800
|
||||
[fetch] < Content-Type: text/html; charset=UTF-8
|
||||
[fetch] < Date: Tue, 18 Jun 2024 05:12:07 GMT
|
||||
[fetch] < Etag: "3147526947"
|
||||
[fetch] < Expires: Tue, 25 Jun 2024 05:12:07 GMT
|
||||
[fetch] < Last-Modified: Thu, 17 Oct 2019 07:18:26 GMT
|
||||
[fetch] < Server: EOS (vny/044F)
|
||||
[fetch] < Content-Length: 1256
|
||||
```
|
||||
|
||||
The lines with `[fetch] >` are the request from your local code, and the lines with `[fetch] <` are the response from the remote server.
|
||||
|
||||
The `BUN_CONFIG_VERBOSE_FETCH` environment variable applies to both `fetch()` and `node:http` requests, so requests made through either API are logged without any extra configuration.
|
||||
|
||||
To print without the `curl` command, set `BUN_CONFIG_VERBOSE_FETCH` to `true`.
|
||||
|
||||
```ts
|
||||
process.env.BUN_CONFIG_VERBOSE_FETCH = "true";
|
||||
|
||||
await fetch("https://example.com", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: JSON.stringify({ foo: "bar" }),
|
||||
});
|
||||
```
|
||||
|
||||
This prints the following to the console:
|
||||
|
||||
```sh
|
||||
[fetch] > HTTP/1.1 POST https://example.com/
|
||||
[fetch] > content-type: application/json
|
||||
[fetch] > Connection: keep-alive
|
||||
[fetch] > User-Agent: Bun/1.1.14
|
||||
[fetch] > Accept: */*
|
||||
[fetch] > Host: example.com
|
||||
[fetch] > Accept-Encoding: gzip, deflate, br
|
||||
[fetch] > Content-Length: 13
|
||||
|
||||
[fetch] < 200 OK
|
||||
[fetch] < Accept-Ranges: bytes
|
||||
[fetch] < Cache-Control: max-age=604800
|
||||
[fetch] < Content-Type: text/html; charset=UTF-8
|
||||
[fetch] < Date: Tue, 18 Jun 2024 05:12:07 GMT
|
||||
[fetch] < Etag: "3147526947"
|
||||
[fetch] < Expires: Tue, 25 Jun 2024 05:12:07 GMT
|
||||
[fetch] < Last-Modified: Thu, 17 Oct 2019 07:18:26 GMT
|
||||
[fetch] < Server: EOS (vny/044F)
|
||||
[fetch] < Content-Length: 1256
|
||||
```
|
||||
|
||||
## Stacktraces & sourcemaps
|
||||
|
||||
Bun transpiles every file, which sounds like it would mean that the stack traces you see in the console would unhelpfully point to the transpiled output. To address this, Bun automatically generates and serves sourcemapped files for every file it transpiles. When you see a stack trace in the console, you can click on the file path and be taken to the original source code, even if it was written in TypeScript or JSX, or had some other transformation applied.
|
||||
|
||||
<!-- TODO: uncomment once v1.1.13 regression is fixed (cc @paperdave) -->
|
||||
<!-- In Bun, each `Error` object gets four additional properties:
|
||||
|
||||
- `line` — the source-mapped line number. This number points to the input source code, not the transpiled output.
|
||||
- `column` — the source-mapped column number. This number points to the input source code, not the transpiled output.
|
||||
- `originalColumn` — the column number pointing to transpiled source code, without sourcemaps. This number comes from JavaScriptCore.
|
||||
- `originalLine` — the line number pointing to transpiled source code, without sourcemaps. This number comes from JavaScriptCore.
|
||||
|
||||
These properties are populated lazily when `error.stack` is accessed. -->
|
||||
|
||||
Bun automatically loads sourcemaps both at runtime when transpiling files on-demand, and when using `bun build` to precompile files ahead of time.
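For instance, a minimal sketch of emitting an external sourcemap when precompiling with the `Bun.build` API (the entrypoint and output paths here are hypothetical):

```ts
// Precompile with an external sourcemap so stack traces can be
// mapped back to the original TypeScript source at runtime.
await Bun.build({
  entrypoints: ["./src/index.ts"],
  outdir: "./dist",
  sourcemap: "external",
});
```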
|
||||
|
||||
### Syntax-highlighted source code preview
|
||||
|
||||
To help with debugging, Bun automatically prints a small source-code preview when an unhandled exception or rejection occurs. You can simulate this behavior by calling `Bun.inspect(error)`:
|
||||
|
||||
```ts
|
||||
// Create an error
|
||||
const err = new Error("Something went wrong");
|
||||
console.log(Bun.inspect(err, { colors: true }));
|
||||
```
|
||||
|
||||
This prints a syntax-highlighted preview of the source code where the error occurred, along with the error message and stack trace.
|
||||
|
||||
```js
|
||||
1 | // Create an error
|
||||
2 | const err = new Error("Something went wrong");
|
||||
^
|
||||
error: Something went wrong
|
||||
at file.js:2:13
|
||||
```
|
||||
|
||||
### V8 Stack Traces
|
||||
|
||||
Bun uses JavaScriptCore as its engine, but much of the Node.js ecosystem & npm expects V8. JavaScript engines differ in `error.stack` formatting. Bun intends to be a drop-in replacement for Node.js, and that means it's our job to make sure that even though the engine is different, the stack traces are as similar as possible.
|
||||
|
||||
That's why when you log `error.stack` in Bun, the formatting is the same as in Node.js's V8 engine. This is especially useful when you're using libraries that expect V8 stack traces.
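As a quick illustration (the file name and positions below are hypothetical), `error.stack` in Bun uses the familiar V8-style `at functionName (file:line:column)` frames:

```ts
function outer() {
  return new Error("boom");
}

console.log(outer().stack);
// Error: boom
//     at outer (example.ts:2:10)
//     at module code (example.ts:5:13)
```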
|
||||
|
||||
#### V8 Stack Trace API
|
||||
|
||||
Bun implements the [V8 Stack Trace API](https://v8.dev/docs/stack-trace-api), which is a set of functions that allow you to manipulate stack traces.
|
||||
|
||||
##### Error.prepareStackTrace
|
||||
|
||||
The `Error.prepareStackTrace` function is a global function that lets you customize the stack trace output. This function is called with the error object and an array of `CallSite` objects and lets you return a custom stack trace.
|
||||
|
||||
```ts
|
||||
Error.prepareStackTrace = (err, stack) => {
|
||||
return stack.map(callSite => {
|
||||
return callSite.getFileName();
|
||||
});
|
||||
};
|
||||
|
||||
const err = new Error("Something went wrong");
|
||||
console.log(err.stack);
|
||||
// [ "error.js" ]
|
||||
```
|
||||
|
||||
The `CallSite` object has the following methods:
|
||||
|
||||
| Method | Returns |
|
||||
| -------------------------- | ----------------------------------------------------- |
|
||||
| `getThis` | `this` value of the function call |
|
||||
| `getTypeName` | typeof `this` |
|
||||
| `getFunction` | function object |
|
||||
| `getFunctionName` | function name as a string |
|
||||
| `getMethodName` | method name as a string |
|
||||
| `getFileName` | file name or URL |
|
||||
| `getLineNumber` | line number |
|
||||
| `getColumnNumber` | column number |
|
||||
| `getEvalOrigin` | `undefined` |
|
||||
| `getScriptNameOrSourceURL` | source URL |
|
||||
| `isToplevel` | returns `true` if the function is in the global scope |
|
||||
| `isEval` | returns `true` if the function is an `eval` call |
|
||||
| `isNative` | returns `true` if the function is native |
|
||||
| `isConstructor` | returns `true` if the function is a constructor |
|
||||
| `isAsync` | returns `true` if the function is `async` |
|
||||
| `isPromiseAll` | Not implemented yet. |
|
||||
| `getPromiseIndex` | Not implemented yet. |
|
||||
| `toString` | returns a string representation of the call site |
|
||||
|
||||
In some cases, the `Function` object may have already been garbage collected, so some of these methods may return `undefined`.
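As a sketch of how several of these methods compose (the output format here is just an example), a custom `Error.prepareStackTrace` can rebuild a compact trace from the `CallSite` objects:

```ts
Error.prepareStackTrace = (err, stack) => {
  const frames = stack.map(callSite => {
    // getFunctionName()/getFileName() may return null, e.g. for anonymous frames.
    const fn = callSite.getFunctionName() ?? "<anonymous>";
    const file = callSite.getFileName() ?? "<unknown>";
    return `  at ${fn} (${file}:${callSite.getLineNumber()}:${callSite.getColumnNumber()})`;
  });
  return [`${err.name}: ${err.message}`, ...frames].join("\n");
};

const err = new Error("Something went wrong");
console.log(err.stack);
```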
|
||||
|
||||
##### Error.captureStackTrace(error, startFn)
|
||||
|
||||
The `Error.captureStackTrace` function lets you capture a stack trace at a specific point in your code, rather than at the point where the error was thrown.
|
||||
|
||||
This can be helpful when you have callbacks or asynchronous code that makes it difficult to determine where an error originated. The 2nd argument to `Error.captureStackTrace` is the function where you want the stack trace to start.
|
||||
|
||||
For example, the below code will make `err.stack` point to the code calling `fn()`, even though the error was thrown at `myInner`.
|
||||
|
||||
```ts
|
||||
const fn = () => {
|
||||
function myInner() {
|
||||
throw new Error("here!");
|
||||
}
|
||||
|
||||
try {
|
||||
myInner();
|
||||
} catch (err) {
|
||||
console.log(err.stack);
|
||||
console.log("");
|
||||
console.log("-- captureStackTrace --");
|
||||
console.log("");
|
||||
Error.captureStackTrace(err, fn);
|
||||
console.log(err.stack);
|
||||
}
|
||||
};
|
||||
|
||||
fn();
|
||||
```
|
||||
|
||||
This logs the following:
|
||||
|
||||
```sh
|
||||
Error: here!
|
||||
at myInner (file.js:4:15)
|
||||
at fn (file.js:8:5)
|
||||
at module code (file.js:17:1)
|
||||
at moduleEvaluation (native)
|
||||
at moduleEvaluation (native)
|
||||
at <anonymous> (native)
|
||||
|
||||
-- captureStackTrace --
|
||||
|
||||
Error: here!
|
||||
at module code (file.js:17:1)
|
||||
at moduleEvaluation (native)
|
||||
at moduleEvaluation (native)
|
||||
at <anonymous> (native)
|
||||
```
|
||||
|
||||
@@ -143,6 +143,16 @@ These environment variables are read by Bun and configure aspects of its behavio
|
||||
|
||||
---
|
||||
|
||||
- `NODE_TLS_REJECT_UNAUTHORIZED`
|
||||
- `NODE_TLS_REJECT_UNAUTHORIZED=0` disables SSL certificate validation. This is useful for testing and debugging, but you should be very hesitant to use this in production. Note: This environment variable was originally introduced by Node.js and we kept the name for compatibility.
|
||||
|
||||
---
|
||||
|
||||
- `BUN_CONFIG_VERBOSE_FETCH`
|
||||
- If `BUN_CONFIG_VERBOSE_FETCH=curl`, then fetch requests will log the URL, method, request headers, and response headers to the console. This is useful for debugging network requests. This also works with `node:http`. `BUN_CONFIG_VERBOSE_FETCH=1` is equivalent to `BUN_CONFIG_VERBOSE_FETCH=curl` except without the `curl` output.
|
||||
|
||||
---
|
||||
|
||||
- `BUN_RUNTIME_TRANSPILER_CACHE_PATH`
|
||||
- The runtime transpiler caches the transpiled output of source files larger than 50 kb. This makes CLIs using Bun load faster. If `BUN_RUNTIME_TRANSPILER_CACHE_PATH` is set, then the runtime transpiler will cache transpiled output to the specified directory. If `BUN_RUNTIME_TRANSPILER_CACHE_PATH` is set to an empty string or the string `"0"`, then the runtime transpiler will not cache transpiled output. If `BUN_RUNTIME_TRANSPILER_CACHE_PATH` is unset, then the runtime transpiler will cache transpiled output to the platform-specific cache directory.
|
||||
|
||||
@@ -169,12 +179,12 @@ These environment variables are read by Bun and configure aspects of its behavio
|
||||
---
|
||||
|
||||
- `BUN_CONFIG_NO_CLEAR_TERMINAL_ON_RELOAD`
|
||||
- If `BUN_CONFIG_NO_CLEAR_TERMINAL_ON_RELOAD=1`, then `bun --watch` will not clear the console on reload
|
||||
- If `BUN_CONFIG_NO_CLEAR_TERMINAL_ON_RELOAD=true`, then `bun --watch` will not clear the console on reload
|
||||
|
||||
---
|
||||
|
||||
- `DO_NOT_TRACK`
|
||||
- Telemetry is not sent yet as of November 28th, 2023, but we are planning to add telemetry in the coming months. If `DO_NOT_TRACK=1`, then analytics are [disabled](https://do-not-track.dev/). Bun records bundle timings (so we can answer with data, "is Bun getting faster?") and feature usage (e.g., "are people actually using macros?"). The request body size is about 60 bytes, so it's not a lot of data. Equivalent of `telemetry=false` in bunfig.
|
||||
- Disable uploading crash reports to `bun.report` on crash. On macOS & Windows, crash report uploads are enabled by default. Otherwise, telemetry is not sent yet as of May 21st, 2024, but we are planning to add telemetry in the coming weeks. If `DO_NOT_TRACK=1`, then auto-uploading crash reports and telemetry are both [disabled](https://do-not-track.dev/).
|
||||
|
||||
{% /table %}
|
||||
|
||||
|
||||
@@ -48,14 +48,6 @@ In this case, we are importing from `./hello`, a relative path with no extension
|
||||
- `./hello/index.cjs`
|
||||
- `./hello/index.json`
|
||||
|
||||
Import paths are case-insensitive, meaning these are all valid imports:
|
||||
|
||||
```ts#index.ts
|
||||
import { hello } from "./hello";
|
||||
import { hello } from "./HELLO";
|
||||
import { hello } from "./hElLo";
|
||||
```
|
||||
|
||||
Import paths can optionally include extensions. If an extension is present, Bun will only check for a file with that exact extension.
|
||||
|
||||
```ts#index.ts
|
||||
|
||||
@@ -18,7 +18,7 @@ This page is updated regularly to reflect compatibility status of the latest ver
|
||||
|
||||
### [`node:child_process`](https://nodejs.org/api/child_process.html)
|
||||
|
||||
🟡 Missing `Stream` stdio, `proc.gid` `proc.uid`. IPC cannot send socket handles and only works with other `bun` processes.
|
||||
🟡 Missing `proc.gid` `proc.uid`. `Stream` class not exported. IPC cannot send socket handles. Node.js <> Bun IPC can be used with JSON serialization.
|
||||
|
||||
### [`node:cluster`](https://nodejs.org/api/cluster.html)
|
||||
|
||||
@@ -53,7 +53,7 @@ Some methods are not optimized yet.
|
||||
|
||||
### [`node:events`](https://nodejs.org/api/events.html)
|
||||
|
||||
🟡 Missing `addAbortListener` `events.getMaxListeners`
|
||||
🟡 `events.addAbortListener` & `events.getMaxListeners` do not support (web api) `EventTarget`
|
||||
|
||||
### [`node:fs`](https://nodejs.org/api/fs.html)
|
||||
|
||||
@@ -61,7 +61,7 @@ Some methods are not optimized yet.
|
||||
|
||||
### [`node:http`](https://nodejs.org/api/http.html)
|
||||
|
||||
🟢 Fully implemented.
|
||||
🟢 Fully implemented. Outgoing client request body is currently buffered instead of streamed.
|
||||
|
||||
### [`node:http2`](https://nodejs.org/api/http2.html)
|
||||
|
||||
@@ -69,7 +69,7 @@ Some methods are not optimized yet.
|
||||
|
||||
### [`node:https`](https://nodejs.org/api/https.html)
|
||||
|
||||
🟢 Fully implemented.
|
||||
🟢 APIs are implemented, but `Agent` is not always used yet.
|
||||
|
||||
### [`node:inspector`](https://nodejs.org/api/inspector.html)
|
||||
|
||||
@@ -169,7 +169,7 @@ Some methods are not optimized yet.
|
||||
|
||||
### [`node:worker_threads`](https://nodejs.org/api/worker_threads.html)
|
||||
|
||||
🟡 `Worker` doesn't support the following options: `eval` `stdin` `stdout` `stderr` `trackedUnmanagedFds` `resourceLimits`. Missing `markAsUntransferable` `moveMessagePortToContext` `getHeapSnapshot`.
|
||||
🟡 `Worker` doesn't support the following options: `stdin` `stdout` `stderr` `trackedUnmanagedFds` `resourceLimits`. Missing `markAsUntransferable` `moveMessagePortToContext` `getHeapSnapshot`.
|
||||
|
||||
### [`node:zlib`](https://nodejs.org/api/zlib.html)
|
||||
|
||||
@@ -193,7 +193,7 @@ The table below lists all globals implemented by Node.js and Bun's current compa
|
||||
|
||||
### [`Buffer`](https://nodejs.org/api/buffer.html#class-buffer)
|
||||
|
||||
🟡 Incomplete implementation of `base64` and `base64url` encodings.
|
||||
🟢 Fully implemented.
|
||||
|
||||
### [`ByteLengthQueuingStrategy`](https://developer.mozilla.org/en-US/docs/Web/API/ByteLengthQueuingStrategy)
|
||||
|
||||
@@ -433,7 +433,7 @@ The table below lists all globals implemented by Node.js and Bun's current compa
|
||||
|
||||
### [`URL`](https://developer.mozilla.org/en-US/docs/Web/API/URL)
|
||||
|
||||
🟡 `URL.createObjectURL` is missing. See [Issue #3925](https://github.com/oven-sh/bun/issues/3925)
|
||||
🟢 Fully implemented.
|
||||
|
||||
### [`URLSearchParams`](https://developer.mozilla.org/en-US/docs/Web/API/URLSearchParams)
|
||||
|
||||
|
||||
@@ -418,7 +418,7 @@ For cross-platform compatibility, Bun Shell implements a set of builtin commands
|
||||
|
||||
**Not** implemented yet, but planned:
|
||||
|
||||
- See https://github.com/oven-sh/bun/issues/9716 for the full list.
|
||||
- See [Issue #9716](https://github.com/oven-sh/bun/issues/9716) for the full list.
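The builtins that are already implemented are used through Bun Shell's `$` template literal; a minimal sketch (the commands and paths are illustrative):

```ts
import { $ } from "bun";

// `echo` and `ls` resolve to Bun Shell builtins, so the same script
// runs on Windows, macOS, and Linux without relying on a system shell.
await $`echo "hello from bun shell"`;
const listing = await $`ls .`.text();
console.log(listing);
```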
|
||||
|
||||
## Utilities
|
||||
|
||||
|
||||
@@ -63,3 +63,29 @@ Internally, Bun transpiles all files by default, so Bun automatically generates
|
||||
[test]
|
||||
coverageIgnoreSourcemaps = true # default false
|
||||
```
|
||||
|
||||
### Coverage reporters
|
||||
|
||||
By default, coverage reports will be printed to the console.
|
||||
|
||||
For persistent code coverage reports in CI environments and for other tools, you can pass a `--coverage-reporter=lcov` CLI option or `coverageReporter` option in `bunfig.toml`.
|
||||
|
||||
```toml
|
||||
[test]
|
||||
coverageReporter = ["text", "lcov"] # default ["text"]
|
||||
coverageDir = "path/to/somewhere" # default "coverage"
|
||||
```
|
||||
|
||||
| Reporter | Description |
|
||||
| -------- | --------------------------------------------------------------------------- |
|
||||
| `text` | Prints a text summary of the coverage to the console. |
|
||||
| `lcov` | Saves coverage in [lcov](https://github.com/linux-test-project/lcov) format. |
|
||||
|
||||
#### lcov coverage reporter
|
||||
|
||||
To generate an lcov report, you can use the `lcov` reporter. This will generate an `lcov.info` file in the `coverage` directory.
|
||||
|
||||
```toml
|
||||
[test]
|
||||
coverageReporter = "lcov"
|
||||
```
|
||||
|
||||
@@ -196,3 +196,41 @@ As of Bun v1.0.19, Bun automatically resolves the `specifier` argument to `mock.
|
||||
After resolution, the mocked module is stored in the ES Module registry **and** the CommonJS require cache. This means that you can use `import` and `require` interchangeably for mocked modules.
|
||||
|
||||
The callback function is called lazily, only if the module is imported or required. This means you can use `mock.module()` to mock modules that don't exist yet, as well as modules that are imported by other modules.
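For example, a minimal sketch of this lazy behavior (the `./weather` module path and its `getForecast` export are hypothetical):

```ts
import { expect, mock, test } from "bun:test";

// The factory below only runs if something actually imports "./weather",
// and it works even if that module doesn't exist on disk yet.
mock.module("./weather", () => {
  return {
    getForecast: () => "sunny",
  };
});

test("uses the mocked module", async () => {
  const { getForecast } = await import("./weather");
  expect(getForecast()).toBe("sunny");
});
```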
|
||||
|
||||
## Restore all function mocks to their original values with `mock.restore()`
|
||||
|
||||
Instead of restoring each mock individually with `mockFn.mockRestore()`, restore all mocks at once by calling `mock.restore()`. Doing so does not reset the value of modules overridden with `mock.module()`.
|
||||
|
||||
Calling `mock.restore()` in an `afterEach` block in each test file, or even in your [test preload code](https://bun.sh/docs/runtime/bunfig#test-preload), can reduce the amount of boilerplate in your tests; a minimal preload sketch follows the example below.
|
||||
|
||||
```ts
|
||||
import { expect, mock, spyOn, test } from "bun:test";
|
||||
|
||||
import * as fooModule from './foo.ts';
|
||||
import * as barModule from './bar.ts';
|
||||
import * as bazModule from './baz.ts';
|
||||
|
||||
test('foo, bar, baz', () => {
|
||||
const fooSpy = spyOn(fooModule, 'foo');
|
||||
const barSpy = spyOn(barModule, 'bar');
|
||||
const bazSpy = spyOn(bazModule, 'baz');
|
||||
|
||||
expect(fooSpy()).toBe('foo');
|
||||
expect(barSpy()).toBe('bar');
|
||||
expect(bazSpy()).toBe('baz');
|
||||
|
||||
fooSpy.mockImplementation(() => 42);
|
||||
barSpy.mockImplementation(() => 43);
|
||||
bazSpy.mockImplementation(() => 44);
|
||||
|
||||
expect(fooSpy()).toBe(42);
|
||||
expect(barSpy()).toBe(43);
|
||||
expect(bazSpy()).toBe(44);
|
||||
|
||||
mock.restore();
|
||||
|
||||
expect(fooSpy()).toBe('foo');
|
||||
expect(barSpy()).toBe('bar');
|
||||
expect(bazSpy()).toBe('baz');
|
||||
});
|
||||
```
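As mentioned above, a minimal sketch of the `afterEach` approach, written as a preload file (the `test-setup.ts` file name is hypothetical):

```ts
// test-setup.ts (hypothetical): register under `[test].preload` in bunfig.toml
import { afterEach, mock } from "bun:test";

afterEach(() => {
  // Restores every function mock created with spyOn() or mock(),
  // but does not undo mock.module() overrides.
  mock.restore();
});
```

With the file registered as a test preload, every test file gets this cleanup automatically.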
|
||||
|
||||
@@ -305,6 +305,30 @@ Bun implements the following matchers. Full Jest compatibility is on the roadmap
|
||||
|
||||
---
|
||||
|
||||
- ✅
|
||||
- [`.toContainAllKeys()`](https://jest-extended.jestcommunity.dev/docs/matchers/Object#tocontainallkeyskeys)
|
||||
|
||||
---
|
||||
|
||||
- ✅
|
||||
- [`.toContainValue()`](https://jest-extended.jestcommunity.dev/docs/matchers/Object#tocontainvaluevalue)
|
||||
|
||||
---
|
||||
|
||||
- ✅
|
||||
- [`.toContainValues()`](https://jest-extended.jestcommunity.dev/docs/matchers/Object#tocontainvaluesvalues)
|
||||
|
||||
---
|
||||
|
||||
- ✅
|
||||
- [`.toContainAllValues()`](https://jest-extended.jestcommunity.dev/docs/matchers/Object#tocontainallvaluesvalues)
|
||||
|
||||
---
|
||||
- ✅
|
||||
- [`.toContainAnyValues()`](https://jest-extended.jestcommunity.dev/docs/matchers/Object#tocontainanyvaluesvalues)
|
||||
|
||||
---
|
||||
|
||||
- ✅
|
||||
- [`.toStrictEqual()`](https://jestjs.io/docs/expect#tostrictequalvalue)
|
||||
|
||||
@@ -415,12 +439,12 @@ Bun implements the following matchers. Full Jest compatibility is on the roadmap
|
||||
|
||||
---
|
||||
|
||||
- ❌
|
||||
- ✅
|
||||
- [`.toHaveReturned()`](https://jestjs.io/docs/expect#tohavereturned)
|
||||
|
||||
---
|
||||
|
||||
- ❌
|
||||
- ✅
|
||||
- [`.toHaveReturnedTimes()`](https://jestjs.io/docs/expect#tohavereturnedtimesnumber)
|
||||
|
||||
---
|
||||
|
||||
@@ -3,7 +3,7 @@ const std = @import("std");
|
||||
const CompressionFramework = struct {
|
||||
var handle: ?*anyopaque = null;
|
||||
pub fn load() !void {
|
||||
handle = std.os.darwin.dlopen("libcompression.dylib", 1);
|
||||
handle = std.posix.darwin.dlopen("libcompression.dylib", 1);
|
||||
|
||||
if (handle == null)
|
||||
return error.@"failed to load Compression.framework";
|
||||
@@ -247,7 +247,7 @@ pub fn main() anyerror!void {
|
||||
|
||||
if (algorithm == null or operation == null) {
|
||||
try std.io.getStdErr().writer().print("to compress: {s} ./file ./out.{{br,gz,lz4,lzfse}}\nto decompress: {s} ./out.{{br,gz,lz4,lzfse}} ./out\n", .{ argv0, argv0 });
|
||||
std.os.exit(1);
|
||||
std.posix.exit(1);
|
||||
}
|
||||
|
||||
var output_file: std.fs.File = undefined;
|
||||
|
||||
@@ -62,8 +62,8 @@ const MethodNames = std.ComptimeStringMap(Method, .{
|
||||
.{ "head", Method.HEAD },
|
||||
});
|
||||
|
||||
var file_path_buf: [bun.MAX_PATH_BYTES + 1]u8 = undefined;
|
||||
var cwd_buf: [bun.MAX_PATH_BYTES + 1]u8 = undefined;
|
||||
var file_path_buf: bun.PathBuffer = undefined;
|
||||
var cwd_buf: bun.PathBuffer = undefined;
|
||||
|
||||
pub const Arguments = struct {
|
||||
url: URL,
|
||||
@@ -182,7 +182,7 @@ pub fn main() anyerror!void {
|
||||
|
||||
try channel.buffer.ensureTotalCapacity(1);
|
||||
|
||||
try HTTPThread.init();
|
||||
HTTPThread.init();
|
||||
|
||||
var ctx = try default_allocator.create(HTTP.HTTPChannelContext);
|
||||
ctx.* = .{
|
||||
|
||||
@@ -63,8 +63,8 @@ const MethodNames = std.ComptimeStringMap(Method, .{
|
||||
.{ "head", Method.HEAD },
|
||||
});
|
||||
|
||||
var file_path_buf: [bun.MAX_PATH_BYTES + 1]u8 = undefined;
|
||||
var cwd_buf: [bun.MAX_PATH_BYTES + 1]u8 = undefined;
|
||||
var file_path_buf: bun.PathBuffer = undefined;
|
||||
var cwd_buf: bun.PathBuffer = undefined;
|
||||
|
||||
pub const Arguments = struct {
|
||||
url: URL,
|
||||
@@ -198,7 +198,7 @@ pub fn main() anyerror!void {
|
||||
try channel.buffer.ensureTotalCapacity(args.count);
|
||||
|
||||
try NetworkThread.init();
|
||||
if (args.concurrency > 0) HTTP.AsyncHTTP.max_simultaneous_requests.store(args.concurrency, .Monotonic);
|
||||
if (args.concurrency > 0) HTTP.AsyncHTTP.max_simultaneous_requests.store(args.concurrency, .monotonic);
|
||||
const Group = struct {
|
||||
response_body: MutableString = undefined,
|
||||
context: HTTP.HTTPChannelContext = undefined,
|
||||
|
||||
@@ -126,11 +126,11 @@ pub fn main() anyerror!void {
|
||||
Output.prettyErrorln("For {d} messages and {d} threads:", .{ count, thread_count });
|
||||
Output.flush();
|
||||
defer Output.flush();
|
||||
const runs = if (std.os.getenv("RUNS")) |run_count| try std.fmt.parseInt(usize, run_count, 10) else 1;
|
||||
const runs = if (std.posix.getenv("RUNS")) |run_count| try std.fmt.parseInt(usize, run_count, 10) else 1;
|
||||
|
||||
if (std.os.getenv("NO_MACH") == null)
|
||||
if (std.posix.getenv("NO_MACH") == null)
|
||||
try machMain(runs);
|
||||
|
||||
if (std.os.getenv("NO_USER") == null)
|
||||
if (std.posix.getenv("NO_USER") == null)
|
||||
try userMain(runs);
|
||||
}
|
||||
|
||||
@@ -29,12 +29,12 @@ pub fn main() anyerror!void {
|
||||
const to_resolve = args[args.len - 1];
|
||||
const cwd = try bun.getcwdAlloc(allocator);
|
||||
var path: []u8 = undefined;
|
||||
var out_buffer: [bun.MAX_PATH_BYTES]u8 = undefined;
|
||||
var out_buffer: bun.PathBuffer = undefined;
|
||||
|
||||
var j: usize = 0;
|
||||
while (j < 1000) : (j += 1) {
|
||||
var parts = [1][]const u8{to_resolve};
|
||||
var joined_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
|
||||
var joined_buf: bun.PathBuffer = undefined;
|
||||
var joined = path_handler.joinAbsStringBuf(
|
||||
cwd,
|
||||
&joined_buf,
|
||||
@@ -42,11 +42,11 @@ pub fn main() anyerror!void {
|
||||
.loose,
|
||||
);
|
||||
joined_buf[joined.len] = 0;
|
||||
const os = std.os;
|
||||
const os = std.posix;
|
||||
const joined_z: [:0]const u8 = joined_buf[0..joined.len :0];
|
||||
const O_PATH = if (@hasDecl(os.O, "PATH")) os.O.PATH else 0;
|
||||
const O_PATH = if (@hasDecl(bun.O, "PATH")) bun.O.PATH else 0;
|
||||
|
||||
var file = std.os.openZ(joined_z, O_PATH | std.os.O.CLOEXEC, 0) catch |err| {
|
||||
var file = std.posix.openZ(joined_z, O_PATH | bun.O.CLOEXEC, 0) catch |err| {
|
||||
switch (err) {
|
||||
error.NotDir, error.FileNotFound => {
|
||||
Output.prettyError("<r><red>404 Not Found<r>: <b>\"{s}\"<r>", .{joined_z});
|
||||
|
||||
@@ -27,12 +27,12 @@ pub fn main() anyerror!void {
|
||||
var args = std.mem.bytesAsSlice([]u8, try std.process.argsAlloc(allocator));
|
||||
|
||||
const to_resolve = args[args.len - 1];
|
||||
var out_buffer: [bun.MAX_PATH_BYTES]u8 = undefined;
|
||||
var out_buffer: bun.PathBuffer = undefined;
|
||||
var path: []u8 = undefined;
|
||||
|
||||
var j: usize = 0;
|
||||
while (j < 1000) : (j += 1) {
|
||||
path = try std.os.realpathZ(to_resolve, &out_buffer);
|
||||
path = try std.posix.realpathZ(to_resolve, &out_buffer);
|
||||
}
|
||||
|
||||
Output.print("{s}", .{path});
|
||||
|
||||
@@ -37,7 +37,7 @@ pub fn main() anyerror!void {
|
||||
Global.exit(1);
|
||||
}
|
||||
|
||||
var tarball_path_buf: [bun.MAX_PATH_BYTES]u8 = undefined;
|
||||
var tarball_path_buf: bun.PathBuffer = undefined;
|
||||
var basename = std.fs.path.basename(bun.asByteSlice(args[args.len - 1]));
|
||||
while (RecognizedExtensions.has(std.fs.path.extension(basename))) {
|
||||
basename = basename[0 .. basename.len - std.fs.path.extension(basename).len];
|
||||
@@ -88,8 +88,9 @@ pub fn main() anyerror!void {
|
||||
null,
|
||||
void,
|
||||
void{},
|
||||
1,
|
||||
false,
|
||||
false,
|
||||
.{
|
||||
.depth_to_skip = 1,
|
||||
.close_handles = false,
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
package.json
@@ -4,24 +4,22 @@
|
||||
"workspaces": [
|
||||
"./packages/bun-types"
|
||||
],
|
||||
"dependencies": {
|
||||
"@vscode/debugadapter": "^1.61.0",
|
||||
"esbuild": "^0.17.15",
|
||||
"eslint": "^8.20.0",
|
||||
"eslint-config-prettier": "^8.5.0",
|
||||
"mitata": "^0.1.3",
|
||||
"devDependencies": {
|
||||
"@vscode/debugadapter": "^1.65.0",
|
||||
"esbuild": "^0.21.4",
|
||||
"eslint": "^9.4.0",
|
||||
"eslint-config-prettier": "^9.1.0",
|
||||
"mitata": "^0.1.11",
|
||||
"peechy": "0.4.34",
|
||||
"prettier": "^3.2.5",
|
||||
"react": "^18.2.0",
|
||||
"react-dom": "^18.2.0",
|
||||
"source-map-js": "^1.0.2",
|
||||
"typescript": "^5.0.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/bun": "^1.1.2",
|
||||
"@types/react": "^18.0.25",
|
||||
"@typescript-eslint/eslint-plugin": "^5.31.0",
|
||||
"@typescript-eslint/parser": "^5.31.0"
|
||||
"react": "^18.3.1",
|
||||
"react-dom": "^18.3.1",
|
||||
"source-map-js": "^1.2.0",
|
||||
"typescript": "^5.4.5",
|
||||
"@types/bun": "^1.1.3",
|
||||
"@types/react": "^18.3.3",
|
||||
"@typescript-eslint/eslint-plugin": "^7.11.0",
|
||||
"@typescript-eslint/parser": "^7.11.0"
|
||||
},
|
||||
"resolutions": {
|
||||
"bun-types": "workspace:packages/bun-types"
|
||||
@@ -34,6 +32,8 @@
|
||||
"build:tidy": "BUN_SILENT=1 cmake --log-level=WARNING . -DZIG_OPTIMIZE=Debug -DUSE_DEBUG_JSC=ON -DBUN_TIDY_ONLY=ON -DCMAKE_BUILD_TYPE=Debug -GNinja -Bbuild-tidy >> ${GITHUB_STEP_SUMMARY:-/dev/stdout} && BUN_SILENT=1 ninja -Cbuild-tidy >> ${GITHUB_STEP_SUMMARY:-/dev/stdout}",
|
||||
"build:tidy-extra": "cmake . -DZIG_OPTIMIZE=Debug -DUSE_DEBUG_JSC=ON -DBUN_TIDY_ONLY_EXTRA=ON -DCMAKE_BUILD_TYPE=Debug -GNinja -Bbuild-tidy && ninja -Cbuild-tidy",
|
||||
"build:release": "cmake . -DCMAKE_BUILD_TYPE=Release -GNinja -Bbuild-release && ninja -Cbuild-release",
|
||||
"build:release:local": "cmake . -DCMAKE_BUILD_TYPE=Release -DWEBKIT_DIR=$(pwd)/src/bun.js/WebKit/WebKitBuild/Release -GNinja -Bbuild-release-local && ninja -Cbuild-release-local",
|
||||
"build:release:with_logs": "cmake . -DCMAKE_BUILD_TYPE=Release -DENABLE_LOGS=true -GNinja -Bbuild-release && ninja -Cbuild-release",
|
||||
"build:debug-zig-release": "cmake . -DCMAKE_BUILD_TYPE=Release -DZIG_OPTIMIZE=Debug -GNinja -Bbuild-debug-zig-release && ninja -Cbuild-debug-zig-release",
|
||||
"build:safe": "cmake . -DZIG_OPTIMIZE=ReleaseSafe -DUSE_DEBUG_JSC=ON -DCMAKE_BUILD_TYPE=Release -GNinja -Bbuild-safe && ninja -Cbuild-safe",
|
||||
"build:windows": "cmake -B build -S . -G Ninja -DCMAKE_BUILD_TYPE=Debug && ninja -Cbuild",
|
||||
@@ -42,7 +42,12 @@
|
||||
"fmt:zig": "zig fmt src/*.zig src/*/*.zig src/*/*/*.zig src/*/*/*/*.zig",
|
||||
"lint": "eslint './**/*.d.ts' --cache",
|
||||
"lint:fix": "eslint './**/*.d.ts' --cache --fix",
|
||||
"test": "node packages/bun-internal-test/src/runner.node.mjs ./build/bun-debug",
|
||||
"test:release": "node packages/bun-internal-test/src/runner.node.mjs ./build-release/bun"
|
||||
"test": "node scripts/runner.node.mjs ./build/bun-debug",
|
||||
"test:release": "node scripts/runner.node.mjs ./build-release/bun",
|
||||
"banned": "bun packages/bun-internal-test/src/linter.ts",
|
||||
"zig-check": ".cache/zig/zig.exe build check --summary new",
|
||||
"zig-check-all": ".cache/zig/zig.exe build check-all --summary new",
|
||||
"zig-check-windows": ".cache/zig/zig.exe build check-windows --summary new",
|
||||
"zig": ".cache/zig/zig.exe "
|
||||
}
|
||||
}
|
||||
|
||||
@@ -511,13 +511,7 @@ const SourceLines = ({
|
||||
);
|
||||
};
|
||||
|
||||
const BuildErrorSourceLines = ({
|
||||
location,
|
||||
filename,
|
||||
}: {
|
||||
location: Location;
|
||||
filename: string;
|
||||
}) => {
|
||||
const BuildErrorSourceLines = ({ location, filename }: { location: Location; filename: string }) => {
|
||||
const { line, line_text, column } = location;
|
||||
const sourceLines: SourceLine[] = [{ line, text: line_text }];
|
||||
const buildURL = React.useCallback((line, column) => srcFileURL(filename, line, column), [srcFileURL, filename]);
|
||||
@@ -612,7 +606,7 @@ const NativeStackFrame = ({
|
||||
const {
|
||||
file,
|
||||
function_name: functionName,
|
||||
position: { line, column_start: column },
|
||||
position: { line, column },
|
||||
scope,
|
||||
} = frame;
|
||||
const fileName = normalizedFilename(file, cwd);
|
||||
@@ -689,21 +683,21 @@ const NativeStackTrace = ({
|
||||
return (
|
||||
<div ref={ref} className={`BunError-NativeStackTrace`}>
|
||||
<a
|
||||
href={urlBuilder(filename, position.line, position.column_start)}
|
||||
href={urlBuilder(filename, position.line, position.column)}
|
||||
data-line={position.line}
|
||||
data-column={position.column_start}
|
||||
data-column={position.column}
|
||||
data-is-client="true"
|
||||
target="_blank"
|
||||
onClick={openWithoutFlashOfNewTab}
|
||||
className="BunError-NativeStackTrace-filename"
|
||||
>
|
||||
{filename}:{position.line}:{position.column_start}
|
||||
{filename}:{position.line}:{position.column}
|
||||
</a>
|
||||
{sourceLines.length > 0 && (
|
||||
<SourceLines
|
||||
highlight={position.line}
|
||||
sourceLines={sourceLines}
|
||||
highlightColumnStart={position.column_start}
|
||||
highlightColumnStart={position.column}
|
||||
buildURL={buildURL}
|
||||
highlightColumnEnd={position.column_stop}
|
||||
>
|
||||
@@ -715,7 +709,7 @@ const NativeStackTrace = ({
|
||||
highlight={position.line}
|
||||
sourceLines={sourceLines}
|
||||
setSourceLines={setSourceLines}
|
||||
highlightColumnStart={position.column_start}
|
||||
highlightColumnStart={position.column}
|
||||
buildURL={buildURL}
|
||||
highlightColumnEnd={position.column_stop}
|
||||
>
|
||||
@@ -737,13 +731,7 @@ const Indent = ({ by, children }) => {
|
||||
);
|
||||
};
|
||||
|
||||
const JSException = ({
|
||||
value,
|
||||
isClient = false,
|
||||
}: {
|
||||
value: JSExceptionType;
|
||||
isClient: boolean;
|
||||
}) => {
|
||||
const JSException = ({ value, isClient = false }: { value: JSExceptionType; isClient: boolean }) => {
|
||||
const tag = isClient ? ErrorTagType.client : ErrorTagType.server;
|
||||
const [sourceLines, _setSourceLines] = React.useState(value?.stack?.source_lines ?? []);
|
||||
var message = value.message || "";
|
||||
@@ -791,7 +779,7 @@ const JSException = ({
|
||||
sourceLines={sourceLines}
|
||||
setSourceLines={setSourceLines}
|
||||
>
|
||||
<Indent by={value.stack.frames[0].position.column_start}>
|
||||
<Indent by={value.stack.frames[0].position.column}>
|
||||
<span className="BunError-error-typename">{fancyTypeError.runtimeTypeName}</span>
|
||||
</Indent>
|
||||
</NativeStackTrace>
|
||||
@@ -853,13 +841,7 @@ const JSException = ({
|
||||
}
|
||||
};
|
||||
|
||||
const Summary = ({
|
||||
errorCount,
|
||||
onClose,
|
||||
}: {
|
||||
errorCount: number;
|
||||
onClose: () => void;
|
||||
}) => {
|
||||
const Summary = ({ errorCount, onClose }: { errorCount: number; onClose: () => void }) => {
|
||||
return (
|
||||
<div className="BunError-Summary">
|
||||
<div className="BunError-Summary-ErrorIcon"></div>
|
||||
@@ -1001,11 +983,7 @@ const Footer = ({ toMarkdown, data }) => (
|
||||
</div>
|
||||
);
|
||||
|
||||
const BuildFailureMessageContainer = ({
|
||||
messages,
|
||||
}: {
|
||||
messages: Message[];
|
||||
}) => {
|
||||
const BuildFailureMessageContainer = ({ messages }: { messages: Message[] }) => {
|
||||
return (
|
||||
<div id="BunErrorOverlay-container">
|
||||
<div className="BunError-content">
|
||||
@@ -1153,14 +1131,14 @@ export function renderRuntimeError(error: Error) {
|
||||
file: error[fileNameProperty] || "",
|
||||
position: {
|
||||
line: +error[lineNumberProperty] || 1,
|
||||
column_start: +error[columnNumberProperty] || 1,
|
||||
column: +error[columnNumberProperty] || 1,
|
||||
},
|
||||
} as StackFrame);
|
||||
} else if (exception.stack && exception.stack.frames.length > 0) {
|
||||
exception.stack.frames[0].position.line = error[lineNumberProperty];
|
||||
|
||||
if (Number.isFinite(error[columnNumberProperty])) {
|
||||
exception.stack.frames[0].position.column_start = error[columnNumberProperty];
|
||||
exception.stack.frames[0].position.column = error[columnNumberProperty];
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1214,27 +1192,27 @@ export function renderRuntimeError(error: Error) {
|
||||
}
|
||||
var frame = exception.stack.frames[frameIndex];
|
||||
|
||||
const { line, column_start } = frame.position;
|
||||
const remapped = remapPosition(mappings, line, column_start);
|
||||
const { line, column } = frame.position;
|
||||
const remapped = remapPosition(mappings, line, column);
|
||||
if (!remapped) return null;
|
||||
frame.position.line_start = frame.position.line = remapped[0];
|
||||
frame.position.column_stop =
|
||||
frame.position.expression_stop =
|
||||
frame.position.expression_start =
|
||||
frame.position.column_start =
|
||||
frame.position.column =
|
||||
remapped[1];
|
||||
}, console.error);
|
||||
} else {
|
||||
if (!mappings) return null;
|
||||
var frame = exception.stack.frames[frameIndex];
|
||||
const { line, column_start } = frame.position;
|
||||
const remapped = remapPosition(mappings, line, column_start);
|
||||
const { line, column } = frame.position;
|
||||
const remapped = remapPosition(mappings, line, column);
|
||||
if (!remapped) return null;
|
||||
frame.position.line_start = frame.position.line = remapped[0];
|
||||
frame.position.column_stop =
|
||||
frame.position.expression_stop =
|
||||
frame.position.expression_start =
|
||||
frame.position.column_start =
|
||||
frame.position.column =
|
||||
remapped[1];
|
||||
}
|
||||
});
|
||||
|
||||
@@ -1518,7 +1518,10 @@
|
||||
"id": "EventMetadata",
|
||||
"description": "A key-value pair for additional event information to pass along.",
|
||||
"type": "object",
|
||||
"properties": [{ "name": "key", "type": "string" }, { "name": "value", "type": "string" }]
|
||||
"properties": [
|
||||
{ "name": "key", "type": "string" },
|
||||
{ "name": "value", "type": "string" }
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "BackgroundServiceEvent",
|
||||
@@ -1570,7 +1573,10 @@
|
||||
{
|
||||
"name": "setRecording",
|
||||
"description": "Set the recording state for the service.",
|
||||
"parameters": [{ "name": "shouldRecord", "type": "boolean" }, { "name": "service", "$ref": "ServiceName" }]
|
||||
"parameters": [
|
||||
{ "name": "shouldRecord", "type": "boolean" },
|
||||
{ "name": "service", "$ref": "ServiceName" }
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "clearEvents",
|
||||
@@ -1582,7 +1588,10 @@
|
||||
{
|
||||
"name": "recordingStateChanged",
|
||||
"description": "Called when the recording state for the service has been updated.",
|
||||
"parameters": [{ "name": "isRecording", "type": "boolean" }, { "name": "service", "$ref": "ServiceName" }]
|
||||
"parameters": [
|
||||
{ "name": "isRecording", "type": "boolean" },
|
||||
{ "name": "service", "$ref": "ServiceName" }
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "backgroundServiceEventReceived",
|
||||
@@ -2072,7 +2081,10 @@
|
||||
{
|
||||
"id": "Header",
|
||||
"type": "object",
|
||||
"properties": [{ "name": "name", "type": "string" }, { "name": "value", "type": "string" }]
|
||||
"properties": [
|
||||
{ "name": "name", "type": "string" },
|
||||
{ "name": "value", "type": "string" }
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "CachedResponse",
|
||||
@@ -3442,7 +3454,10 @@
|
||||
{
|
||||
"name": "setStyleSheetText",
|
||||
"description": "Sets the new stylesheet text.",
|
||||
"parameters": [{ "name": "styleSheetId", "$ref": "StyleSheetId" }, { "name": "text", "type": "string" }],
|
||||
"parameters": [
|
||||
{ "name": "styleSheetId", "$ref": "StyleSheetId" },
|
||||
{ "name": "text", "type": "string" }
|
||||
],
|
||||
"returns": [
|
||||
{
|
||||
"name": "sourceMapURL",
|
||||
@@ -3567,7 +3582,10 @@
|
||||
},
|
||||
{
|
||||
"name": "executeSQL",
|
||||
"parameters": [{ "name": "databaseId", "$ref": "DatabaseId" }, { "name": "query", "type": "string" }],
|
||||
"parameters": [
|
||||
{ "name": "databaseId", "$ref": "DatabaseId" },
|
||||
{ "name": "query", "type": "string" }
|
||||
],
|
||||
"returns": [
|
||||
{ "name": "columnNames", "optional": true, "type": "array", "items": { "type": "string" } },
|
||||
{ "name": "values", "optional": true, "type": "array", "items": { "type": "any" } },
|
||||
@@ -3608,7 +3626,10 @@
|
||||
{
|
||||
"name": "selectPrompt",
|
||||
"description": "Select a device in response to a DeviceAccess.deviceRequestPrompted event.",
|
||||
"parameters": [{ "name": "id", "$ref": "RequestId" }, { "name": "deviceId", "$ref": "DeviceId" }]
|
||||
"parameters": [
|
||||
{ "name": "id", "$ref": "RequestId" },
|
||||
{ "name": "deviceId", "$ref": "DeviceId" }
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "cancelPrompt",
|
||||
@@ -5656,7 +5677,10 @@
|
||||
},
|
||||
{
|
||||
"name": "removeDOMStorageItem",
|
||||
"parameters": [{ "name": "storageId", "$ref": "StorageId" }, { "name": "key", "type": "string" }]
|
||||
"parameters": [
|
||||
{ "name": "storageId", "$ref": "StorageId" },
|
||||
{ "name": "key", "type": "string" }
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "setDOMStorageItem",
|
||||
@@ -5678,7 +5702,10 @@
|
||||
},
|
||||
{
|
||||
"name": "domStorageItemRemoved",
|
||||
"parameters": [{ "name": "storageId", "$ref": "StorageId" }, { "name": "key", "type": "string" }]
|
||||
"parameters": [
|
||||
{ "name": "storageId", "$ref": "StorageId" },
|
||||
{ "name": "key", "type": "string" }
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "domStorageItemUpdated",
|
||||
@@ -5748,7 +5775,10 @@
|
||||
{
|
||||
"id": "MediaFeature",
|
||||
"type": "object",
|
||||
"properties": [{ "name": "name", "type": "string" }, { "name": "value", "type": "string" }]
|
||||
"properties": [
|
||||
{ "name": "name", "type": "string" },
|
||||
{ "name": "value", "type": "string" }
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "VirtualTimePolicy",
|
||||
@@ -5762,7 +5792,10 @@
|
||||
"description": "Used to specify User Agent Client Hints to emulate. See https://wicg.github.io/ua-client-hints",
|
||||
"experimental": true,
|
||||
"type": "object",
|
||||
"properties": [{ "name": "brand", "type": "string" }, { "name": "version", "type": "string" }]
|
||||
"properties": [
|
||||
{ "name": "brand", "type": "string" },
|
||||
{ "name": "version", "type": "string" }
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "UserAgentMetadata",
|
||||
@@ -6120,7 +6153,10 @@
|
||||
"name": "setSensorOverrideReadings",
|
||||
"description": "Updates the sensor readings reported by a sensor type previously overriden\nby setSensorOverrideEnabled.",
|
||||
"experimental": true,
|
||||
"parameters": [{ "name": "type", "$ref": "SensorType" }, { "name": "reading", "$ref": "SensorReading" }]
|
||||
"parameters": [
|
||||
{ "name": "type", "$ref": "SensorType" },
|
||||
{ "name": "reading", "$ref": "SensorReading" }
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "setIdleOverride",
|
||||
@@ -6405,11 +6441,17 @@
|
||||
{ "name": "disable" },
|
||||
{
|
||||
"name": "selectAccount",
|
||||
"parameters": [{ "name": "dialogId", "type": "string" }, { "name": "accountIndex", "type": "integer" }]
|
||||
"parameters": [
|
||||
{ "name": "dialogId", "type": "string" },
|
||||
{ "name": "accountIndex", "type": "integer" }
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "clickDialogButton",
|
||||
"parameters": [{ "name": "dialogId", "type": "string" }, { "name": "dialogButton", "$ref": "DialogButton" }]
|
||||
"parameters": [
|
||||
{ "name": "dialogId", "type": "string" },
|
||||
{ "name": "dialogButton", "$ref": "DialogButton" }
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "dismissDialog",
|
||||
@@ -6464,7 +6506,10 @@
|
||||
"id": "HeaderEntry",
|
||||
"description": "Response HTTP header entry",
|
||||
"type": "object",
|
||||
"properties": [{ "name": "name", "type": "string" }, { "name": "value", "type": "string" }]
|
||||
"properties": [
|
||||
{ "name": "name", "type": "string" },
|
||||
{ "name": "value", "type": "string" }
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "AuthChallenge",
|
||||
@@ -8301,19 +8346,28 @@
|
||||
"id": "PlayerProperty",
|
||||
"description": "Corresponds to kMediaPropertyChange",
|
||||
"type": "object",
|
||||
"properties": [{ "name": "name", "type": "string" }, { "name": "value", "type": "string" }]
|
||||
"properties": [
|
||||
{ "name": "name", "type": "string" },
|
||||
{ "name": "value", "type": "string" }
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "PlayerEvent",
|
||||
"description": "Corresponds to kMediaEventTriggered",
|
||||
"type": "object",
|
||||
"properties": [{ "name": "timestamp", "$ref": "Timestamp" }, { "name": "value", "type": "string" }]
|
||||
"properties": [
|
||||
{ "name": "timestamp", "$ref": "Timestamp" },
|
||||
{ "name": "value", "type": "string" }
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "PlayerErrorSourceLocation",
|
||||
"description": "Represents logged source line numbers reported in an error.\nNOTE: file and line are from chromium c++ implementation code, not js.",
|
||||
"type": "object",
|
||||
"properties": [{ "name": "file", "type": "string" }, { "name": "line", "type": "integer" }]
|
||||
"properties": [
|
||||
{ "name": "file", "type": "string" },
|
||||
{ "name": "line", "type": "integer" }
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "PlayerError",
|
||||
@@ -12357,7 +12411,10 @@
|
||||
"description": "Pair of issuer origin and number of available (signed, but not used) Trust\nTokens from that issuer.",
|
||||
"experimental": true,
|
||||
"type": "object",
|
||||
"properties": [{ "name": "issuerOrigin", "type": "string" }, { "name": "count", "type": "number" }]
|
||||
"properties": [
|
||||
{ "name": "issuerOrigin", "type": "string" },
|
||||
{ "name": "count", "type": "number" }
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "InterestGroupAccessType",
|
||||
@@ -12420,7 +12477,10 @@
|
||||
"id": "SharedStorageEntry",
|
||||
"description": "Struct for a single key-value pair in an origin's shared storage.",
|
||||
"type": "object",
|
||||
"properties": [{ "name": "key", "type": "string" }, { "name": "value", "type": "string" }]
|
||||
"properties": [
|
||||
{ "name": "key", "type": "string" },
|
||||
{ "name": "value", "type": "string" }
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "SharedStorageMetadata",
|
||||
@@ -12436,7 +12496,10 @@
|
||||
"id": "SharedStorageReportingMetadata",
|
||||
"description": "Pair of reporting metadata details for a candidate URL for `selectURL()`.",
|
||||
"type": "object",
|
||||
"properties": [{ "name": "eventType", "type": "string" }, { "name": "reportingUrl", "type": "string" }]
|
||||
"properties": [
|
||||
{ "name": "eventType", "type": "string" },
|
||||
{ "name": "reportingUrl", "type": "string" }
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "SharedStorageUrlWithMetadata",
|
||||
@@ -12568,7 +12631,10 @@
|
||||
"id": "AttributionReportingAggregationKeysEntry",
|
||||
"experimental": true,
|
||||
"type": "object",
|
||||
"properties": [{ "name": "key", "type": "string" }, { "name": "value", "$ref": "UnsignedInt128AsBase16" }]
|
||||
"properties": [
|
||||
{ "name": "key", "type": "string" },
|
||||
{ "name": "value", "$ref": "UnsignedInt128AsBase16" }
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "AttributionReportingEventReportWindows",
|
||||
@@ -12943,7 +13009,10 @@
|
||||
"name": "getInterestGroupDetails",
|
||||
"description": "Gets details for a named interest group.",
|
||||
"experimental": true,
|
||||
"parameters": [{ "name": "ownerOrigin", "type": "string" }, { "name": "name", "type": "string" }],
|
||||
"parameters": [
|
||||
{ "name": "ownerOrigin", "type": "string" },
|
||||
{ "name": "name", "type": "string" }
|
||||
],
|
||||
"returns": [{ "name": "details", "$ref": "InterestGroupDetails" }]
|
||||
},
|
||||
{
|
||||
@@ -12986,7 +13055,10 @@
|
||||
"name": "deleteSharedStorageEntry",
|
||||
"description": "Deletes entry for `key` (if it exists) for a given origin's shared storage.",
|
||||
"experimental": true,
|
||||
"parameters": [{ "name": "ownerOrigin", "type": "string" }, { "name": "key", "type": "string" }]
|
||||
"parameters": [
|
||||
{ "name": "ownerOrigin", "type": "string" },
|
||||
{ "name": "key", "type": "string" }
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "clearSharedStorageEntries",
|
||||
@@ -13010,7 +13082,10 @@
|
||||
"name": "setStorageBucketTracking",
|
||||
"description": "Set tracking for a storage key's buckets.",
|
||||
"experimental": true,
|
||||
"parameters": [{ "name": "storageKey", "type": "string" }, { "name": "enable", "type": "boolean" }]
|
||||
"parameters": [
|
||||
{ "name": "storageKey", "type": "string" },
|
||||
{ "name": "enable", "type": "boolean" }
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "deleteStorageBucket",
|
||||
@@ -13456,7 +13531,10 @@
|
||||
"id": "RemoteLocation",
|
||||
"experimental": true,
|
||||
"type": "object",
|
||||
"properties": [{ "name": "host", "type": "string" }, { "name": "port", "type": "integer" }]
|
||||
"properties": [
|
||||
{ "name": "host", "type": "string" },
|
||||
{ "name": "port", "type": "integer" }
|
||||
]
|
||||
}
|
||||
],
|
||||
"commands": [
|
||||
|
||||
@@ -5,9 +5,13 @@
|
||||
"std.debug.assert": "Use bun.assert instead",
|
||||
"std.debug.dumpStackTrace": "Use bun.handleErrorReturnTrace or bun.crash_handler.dumpStackTrace instead",
|
||||
"std.debug.print": "Don't let this be committed",
|
||||
"std.mem.indexOfAny": "Use bun.strings.indexAny or bun.strings.indexAnyComptime",
|
||||
"std.mem.indexOfAny(u8": "Use bun.strings.indexOfAny",
|
||||
"undefined != ": "This is by definition Undefined Behavior.",
|
||||
"undefined == ": "This is by definition Undefined Behavior.",
|
||||
"bun.toFD(std.fs.cwd().fd)": "Use bun.FD.cwd()",
|
||||
"std.StringArrayHashMapUnmanaged(": "bun.StringArrayHashMapUnmanaged has a faster `eql`",
|
||||
"std.StringArrayHashMap(": "bun.StringArrayHashMap has a faster `eql`",
|
||||
"std.StringHashMapUnmanaged(": "bun.StringHashMapUnmanaged has a faster `eql`",
|
||||
"std.StringHashMap(": "bun.StringHashMap has a faster `eql`",
|
||||
"": ""
|
||||
}
|
||||
|
||||
@@ -19,9 +19,7 @@ for (const [banned, suggestion] of Object.entries(BANNED)) {
|
||||
if (banned.length === 0) continue;
|
||||
// Run git grep to find occurrences of std.debug.assert in .zig files
|
||||
// .nothrow() is here since git will exit with non-zero if no matches are found.
|
||||
let stdout = await $`git grep -n -F "${banned}" "src/**/**.zig" | grep -v -F '//' | grep -v -F bench`
|
||||
.nothrow()
|
||||
.text();
|
||||
let stdout = await $`git grep -n -F "${banned}" "src/**.zig" | grep -v -F '//' | grep -v -F bench`.nothrow().text();
|
||||
|
||||
stdout = stdout.trim();
|
||||
if (stdout.length === 0) continue;
|
||||
|
||||
@@ -152,9 +152,9 @@ function getMaxFileDescriptor(path) {
|
||||
|
||||
hasInitialMaxFD = true;
|
||||
|
||||
if (process.platform === "linux") {
|
||||
if (process.platform === "linux" || process.platform === "darwin") {
|
||||
try {
|
||||
readdirSync("/proc/self/fd").forEach(name => {
|
||||
readdirSync(process.platform === "darwin" ? "/dev/fd" : "/proc/self/fd").forEach(name => {
|
||||
const fd = parseInt(name.trim(), 10);
|
||||
if (Number.isSafeInteger(fd) && fd >= 0) {
|
||||
maxFd = Math.max(maxFd, fd);
|
||||
@@ -186,7 +186,7 @@ function checkSlowTests() {
|
||||
);
|
||||
proc?.stdout?.destroy?.();
|
||||
proc?.stderr?.destroy?.();
|
||||
proc?.kill?.();
|
||||
proc?.kill?.(9);
|
||||
} else if (now - start > SHORT_TIMEOUT_DURATION) {
|
||||
console.error(
|
||||
`\x1b[33mwarning\x1b[0;2m:\x1b[0m Test ${JSON.stringify(path)} has been running for ${Math.ceil(
|
||||
@@ -419,16 +419,10 @@ function linkToGH(linkTo) {
|
||||
return `https://github.com/oven-sh/bun/blob/${git_sha}/${linkTo}`;
|
||||
}
|
||||
|
||||
function sectionLink(linkTo) {
|
||||
return "#" + linkTo.replace(/[^a-zA-Z0-9_-]/g, "").toLowerCase();
|
||||
}
|
||||
|
||||
failing_tests.sort((a, b) => a.path.localeCompare(b.path));
|
||||
passing_tests.sort((a, b) => a.localeCompare(b));
|
||||
|
||||
const failingTestDisplay = failing_tests
|
||||
.map(({ path, reason }) => `- [\`${path}\`](${sectionLink(path)})${reason ? ` ${reason}` : ""}`)
|
||||
.join("\n");
|
||||
const failingTestDisplay = failing_tests.map(({ path, reason }) => `- \`${path}\` ${reason}`).join("\n");
|
||||
|
||||
// const passingTestDisplay = passing_tests.map(path => `- \`${path}\``).join("\n");
|
||||
|
||||
|
||||
@@ -290,7 +290,7 @@ function formatBody(body?: string, isBase64Encoded?: boolean): string | null {
|
||||
if (!isBase64Encoded) {
|
||||
return body;
|
||||
}
|
||||
return Buffer.from(body).toString("base64");
|
||||
return Buffer.from(body, "base64").toString("utf8");
|
||||
}
|
||||
|
||||
type HttpEventV1 = {
|
||||
|
||||
@@ -22,10 +22,10 @@ bun upgrade
- [Linux, arm64](https://www.npmjs.com/package/@oven/bun-linux-aarch64)
- [Linux, x64](https://www.npmjs.com/package/@oven/bun-linux-x64)
- [Linux, x64 (without AVX2 instructions)](https://www.npmjs.com/package/@oven/bun-linux-x64-baseline)
- [Windows (using Windows Subsystem for Linux, aka. "WSL")](https://relatablecode.com/how-to-set-up-bun-on-a-windows-machine)
- [Windows](https://www.npmjs.com/package/@oven/bun-windows-x64)
- [Windows (without AVX2 instructions)](https://www.npmjs.com/package/@oven/bun-windows-x64-baseline)

### Future Platforms

- [Windows](https://github.com/oven-sh/bun/issues/43)
- Unix-like variants such as FreeBSD, OpenBSD, etc.
- Android and iOS
@@ -1,5 +1,6 @@
import { AwsClient } from "aws4fetch";
import { getBuild, getRelease, getSemver, getSha } from "../src/github";
import { join, tmp } from "../src/fs";

const dryRun = process.argv.includes("--dry-run");

@@ -18,7 +19,7 @@ try {
  if (!dryRun) {
    process.exit(1);
  }
  console.log("since this is a dry run, i'll allow it");
  console.log("Continuing with a dry run using a fake client.\n");
}

const latest = await getRelease();

@@ -30,11 +31,11 @@ console.log("Found build:", full_commit_hash);

let paths: string[];
if (latest.tag_name === release.tag_name) {
  paths = ["releases/latest", `releases/${release.tag_name}`];
  paths = ["releases/latest", `releases/${release.tag_name}`, `releases/${full_commit_hash}`];
} else if (release.tag_name === "canary") {
  try {
    const build = await getSemver("canary", await getBuild());
    paths = ["releases/canary", `releases/${build}`, `releases/${full_commit_hash}`];
    paths = ["releases/canary", `releases/${build}`, `releases/${full_commit_hash}-canary`];
  } catch (error) {
    console.warn(error);
    paths = ["releases/canary"];

@@ -44,6 +45,24 @@ if (latest.tag_name === release.tag_name) {
}
console.log("Found paths:", paths);

const local =
  "bun-" +
  (
    {
      darwin: "darwin",
      win32: "windows",
      linux: "linux",
    } as any
  )[process.platform] +
  "-" +
  (
    {
      arm64: "aarch64",
      x64: "x64",
    } as any
  )[process.arch] +
  ".zip";

for (const asset of release.assets) {
  const url = asset.browser_download_url;
  const response = await fetch(url);
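The `local` expression above just builds the expected asset name for the machine running the script. An equivalent, hypothetical helper to show the mapping:

```ts
// Hypothetical helper equivalent to the inline expression above.
function localAssetName(platform: string = process.platform, arch: string = process.arch): string {
  const os = ({ darwin: "darwin", win32: "windows", linux: "linux" } as Record<string, string>)[platform];
  const cpu = ({ arm64: "aarch64", x64: "x64" } as Record<string, string>)[arch];
  return `bun-${os}-${cpu}.zip`;
}

localAssetName("darwin", "arm64"); // "bun-darwin-aarch64.zip"
localAssetName("linux", "x64");    // "bun-linux-x64.zip"
```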
@@ -63,7 +82,48 @@ for (const asset of release.assets) {
    default:
      contentType = response.headers.get("Content-Type") || "";
  }

  const body = await response.arrayBuffer();

  if (name == local) {
    // extract feature data using the local build
    const temp = tmp();
    await Bun.write(join(temp, "bun.zip"), body);
    let unzip = Bun.spawnSync({
      cmd: ["unzip", join(temp, "bun.zip")],
      cwd: temp,
    });
    if (!unzip.success) throw new Error("Failed to unzip");
    let data = Bun.spawnSync({
      cmd: [
        join(temp, local.replace(".zip", ""), "bun"),
        "--print",
        'JSON.stringify(require("bun:internal-for-testing").crash_handler.getFeatureData())',
      ],
      cwd: temp,
      env: {
        ...process.env,
        BUN_DEBUG_QUIET_LOGS: "1",
        BUN_GARBAGE_COLLECTOR_LEVEL: "0",
        BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING: "1",
      },
      stdio: ["ignore", "pipe", "inherit"],
    });
    const json = data.stdout.toString("utf8");
    for (const path of paths) {
      const key = `${path}/features.json`;
      console.log("Uploading:", key);
      await uploadToS3({
        key,
        body: new TextEncoder().encode(json).buffer,
        headers: {
          "Content-Type": contentType,
          "Content-Disposition": `attachment; filename="${name}"`,
        },
      });
    }
  }

  for (const path of paths) {
    const key = `${path}/${name}`;
    console.log("Uploading:", key);

@@ -78,6 +138,15 @@ for (const asset of release.assets) {
  }
}

if (!dryRun && process.env.BUN_REPORT_TOKEN) {
  await fetch(`https://bun.report/purge-cache/${full_commit_hash}`, {
    method: "POST",
    headers: {
      Authorization: process.env.BUN_REPORT_TOKEN,
    },
  });
}

console.log("Done");

async function uploadToS3({
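The body of `uploadToS3` is cut off in this view. Given the `AwsClient` import added at the top of the file, a plausible shape is sketched below; the bucket endpoint and environment variable names are assumptions, not taken from the repository:

```ts
import { AwsClient } from "aws4fetch";

// Assumed: credentials and bucket come from the environment.
const aws = new AwsClient({
  accessKeyId: process.env.AWS_ACCESS_KEY_ID!,
  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY!,
});

async function uploadToS3(options: { key: string; body: ArrayBuffer; headers: Record<string, string> }) {
  const url = `https://${process.env.AWS_BUCKET}.s3.amazonaws.com/${options.key}`; // assumed layout
  const response = await aws.fetch(url, {
    method: "PUT",
    body: options.body,
    headers: options.headers,
  });
  if (!response.ok) {
    throw new Error(`Upload failed: ${response.status} ${await response.text()}`);
  }
}
```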
packages/bun-types/bun.d.ts (vendored, 203 lines changed)
@@ -15,7 +15,7 @@
 */
declare module "bun" {
  import type { Encoding as CryptoEncoding } from "crypto";

  import type { CipherNameAndProtocol, EphemeralKeyInfo, PeerCertificate } from "tls";
  interface Env {
    NODE_ENV?: string;
    /**

@@ -1455,7 +1455,7 @@ declare module "bun" {
   * ```js
   * const {imports, exports} = transpiler.scan(`
   * import {foo} from "baz";
   * const hello = "hi!";
   * export const hello = "hi!";
   * `);
   *
   * console.log(imports); // ["baz"]

@@ -1516,11 +1516,12 @@ declare module "bun" {
    plugins?: BunPlugin[];
    // manifest?: boolean; // whether to return manifest
    external?: string[];
    packages?: "bundle" | "external";
    publicPath?: string;
    define?: Record<string, string>;
    // origin?: string; // e.g. http://mydomain.com
    loader?: { [k in string]: Loader };
    sourcemap?: "none" | "inline" | "external"; // default: "none"
    sourcemap?: "none" | "linked" | "inline" | "external"; // default: "none", true -> "inline"
    /**
     * package.json `exports` conditions used when resolving imports
     *
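The widened `sourcemap` union is consumed by `Bun.build`; a minimal sketch, with illustrative entrypoint and outdir paths:

```ts
// "linked" writes a separate .map file and appends a sourceMappingURL comment
// pointing at it, unlike "inline" (embedded) or "external" (separate file, no comment).
const result = await Bun.build({
  entrypoints: ["./src/index.ts"], // illustrative path
  outdir: "./dist",
  sourcemap: "linked",
});

if (!result.success) {
  console.error(...result.logs);
}
```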
@@ -2968,7 +2969,7 @@ declare module "bun" {
     * Returns 0 if the versions are equal, 1 if `v1` is greater, or -1 if `v2` is greater.
     * Throws an error if either version is invalid.
     */
    order(v1: StringLike, v2: StringLike): -1 | 0 | 1;
    order(this: void, v1: StringLike, v2: StringLike): -1 | 0 | 1;
  }
  var semver: Semver;
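`order` is a standard comparator, and the `this: void` annotation makes it safe to pass unbound, for example straight into `Array.prototype.sort`:

```ts
const versions = ["1.10.0", "1.2.0", "1.2.0-beta.1"];

// Passing the method unbound is fine because it does not rely on `this`.
versions.sort(Bun.semver.order);
// => ["1.2.0-beta.1", "1.2.0", "1.10.0"]

Bun.semver.order("1.0.0", "2.0.0"); // -1
```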
@@ -3099,6 +3100,10 @@ declare module "bun" {
   */
  function openInEditor(path: string, options?: EditorOptions): void;

  const fetch: typeof globalThis.fetch & {
    preconnect(url: string): void;
  };

  interface EditorOptions {
    editor?: "vscode" | "subl";
    line?: number;
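Per the declaration added here, `Bun.fetch` is the standard `fetch` plus a `preconnect` helper that warms the connection before the first request. A small sketch with an illustrative host:

```ts
// Open the connection early so the first real request skips handshake latency.
Bun.fetch.preconnect("https://example.com");

// ...later, the actual request can reuse the warmed connection.
const res = await Bun.fetch("https://example.com/api/health"); // illustrative URL
console.log(res.status);
```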
@@ -3216,7 +3221,8 @@ declare module "bun" {
     *
     * @param hashInto `TypedArray` to write the hash into. Faster than creating a new one each time
     */
    digest(hashInto?: NodeJS.TypedArray): NodeJS.TypedArray;
    digest(): Buffer;
    digest(hashInto: NodeJS.TypedArray): NodeJS.TypedArray;

    /**
     * Run the hash over the given data

@@ -3225,10 +3231,11 @@ declare module "bun" {
     *
     * @param hashInto `TypedArray` to write the hash into. Faster than creating a new one each time
     */
    static hash(algorithm: SupportedCryptoAlgorithms, input: Bun.BlobOrStringOrBuffer): Buffer;
    static hash(
      algorithm: SupportedCryptoAlgorithms,
      input: Bun.BlobOrStringOrBuffer,
      hashInto?: NodeJS.TypedArray,
      hashInto: NodeJS.TypedArray,
    ): NodeJS.TypedArray;

    /**
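A short sketch of the two call shapes these overloads describe, allocate-and-return versus writing into a caller-supplied buffer; the algorithm and inputs are illustrative:

```ts
// Incremental hashing: digest() with no argument returns a fresh Buffer,
// digest(typedArray) writes into the buffer you pass and returns it.
const hasher = new Bun.CryptoHasher("sha256");
hasher.update("hello ");
hasher.update("world");
const asBuffer = hasher.digest();

// One-shot static form: with a hashInto argument the result lands in your
// TypedArray instead of a newly allocated Buffer.
const out = new Uint8Array(32); // SHA-256 output size
Bun.CryptoHasher.hash("sha256", "hello world", out);
```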
@@ -3871,6 +3878,15 @@ declare module "bun" {
     */
    timeout(seconds: number): void;

    /**
     * Forcefully close the socket. The other end may not receive all data, and
     * the socket will be closed immediately.
     *
     * This passes `SO_LINGER` with `l_onoff` set to `1` and `l_linger` set to
     * `0` and then calls `close(2)`.
     */
    terminate(): void;

    /**
     * Shutdown writes to a socket
     *
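A hedged sketch of where `terminate()` fits relative to a graceful shutdown; the host, port, and handlers are illustrative:

```ts
const socket = await Bun.connect({
  hostname: "example.com", // illustrative
  port: 80,
  socket: {
    data(_socket, chunk) {
      console.log("received", chunk.byteLength, "bytes");
    },
  },
});

socket.write("GET / HTTP/1.1\r\nHost: example.com\r\nConnection: close\r\n\r\n");

// Graceful: flush pending writes, send FIN, wait for the peer.
// socket.end();

// Abrupt: SO_LINGER{1,0} + close(2); queued data may be dropped and the peer
// typically sees a connection reset.
socket.terminate();
```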
@@ -3917,6 +3933,181 @@ declare module "bun" {
     * local port connected to the socket
     */
    readonly localPort: number;

    /**
     * This property is `true` if the peer certificate was signed by one of the CAs
     * specified when creating the `Socket` instance, otherwise `false`.
     */
    readonly authorized: boolean;

    /**
     * String containing the selected ALPN protocol.
     * Before a handshake has completed, this value is always null.
     * When a handshake is completed but no ALPN protocol was selected, socket.alpnProtocol equals false.
     */
    readonly alpnProtocol: string | false | null;

    /**
     * Disables TLS renegotiation for this `Socket` instance. Once called, attempts
     * to renegotiate will trigger an `error` handler on the `Socket`.
     *
     * There is no support for renegotiation as a server. (Attempts by clients will result in a fatal alert so that ClientHello messages cannot be used to flood a server and escape higher-level limits.)
     */
    disableRenegotiation(): void;

    /**
     * Keying material is used for validations to prevent different kinds of attacks in
     * network protocols, for example in the specifications of IEEE 802.1X.
     *
     * Example
     *
     * ```js
     * const keyingMaterial = socket.exportKeyingMaterial(
     *   128,
     *   'client finished');
     *
     * /*
     *  Example return value of keyingMaterial:
     *  <Buffer 76 26 af 99 c5 56 8e 42 09 91 ef 9f 93 cb ad 6c 7b 65 f8 53 f1 d8 d9
     *  12 5a 33 b8 b5 25 df 7b 37 9f e0 e2 4f b8 67 83 a3 2f cd 5d 41 42 4c 91
     *  74 ef 2c ... 78 more bytes>
     *
     * ```
     *
     * @param length number of bytes to retrieve from keying material
     * @param label an application specific label, typically this will be a value from the [IANA Exporter Label
     * Registry](https://www.iana.org/assignments/tls-parameters/tls-parameters.xhtml#exporter-labels).
     * @param context Optionally provide a context.
     * @return requested bytes of the keying material
     */
    exportKeyingMaterial(length: number, label: string, context: Buffer): Buffer;

    /**
     * Returns the reason why the peer's certificate has not been verified. This
     * property is set only when `socket.authorized === false`.
     */
    getAuthorizationError(): Error | null;

    /**
     * Returns an object representing the local certificate. The returned object has
     * some properties corresponding to the fields of the certificate.
     *
     * If there is no local certificate, an empty object will be returned. If the
     * socket has been destroyed, `null` will be returned.
     */
    getCertificate(): PeerCertificate | object | null;

    /**
     * Returns an object containing information on the negotiated cipher suite.
     *
     * For example, a TLSv1.2 protocol with AES256-SHA cipher:
     *
     * ```json
     * {
     *   "name": "AES256-SHA",
     *   "standardName": "TLS_RSA_WITH_AES_256_CBC_SHA",
     *   "version": "SSLv3"
     * }
     * ```
     *
     */
    getCipher(): CipherNameAndProtocol;

    /**
     * Returns an object representing the type, name, and size of parameter of
     * an ephemeral key exchange in `perfect forward secrecy` on a client
     * connection. It returns an empty object when the key exchange is not
     * ephemeral. As this is only supported on a client socket, `null` is returned
     * if called on a server socket. The supported types are `'DH'` and `'ECDH'`. The `name` property is available only when type is `'ECDH'`.
     *
     * For example: `{ type: 'ECDH', name: 'prime256v1', size: 256 }`.
     */
    getEphemeralKeyInfo(): EphemeralKeyInfo | object | null;

    /**
     * Returns an object representing the peer's certificate. If the peer does not
     * provide a certificate, an empty object will be returned. If the socket has been
     * destroyed, `null` will be returned.
     *
     * If the full certificate chain was requested, each certificate will include an `issuerCertificate` property containing an object representing its issuer's
     * certificate.
     * @return A certificate object.
     */
    getPeerCertificate(): PeerCertificate;

    /**
     * See [SSL\_get\_shared\_sigalgs](https://www.openssl.org/docs/man1.1.1/man3/SSL_get_shared_sigalgs.html) for more information.
     * @since v12.11.0
     * @return List of signature algorithms shared between the server and the client in the order of decreasing preference.
     */
    getSharedSigalgs(): string[];

    /**
     * As the `Finished` messages are message digests of the complete handshake
     * (with a total of 192 bits for TLS 1.0 and more for SSL 3.0), they can
     * be used for external authentication procedures when the authentication
     * provided by SSL/TLS is not desired or is not enough.
     *
     * @return The latest `Finished` message that has been sent to the socket as part of an SSL/TLS handshake, or `undefined` if no `Finished` message has been sent yet.
     */
    getTLSFinishedMessage(): Buffer | undefined;

    /**
     * As the `Finished` messages are message digests of the complete handshake
     * (with a total of 192 bits for TLS 1.0 and more for SSL 3.0), they can
     * be used for external authentication procedures when the authentication
     * provided by SSL/TLS is not desired or is not enough.
     *
     * @return The latest `Finished` message that is expected or has actually been received from the socket as part of an SSL/TLS handshake, or `undefined` if there is no `Finished` message so
     * far.
     */
    getTLSPeerFinishedMessage(): Buffer | undefined;

    /**
     * For a client, returns the TLS session ticket if one is available, or `undefined`. For a server, always returns `undefined`.
     *
     * It may be useful for debugging.
     *
     * See `Session Resumption` for more information.
     */
    getTLSTicket(): Buffer | undefined;

    /**
     * Returns a string containing the negotiated SSL/TLS protocol version of the
     * current connection. The value `'unknown'` will be returned for connected
     * sockets that have not completed the handshaking process. The value `null` will
     * be returned for server sockets or disconnected client sockets.
     *
     * Protocol versions are:
     *
     * * `'SSLv3'`
     * * `'TLSv1'`
     * * `'TLSv1.1'`
     * * `'TLSv1.2'`
     * * `'TLSv1.3'`
     *
     */
    getTLSVersion(): string;

    /**
     * See `Session Resumption` for more information.
     * @return `true` if the session was reused, `false` otherwise.
     */
    isSessionReused(): boolean;

    /**
     * The `socket.setMaxSendFragment()` method sets the maximum TLS fragment size.
     * Returns `true` if setting the limit succeeded; `false` otherwise.
     *
     * Smaller fragment sizes decrease the buffering latency on the client: larger
     * fragments are buffered by the TLS layer until the entire fragment is received
     * and its integrity is verified; large fragments can span multiple roundtrips
     * and their processing can be delayed due to packet loss or reordering. However,
     * smaller fragments add extra TLS framing bytes and CPU overhead, which may
     * decrease overall server throughput.
     * @param [size=16384] The maximum TLS fragment size. The maximum value is `16384`.
     */
    setMaxSendFragment(size: number): boolean;
  }

  interface SocketListener<Data = undefined> {
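Taken together, the new members let a client inspect the TLS session after the handshake. A hedged sketch using `Bun.connect` (the host, port, and `tls: true` shorthand are illustrative assumptions; the property and method names come from the declarations above):

```ts
const socket = await Bun.connect({
  hostname: "example.com", // illustrative
  port: 443,
  tls: true,
  socket: {
    open(socket) {
      // Before the handshake completes these report placeholder values
      // ("unknown" protocol, null alpnProtocol), per the docs above.
      console.log("protocol:", socket.getTLSVersion());
      console.log("alpn:", socket.alpnProtocol);
    },
    data(socket, _chunk) {
      // Once data flows, the negotiated parameters are available.
      console.log("cipher:", socket.getCipher()); // { name, standardName, version }
      console.log("authorized:", socket.authorized);
      console.log("session reused:", socket.isSessionReused());
    },
    close() {},
  },
});
```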
Some files were not shown because too many files have changed in this diff.