mirror of
https://github.com/oven-sh/bun
synced 2026-02-25 19:17:20 +01:00
Compare commits
485 Commits
test-astro
...
jdalton/cu
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
c821a4d14a | ||
|
|
ff965af067 | ||
|
|
48d642e9e0 | ||
|
|
e197b7c3a0 | ||
|
|
95e466628b | ||
|
|
41dda11802 | ||
|
|
8ba332b43a | ||
|
|
52894121ea | ||
|
|
a5e83189df | ||
|
|
405e2d1517 | ||
|
|
483b73c4bf | ||
|
|
cb97baed22 | ||
|
|
01df0e6ef0 | ||
|
|
08aa40c570 | ||
|
|
82fcb38005 | ||
|
|
fc80683256 | ||
|
|
54be1d7705 | ||
|
|
7eed9d893e | ||
|
|
e77f9036e0 | ||
|
|
5a6ef045aa | ||
|
|
9350f6bc0b | ||
|
|
4ccf6fb284 | ||
|
|
83f2432da6 | ||
|
|
f673a84c75 | ||
|
|
51680dbfdc | ||
|
|
1aa36e27cb | ||
|
|
e6285a1cc8 | ||
|
|
a064ae71ad | ||
|
|
f9e78c419f | ||
|
|
0fb864e0ca | ||
|
|
a4da0acb0e | ||
|
|
f47def8515 | ||
|
|
14398ae27e | ||
|
|
64d351b102 | ||
|
|
6b577a3970 | ||
|
|
a159850cce | ||
|
|
87eedc29d5 | ||
|
|
124392ee98 | ||
|
|
e5421c56b1 | ||
|
|
407f42718c | ||
|
|
a36355cefb | ||
|
|
2d21567271 | ||
|
|
611b296a6a | ||
|
|
317998fdd6 | ||
|
|
ab40b0e054 | ||
|
|
2c3dc5176b | ||
|
|
5fc4538477 | ||
|
|
be0896e204 | ||
|
|
56f1e5a362 | ||
|
|
702da52312 | ||
|
|
2a8956a268 | ||
|
|
86b1aa02b3 | ||
|
|
065eae5a95 | ||
|
|
9fa13aec09 | ||
|
|
3e703c4c76 | ||
|
|
01453be2d1 | ||
|
|
9b32516bcb | ||
|
|
7e787d2a5a | ||
|
|
b7a32b87ab | ||
|
|
0491ff4186 | ||
|
|
943c72c8a1 | ||
|
|
cb6f4b9f4d | ||
|
|
45bd2264e3 | ||
|
|
8733748eb6 | ||
|
|
66fa317390 | ||
|
|
ef0e96a2b2 | ||
|
|
b0d326dfb9 | ||
|
|
517d5ecde9 | ||
|
|
072f2f15ea | ||
|
|
0cd7fa1a63 | ||
|
|
2fe0f90d36 | ||
|
|
0783b4bf0e | ||
|
|
4f96bc416b | ||
|
|
f43ecd8aa2 | ||
|
|
31db26b054 | ||
|
|
f5675284c7 | ||
|
|
98d253d9bb | ||
|
|
9846383b01 | ||
|
|
fbe4ed206a | ||
|
|
15263664b6 | ||
|
|
4611b84352 | ||
|
|
922ff08a80 | ||
|
|
4c933f733b | ||
|
|
f1c3e87a06 | ||
|
|
0bfe3760f6 | ||
|
|
68bc548dce | ||
|
|
7d4cef53a8 | ||
|
|
b400b36ca5 | ||
|
|
36d4f09d28 | ||
|
|
e77e1c1a58 | ||
|
|
6e1ef49ad4 | ||
|
|
2daade39a9 | ||
|
|
ae78441b25 | ||
|
|
fb8b47b2db | ||
|
|
9a0dadad24 | ||
|
|
f27fc77b3b | ||
|
|
88fcfa4461 | ||
|
|
9d56ea4880 | ||
|
|
b488734c50 | ||
|
|
8f975ec8ec | ||
|
|
1aee55524a | ||
|
|
8805e6150a | ||
|
|
237b5eded6 | ||
|
|
437faa9444 | ||
|
|
e65bab75bc | ||
|
|
d985e4db65 | ||
|
|
9a267db105 | ||
|
|
06c530ba2e | ||
|
|
a1e742734a | ||
|
|
0dc6108bd3 | ||
|
|
ecdde88670 | ||
|
|
2fd85ef0e6 | ||
|
|
a774f92757 | ||
|
|
eb84ebf974 | ||
|
|
bfe8afd674 | ||
|
|
e8179adcbb | ||
|
|
515ce30a85 | ||
|
|
97f8c92cf4 | ||
|
|
d4d6ef9e89 | ||
|
|
fa7e4bce27 | ||
|
|
89ec2fcb35 | ||
|
|
39ab26a663 | ||
|
|
e103b53c86 | ||
|
|
ee85bbc0a3 | ||
|
|
0103dd56ab | ||
|
|
5d6c68122c | ||
|
|
47acee5d87 | ||
|
|
54d762531a | ||
|
|
e5bd7fcb8f | ||
|
|
f30689256d | ||
|
|
50c1f37230 | ||
|
|
80ebcbf3d8 | ||
|
|
6345add18a | ||
|
|
2946bacbbd | ||
|
|
7fa2cf3f61 | ||
|
|
aa388e6762 | ||
|
|
5a2ad792a0 | ||
|
|
69c85dbe17 | ||
|
|
71d9199270 | ||
|
|
6b3e1af25d | ||
|
|
9f8ee7c8f7 | ||
|
|
233622d6bb | ||
|
|
837cbd60d5 | ||
|
|
9d6c0649a4 | ||
|
|
693a00dc5b | ||
|
|
1f9ce68348 | ||
|
|
492b2d5b76 | ||
|
|
8eebfd8e22 | ||
|
|
1d684e0d4f | ||
|
|
14c60eca94 | ||
|
|
6eab178790 | ||
|
|
32ca195f60 | ||
|
|
c381816c57 | ||
|
|
ae8f2a3afb | ||
|
|
25441ede20 | ||
|
|
a0c8ee31a1 | ||
|
|
135039b137 | ||
|
|
05f14c8d6a | ||
|
|
15755a0246 | ||
|
|
b6cadf387b | ||
|
|
b011fd2567 | ||
|
|
5e72a27a76 | ||
|
|
9ef7b304d9 | ||
|
|
38d5a5fe9a | ||
|
|
43b99021f8 | ||
|
|
8a34b8f6f8 | ||
|
|
70f344101b | ||
|
|
a3a6b894c5 | ||
|
|
a5bf1216d1 | ||
|
|
ae5f1b0168 | ||
|
|
20690403da | ||
|
|
903a202473 | ||
|
|
14bc121dc8 | ||
|
|
eac3bda895 | ||
|
|
0a7bbcb9a6 | ||
|
|
107015fd48 | ||
|
|
813afe6c53 | ||
|
|
411094b82d | ||
|
|
1f90608d66 | ||
|
|
2b09347dc6 | ||
|
|
f02ef8fb35 | ||
|
|
da13884295 | ||
|
|
8a39c02e7a | ||
|
|
bfc065c5cc | ||
|
|
93c081f351 | ||
|
|
4a66091491 | ||
|
|
d1dcf5ccf7 | ||
|
|
f8159f1b36 | ||
|
|
1a370eb483 | ||
|
|
f12ff9d1e5 | ||
|
|
4d6219eda4 | ||
|
|
a3630dde2e | ||
|
|
c6ba32b850 | ||
|
|
05984d405f | ||
|
|
696a5a28c9 | ||
|
|
b399deebdd | ||
|
|
39ea8cbfea | ||
|
|
09d51486e9 | ||
|
|
ead3bdf947 | ||
|
|
6077ace528 | ||
|
|
a119e8d636 | ||
|
|
72eb607e21 | ||
|
|
26325aca95 | ||
|
|
98242fa10d | ||
|
|
906f86d6fd | ||
|
|
9fdd1068da | ||
|
|
7e59f287a1 | ||
|
|
56df6a9888 | ||
|
|
ada49eb96d | ||
|
|
c5436c09ab | ||
|
|
a93f467a74 | ||
|
|
cd55401eba | ||
|
|
0f8bcfdc53 | ||
|
|
2428da9354 | ||
|
|
65e11c48b6 | ||
|
|
6b549ee592 | ||
|
|
5b538fb076 | ||
|
|
7e511f55de | ||
|
|
c271c6c38e | ||
|
|
00955c712e | ||
|
|
00e745ec2c | ||
|
|
6ee812d63d | ||
|
|
a623e7c702 | ||
|
|
59a0e885d9 | ||
|
|
3f1c39ad0b | ||
|
|
39d062cf3c | ||
|
|
92901eb6e0 | ||
|
|
ab68aa5a8e | ||
|
|
3f131c0411 | ||
|
|
db60c92e25 | ||
|
|
1730cfbbbf | ||
|
|
45a499912b | ||
|
|
61f92d9e5f | ||
|
|
3baaa28f93 | ||
|
|
b88324db31 | ||
|
|
dc19541719 | ||
|
|
2d2f3c4e50 | ||
|
|
f3ce571a29 | ||
|
|
1a2643520b | ||
|
|
31c17a1bb3 | ||
|
|
a1a4178c3f | ||
|
|
7ac5bdc9dd | ||
|
|
506c165860 | ||
|
|
6fc29b6412 | ||
|
|
b6f60680f6 | ||
|
|
1f71ad5661 | ||
|
|
cbb477cbe2 | ||
|
|
369e3022e4 | ||
|
|
925a94ffe6 | ||
|
|
ca89087684 | ||
|
|
9e47ceac87 | ||
|
|
4098dd0df5 | ||
|
|
c3b1333ae8 | ||
|
|
f88e5d128c | ||
|
|
4d7c66066b | ||
|
|
3872f7a5b7 | ||
|
|
6443c26a4a | ||
|
|
c2cd7402a0 | ||
|
|
fbcb622335 | ||
|
|
fc733fe89c | ||
|
|
441612917d | ||
|
|
adcad39e1d | ||
|
|
453a270b2e | ||
|
|
fe5e19da59 | ||
|
|
19e80771ac | ||
|
|
36c316a24a | ||
|
|
c0ccdccbeb | ||
|
|
1fc9b56936 | ||
|
|
8ec405facc | ||
|
|
beedb28e38 | ||
|
|
1aaf63efc4 | ||
|
|
0bce2b46ca | ||
|
|
c8305df5c2 | ||
|
|
9787ea91f2 | ||
|
|
a648ed9e6a | ||
|
|
ab95e2189f | ||
|
|
dc0736d61d | ||
|
|
5029fc8564 | ||
|
|
38c6575dc8 | ||
|
|
efb2cb5871 | ||
|
|
9fc12a1244 | ||
|
|
900eec71fa | ||
|
|
4fec7f84c8 | ||
|
|
5787350243 | ||
|
|
35ba928430 | ||
|
|
7ec64c2cc8 | ||
|
|
5e60861c34 | ||
|
|
39b66ded08 | ||
|
|
a2f595d352 | ||
|
|
d163351f3e | ||
|
|
351e47355a | ||
|
|
83ed65c687 | ||
|
|
ab85b2a2a0 | ||
|
|
6a63224f86 | ||
|
|
c9e98db258 | ||
|
|
351810e5bc | ||
|
|
3aabe58180 | ||
|
|
800fb12906 | ||
|
|
7541f4ce4f | ||
|
|
402f3353ac | ||
|
|
73b0426596 | ||
|
|
54f1f4635e | ||
|
|
300d17f223 | ||
|
|
ce63023553 | ||
|
|
59f9d2fe70 | ||
|
|
aaa827f90e | ||
|
|
c84be8c48b | ||
|
|
1d45c1c412 | ||
|
|
653e2934fe | ||
|
|
798f548607 | ||
|
|
1337bb6923 | ||
|
|
b6775e2df7 | ||
|
|
4519f97661 | ||
|
|
dfd4b01a97 | ||
|
|
1bf540efcf | ||
|
|
b1c8ae97ff | ||
|
|
6a5995e27d | ||
|
|
24ca605832 | ||
|
|
ad4b98b287 | ||
|
|
fd1d4b4e5a | ||
|
|
f0e0c6d5df | ||
|
|
b77480758f | ||
|
|
93d9b20974 | ||
|
|
f303bcd997 | ||
|
|
62e1389e17 | ||
|
|
b952ef8ad9 | ||
|
|
ec6339c25a | ||
|
|
2ff788f463 | ||
|
|
df17598026 | ||
|
|
298a60419b | ||
|
|
2f83f32582 | ||
|
|
b3bdf22ebb | ||
|
|
d45f2dae15 | ||
|
|
67c7769ddb | ||
|
|
03298cba70 | ||
|
|
3963d2de64 | ||
|
|
b2bf3f0dcf | ||
|
|
ffe447ba5a | ||
|
|
7389f1b108 | ||
|
|
7538c20afe | ||
|
|
ed02691f8b | ||
|
|
dae985ab3c | ||
|
|
52e1c2de08 | ||
|
|
0c532eac7b | ||
|
|
70e57a7da0 | ||
|
|
fdece8611a | ||
|
|
1147c70881 | ||
|
|
3c97f568a7 | ||
|
|
310d0c5646 | ||
|
|
d79fa7ac1a | ||
|
|
906ba8b2a0 | ||
|
|
54df5c032d | ||
|
|
70039ff038 | ||
|
|
90654143bb | ||
|
|
bc114fb9d3 | ||
|
|
f3727f00dc | ||
|
|
6389a6e24f | ||
|
|
851b19ee0e | ||
|
|
ebae02a08b | ||
|
|
809266fc7a | ||
|
|
98885279eb | ||
|
|
666e615889 | ||
|
|
0b99b01b0e | ||
|
|
eb98b61201 | ||
|
|
a043497fb4 | ||
|
|
f5115835da | ||
|
|
34881922c8 | ||
|
|
44b9960113 | ||
|
|
713d72703d | ||
|
|
383a615d21 | ||
|
|
6ee42f7719 | ||
|
|
1840de3661 | ||
|
|
6aa1319fd9 | ||
|
|
6a7b1a3208 | ||
|
|
5c23d670a3 | ||
|
|
22818e6806 | ||
|
|
0abe3745d2 | ||
|
|
ea0ffde991 | ||
|
|
194294f5b5 | ||
|
|
7f10622a84 | ||
|
|
a71a9e3447 | ||
|
|
53ee2d77b2 | ||
|
|
3d58437fc8 | ||
|
|
6517252153 | ||
|
|
5f86b839b4 | ||
|
|
5692f82aaf | ||
|
|
93714292bf | ||
|
|
0766fb5ae8 | ||
|
|
e83a9132d5 | ||
|
|
bc4146295f | ||
|
|
d8be3e51b4 | ||
|
|
6ba4e950cc | ||
|
|
81067477dc | ||
|
|
d8ca6ca699 | ||
|
|
05ef3329c4 | ||
|
|
ab93f9f74e | ||
|
|
8db407e9a7 | ||
|
|
5eb6f83251 | ||
|
|
1b8d7d5cb1 | ||
|
|
bf32d36e4c | ||
|
|
e9661c2953 | ||
|
|
0b66fd643e | ||
|
|
778bad9dfd | ||
|
|
ef8b9efaa4 | ||
|
|
864d59f822 | ||
|
|
f5bf67bd1b | ||
|
|
075c09401a | ||
|
|
c23579d66c | ||
|
|
6c200c089c | ||
|
|
3eb086f5fa | ||
|
|
fbff18a723 | ||
|
|
6e7014c91b | ||
|
|
711ea1604b | ||
|
|
538a243453 | ||
|
|
8be492c080 | ||
|
|
6c7a0e5a79 | ||
|
|
f4d14c02c7 | ||
|
|
3ca2d8ae5e | ||
|
|
c23aed993f | ||
|
|
11e07caaa4 | ||
|
|
e4de49df2a | ||
|
|
47e1fbe468 | ||
|
|
8353ba7857 | ||
|
|
606c80b049 | ||
|
|
d1436e3ecc | ||
|
|
7e0e007e3e | ||
|
|
2e74f7f079 | ||
|
|
2c81c2a13f | ||
|
|
d9a0fd4f59 | ||
|
|
d1ec8f86f3 | ||
|
|
85c997513d | ||
|
|
904134cc75 | ||
|
|
15e4f1bad3 | ||
|
|
09b8b6b468 | ||
|
|
4dfc7feae0 | ||
|
|
77e6b946f4 | ||
|
|
31814934f3 | ||
|
|
b43a8b54cf | ||
|
|
a165306fd9 | ||
|
|
056d45cf6b | ||
|
|
46955e95ef | ||
|
|
e6e4ffb4ae | ||
|
|
5946e13e27 | ||
|
|
8f3e278ab9 | ||
|
|
547ed6d74d | ||
|
|
d4adcb3ccf | ||
|
|
d4628992d8 | ||
|
|
24b4b2c16d | ||
|
|
35bac672bb | ||
|
|
d649f69404 | ||
|
|
7ce5061343 | ||
|
|
20d9e25ae2 | ||
|
|
89c76f2b45 | ||
|
|
a9a68c3143 | ||
|
|
d1c1c8fb5f | ||
|
|
4228ecea98 | ||
|
|
909a821d3d | ||
|
|
7a4e0158d6 | ||
|
|
228bfbd24a | ||
|
|
45e1bc3a21 | ||
|
|
e1489d4fe3 | ||
|
|
273f612c77 | ||
|
|
a82b2b2d04 | ||
|
|
bf767b53d7 | ||
|
|
1db1941577 | ||
|
|
49f86c87b4 | ||
|
|
346e27083f | ||
|
|
6d666da4b9 | ||
|
|
feca0c2647 | ||
|
|
bf806602d8 | ||
|
|
e3ab7408c9 | ||
|
|
c49a5a7d7d | ||
|
|
450397a277 | ||
|
|
f63eba952e | ||
|
|
eaa9ba99fa | ||
|
|
f0cbc79d8b | ||
|
|
01e0136d7c | ||
|
|
520845217e | ||
|
|
42d8a0b44b | ||
|
|
84414f8fe8 | ||
|
|
046d197519 | ||
|
|
1250655ddb | ||
|
|
29b1a4f61b | ||
|
|
2d5698c719 | ||
|
|
c08e8a76ef |
8
.gitattributes
vendored
8
.gitattributes
vendored
@@ -37,3 +37,11 @@ src/bun.js/bindings/simdutf.cpp linguist-vendored
|
||||
src/bun.js/bindings/simdutf.h linguist-vendored
|
||||
|
||||
docs/**/* linguist-documentation
|
||||
|
||||
# Don't count tests in the language stats - https://github.com/github-linguist/linguist/blob/master/docs/overrides.md
|
||||
test/**/* linguist-documentation
|
||||
bench/**/* linguist-documentation
|
||||
examples/**/* linguist-documentation
|
||||
|
||||
src/deps/*.c linguist-vendored
|
||||
src/deps/brotli/** linguist-vendored
|
||||
|
||||
18
.github/pull_request_template.md
vendored
18
.github/pull_request_template.md
vendored
@@ -19,18 +19,17 @@ This adds a new flag --bail to bun test. When set, it will stop running tests af
|
||||
|
||||
<!-- If JavaScript/TypeScript modules or builtins changed:
|
||||
|
||||
- [ ] I ran `make js` and committed the transpiled changes
|
||||
- [ ] I or my editor ran Prettier on the changed files (or I ran `bun fmt`)
|
||||
- [ ] I included a test for the new code, or an existing test covers it
|
||||
- [ ] I included a test for the new code, or existing tests cover it
|
||||
- [ ] I ran my tests locally and they pass (`bun-debug test test-file-name.test`)
|
||||
|
||||
-->
|
||||
|
||||
<!-- If Zig files changed:
|
||||
|
||||
- [ ] I checked the lifetime of memory allocated to verify it's (1) freed and (2) only freed when it should be
|
||||
- [ ] I or my editor ran `zig fmt` on the changed files
|
||||
- [ ] I included a test for the new code, or an existing test covers it
|
||||
- [ ] JSValue used outside of the stack is either wrapped in a JSC.Strong or is JSValueProtect'ed
|
||||
- [ ] I wrote TypeScript/JavaScript tests and they pass locally (`bun-debug test test-file-name.test`)
|
||||
-->
|
||||
|
||||
<!-- If new methods, getters, or setters were added to a publicly exposed class:
|
||||
@@ -43,17 +42,6 @@ This adds a new flag --bail to bun test. When set, it will stop running tests af
|
||||
- [ ] I made sure that specific versions of dependencies are used instead of ranged or tagged versions
|
||||
-->
|
||||
|
||||
<!-- If functions were added to exports.zig or bindings.zig
|
||||
|
||||
- [ ] I ran `make headers` to regenerate the C header file
|
||||
|
||||
-->
|
||||
|
||||
<!-- If \*.classes.ts files were added or changed:
|
||||
|
||||
- [ ] I ran `make codegen` to regenerate the C++ and Zig code
|
||||
-->
|
||||
|
||||
<!-- If a new builtin ESM/CJS module was added:
|
||||
|
||||
- [ ] I updated Aliases in `module_loader.zig` to include the new module
|
||||
|
||||
12
.github/workflows/bun-linux-aarch64.yml
vendored
12
.github/workflows/bun-linux-aarch64.yml
vendored
@@ -12,9 +12,12 @@ on:
|
||||
- "src/**/*"
|
||||
- "test/**/*"
|
||||
- "packages/bun-usockets/src/**/*"
|
||||
- "packages/bun-uws/src/**/*"
|
||||
- "CMakeLists.txt"
|
||||
- "build.zig"
|
||||
- "Makefile"
|
||||
- "Dockerfile"
|
||||
- ".github/workflows/bun-linux-aarch64.yml"
|
||||
pull_request:
|
||||
branches:
|
||||
- main
|
||||
@@ -22,9 +25,12 @@ on:
|
||||
- "src/**/*"
|
||||
- "test/**/*"
|
||||
- "packages/bun-usockets/src/**/*"
|
||||
- "packages/bun-uws/src/**/*"
|
||||
- "CMakeLists.txt"
|
||||
- "build.zig"
|
||||
- "Makefile"
|
||||
- "Dockerfile"
|
||||
- ".github/workflows/bun-linux-aarch64.yml"
|
||||
# Allows you to run this workflow manually from the Actions tab
|
||||
workflow_dispatch:
|
||||
|
||||
@@ -47,7 +53,11 @@ jobs:
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
submodules: recursive
|
||||
submodules: false
|
||||
ref: ${{github.sha}}
|
||||
clean: true
|
||||
- run: |
|
||||
bash ./scripts/update-submodules.sh
|
||||
- uses: docker/setup-buildx-action@v2
|
||||
id: buildx
|
||||
with:
|
||||
|
||||
74
.github/workflows/bun-linux-build.yml
vendored
74
.github/workflows/bun-linux-build.yml
vendored
@@ -12,6 +12,8 @@ on:
|
||||
- "src/**/*"
|
||||
- "test/**/*"
|
||||
- "packages/bun-usockets/src/**/*"
|
||||
- "packages/bun-uws/src/**/*"
|
||||
- "CMakeLists.txt"
|
||||
- "build.zig"
|
||||
- "Makefile"
|
||||
- "Dockerfile"
|
||||
@@ -22,6 +24,8 @@ on:
|
||||
- "src/**/*"
|
||||
- "test/**/*"
|
||||
- "packages/bun-usockets/src/**/*"
|
||||
- "packages/bun-uws/src/**/*"
|
||||
- "CMakeLists.txt"
|
||||
- "build.zig"
|
||||
- "Makefile"
|
||||
- "Dockerfile"
|
||||
@@ -45,17 +49,43 @@ jobs:
|
||||
build_arch: amd64
|
||||
runner: big-ubuntu
|
||||
build_machine_arch: x86_64
|
||||
assertions: "OFF"
|
||||
zig_optimize: "ReleaseFast"
|
||||
target: "artifact"
|
||||
- cpu: nehalem
|
||||
tag: linux-x64-baseline
|
||||
arch: x86_64
|
||||
build_arch: amd64
|
||||
runner: big-ubuntu
|
||||
build_machine_arch: x86_64
|
||||
assertions: "OFF"
|
||||
zig_optimize: "ReleaseFast"
|
||||
target: "artifact"
|
||||
# - cpu: haswell
|
||||
# tag: linux-x64-assertions
|
||||
# arch: x86_64
|
||||
# build_arch: amd64
|
||||
# runner: big-ubuntu
|
||||
# build_machine_arch: x86_64
|
||||
# assertions: "ON"
|
||||
# zig_optimize: "ReleaseSafe"
|
||||
# target: "artifact-assertions"
|
||||
# - cpu: nehalem
|
||||
# tag: linux-x64-baseline-assertions
|
||||
# arch: x86_64
|
||||
# build_arch: amd64
|
||||
# runner: big-ubuntu
|
||||
# build_machine_arch: x86_64
|
||||
# assertions: "ON"
|
||||
# zig_optimize: "ReleaseSafe"
|
||||
# target: "artifact-assertions"
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: recursive
|
||||
ref: ${{github.sha}}
|
||||
clean: true
|
||||
- uses: docker/setup-buildx-action@v2
|
||||
id: buildx
|
||||
with:
|
||||
@@ -84,7 +114,8 @@ jobs:
|
||||
BUILD_MACHINE_ARCH=${{matrix.build_machine_arch}}
|
||||
CPU_TARGET=${{matrix.cpu}}
|
||||
GIT_SHA=${{github.sha}}
|
||||
|
||||
ASSERTIONS=${{matrix.assertions}}
|
||||
ZIG_OPTIMIZE=${{matrix.zig_optimize}}
|
||||
SCCACHE_BUCKET=bun
|
||||
SCCACHE_REGION=auto
|
||||
SCCACHE_S3_USE_SSL=true
|
||||
@@ -92,8 +123,13 @@ jobs:
|
||||
AWS_ACCESS_KEY_ID=${{ secrets.CACHE_S3_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY=${{ secrets.CACHE_S3_SECRET_ACCESS_KEY }}
|
||||
platforms: linux/${{matrix.build_arch}}
|
||||
target: artifact
|
||||
target: ${{matrix.target}}
|
||||
outputs: type=local,dest=${{runner.temp}}/release
|
||||
- id: bun-version-check
|
||||
name: Bun version check
|
||||
run: |
|
||||
# If this hangs, it means something is seriously wrong with the build
|
||||
${{runner.temp}}/release/bun-profile --version
|
||||
- name: Zip
|
||||
run: |
|
||||
# if zip is not found
|
||||
@@ -187,12 +223,15 @@ jobs:
|
||||
include:
|
||||
- tag: linux-x64
|
||||
- tag: linux-x64-baseline
|
||||
# - tag: linux-x64-assertions
|
||||
# - tag: linux-x64-baseline-assertions
|
||||
steps:
|
||||
- id: checkout
|
||||
name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: false
|
||||
clean: true
|
||||
- id: download
|
||||
name: Download
|
||||
uses: actions/download-artifact@v3
|
||||
@@ -207,7 +246,11 @@ jobs:
|
||||
cd bun-${{matrix.tag}}
|
||||
chmod +x bun
|
||||
pwd >> $GITHUB_PATH
|
||||
./bun --version
|
||||
- id: bun-version-check
|
||||
name: Bun version check
|
||||
run: |
|
||||
# If this hangs, it means something is seriously wrong with the build
|
||||
bun --version
|
||||
- id: install-dependencies
|
||||
name: Install dependencies
|
||||
run: |
|
||||
@@ -215,6 +258,19 @@ jobs:
|
||||
bun install --verbose
|
||||
bun install --cwd=test --verbose
|
||||
bun install --cwd=packages/bun-internal-test --verbose
|
||||
|
||||
bun install --cwd=test/js/third_party/prisma --verbose
|
||||
|
||||
# This is disabled because the cores are ~5.5gb each
|
||||
# so it is easy to hit 50gb coredump downloads. Only enable if you need to retrieve one
|
||||
|
||||
# - name: Set core dumps to get stored in /cores
|
||||
# run: |
|
||||
# sudo mkdir /cores
|
||||
# sudo chmod 777 /cores
|
||||
# # Core filenames will be of the form executable.pid.timestamp:
|
||||
# sudo bash -c 'echo "/cores/%e.%p.%t" > /proc/sys/kernel/core_pattern'
|
||||
|
||||
- id: test
|
||||
name: Test (node runner)
|
||||
env:
|
||||
@@ -223,7 +279,15 @@ jobs:
|
||||
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
|
||||
# if: ${{github.event.inputs.use_bun == 'false'}}
|
||||
run: |
|
||||
ulimit -c unlimited
|
||||
ulimit -c
|
||||
|
||||
node packages/bun-internal-test/src/runner.node.mjs || true
|
||||
- uses: actions/upload-artifact@v3
|
||||
if: steps.test.outputs.failing_tests != ''
|
||||
with:
|
||||
name: cores
|
||||
path: /cores
|
||||
- uses: sarisia/actions-status-discord@v1
|
||||
if: always() && steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
|
||||
with:
|
||||
|
||||
46
.github/workflows/bun-mac-aarch64.yml
vendored
46
.github/workflows/bun-mac-aarch64.yml
vendored
@@ -15,6 +15,8 @@ on:
|
||||
- "src/**/*"
|
||||
- "test/**/*"
|
||||
- "packages/bun-usockets/src/**/*"
|
||||
- "packages/bun-uws/src/**/*"
|
||||
- "CMakeLists.txt"
|
||||
- "build.zig"
|
||||
- "Makefile"
|
||||
- "Dockerfile"
|
||||
@@ -24,6 +26,8 @@ on:
|
||||
- "src/**/*"
|
||||
- "test/**/*"
|
||||
- "packages/bun-usockets/src/**/*"
|
||||
- "packages/bun-uws/src/**/*"
|
||||
- "CMakeLists.txt"
|
||||
- "build.zig"
|
||||
- "Makefile"
|
||||
- "Dockerfile"
|
||||
@@ -98,7 +102,7 @@ jobs:
|
||||
tag: bun-darwin-aarch64
|
||||
obj: bun-obj-darwin-aarch64
|
||||
artifact: bun-obj-darwin-aarch64
|
||||
runner: macos-arm64
|
||||
runner: macos-13-xlarge
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Checkout submodules
|
||||
@@ -110,13 +114,20 @@ jobs:
|
||||
HOMEBREW_NO_AUTO_UPDATE: 1
|
||||
HOMEBREW_NO_INSTALL_CLEANUP: 1
|
||||
run: |
|
||||
brew install sccache ccache rust llvm@$LLVM_VERSION pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config --force
|
||||
brew install go sccache ccache rust llvm@$LLVM_VERSION pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config --force
|
||||
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix sccache)/bin" >> $GITHUB_PATH
|
||||
# echo "$(brew --prefix sccache)/bin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix llvm@$LLVM_VERSION)/bin" >> $GITHUB_PATH
|
||||
brew link --overwrite llvm@$LLVM_VERSION
|
||||
|
||||
curl -LO "$BUN_DOWNLOAD_URL_BASE/bun-darwin-aarch64.zip"
|
||||
unzip bun-darwin-aarch64.zip
|
||||
mkdir -p ${{ runner.temp }}/.bun/bin
|
||||
mv bun-darwin-aarch64/bun ${{ runner.temp }}/.bun/bin/bun
|
||||
chmod +x ${{ runner.temp }}/.bun/bin/bun
|
||||
echo "${{ runner.temp }}/.bun/bin" >> $GITHUB_PATH
|
||||
|
||||
- name: Hash submodule versions
|
||||
run: |
|
||||
print_data() {
|
||||
@@ -172,7 +183,7 @@ jobs:
|
||||
tag: bun-darwin-aarch64
|
||||
obj: bun-obj-darwin-aarch64
|
||||
artifact: bun-obj-darwin-aarch64
|
||||
runner: macos-arm64
|
||||
runner: macos-13-xlarge
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Checkout submodules
|
||||
@@ -184,13 +195,20 @@ jobs:
|
||||
HOMEBREW_NO_AUTO_UPDATE: 1
|
||||
HOMEBREW_NO_INSTALL_CLEANUP: 1
|
||||
run: |
|
||||
brew install sccache ccache rust llvm@$LLVM_VERSION pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config --force
|
||||
echo "$(brew --prefix sccache)/bin" >> $GITHUB_PATH
|
||||
brew install go sccache ccache rust llvm@$LLVM_VERSION pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config --force
|
||||
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
|
||||
# echo "$(brew --prefix sccache)/bin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix llvm@$LLVM_VERSION)/bin" >> $GITHUB_PATH
|
||||
brew link --overwrite llvm@$LLVM_VERSION
|
||||
|
||||
curl -LO "$BUN_DOWNLOAD_URL_BASE/bun-darwin-aarch64.zip"
|
||||
unzip bun-darwin-aarch64.zip
|
||||
mkdir -p ${{ runner.temp }}/.bun/bin
|
||||
mv bun-darwin-aarch64/bun ${{ runner.temp }}/.bun/bin/bun
|
||||
chmod +x ${{ runner.temp }}/.bun/bin/bun
|
||||
echo "${{ runner.temp }}/.bun/bin" >> $GITHUB_PATH
|
||||
|
||||
# TODO: replace with sccache
|
||||
- name: ccache
|
||||
uses: hendrikmuhs/ccache-action@v1.2
|
||||
@@ -237,9 +255,13 @@ jobs:
|
||||
obj: bun-obj-darwin-aarch64
|
||||
package: bun-darwin-aarch64
|
||||
artifact: bun-obj-darwin-aarch64
|
||||
runner: macos-arm64
|
||||
runner: macos-13-xlarge
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: recursive
|
||||
ref: ${{github.sha}}
|
||||
clean: true
|
||||
- name: Checkout submodules
|
||||
run: git submodule update --init --recursive --depth=1 --progress --force
|
||||
|
||||
@@ -368,7 +390,7 @@ jobs:
|
||||
matrix:
|
||||
include:
|
||||
- tag: bun-darwin-aarch64
|
||||
runner: macos-arm64
|
||||
runner: macos-13-xlarge
|
||||
steps:
|
||||
- id: checkout
|
||||
name: Checkout
|
||||
@@ -389,7 +411,11 @@ jobs:
|
||||
cd ${{matrix.tag}}
|
||||
chmod +x bun
|
||||
pwd >> $GITHUB_PATH
|
||||
./bun --version
|
||||
- id: bun-version-check
|
||||
name: Bun version check
|
||||
run: |
|
||||
# If this hangs, it means something is seriously wrong with the build
|
||||
bun --version
|
||||
- id: install
|
||||
name: Install dependencies
|
||||
run: |
|
||||
|
||||
25
.github/workflows/bun-mac-x64-baseline.yml
vendored
25
.github/workflows/bun-mac-x64-baseline.yml
vendored
@@ -15,6 +15,8 @@ on:
|
||||
- "src/**/*"
|
||||
- "test/**/*"
|
||||
- "packages/bun-usockets/src/**/*"
|
||||
- "packages/bun-uws/src/**/*"
|
||||
- "CMakeLists.txt"
|
||||
- "build.zig"
|
||||
- "Makefile"
|
||||
- "Dockerfile"
|
||||
@@ -24,6 +26,8 @@ on:
|
||||
- "src/**/*"
|
||||
- "test/**/*"
|
||||
- "packages/bun-usockets/src/**/*"
|
||||
- "packages/bun-uws/src/**/*"
|
||||
- "CMakeLists.txt"
|
||||
- "build.zig"
|
||||
- "Makefile"
|
||||
- "Dockerfile"
|
||||
@@ -109,7 +113,7 @@ jobs:
|
||||
arch: x86_64
|
||||
tag: bun-darwin-x64-baseline
|
||||
obj: bun-obj-darwin-x64-baseline
|
||||
runner: macos-12
|
||||
runner: macos-12-large
|
||||
artifact: bun-obj-darwin-x64-baseline
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
@@ -124,7 +128,7 @@ jobs:
|
||||
run: |
|
||||
brew install sccache ccache rust llvm@$LLVM_VERSION pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config --force
|
||||
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix sccache)/bin" >> $GITHUB_PATH
|
||||
# echo "$(brew --prefix sccache)/bin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix llvm@$LLVM_VERSION)/bin" >> $GITHUB_PATH
|
||||
brew link --overwrite llvm@$LLVM_VERSION
|
||||
@@ -183,7 +187,7 @@ jobs:
|
||||
arch: x86_64
|
||||
tag: bun-darwin-x64-baseline
|
||||
obj: bun-obj-darwin-x64-baseline
|
||||
runner: macos-12
|
||||
runner: macos-12-large
|
||||
artifact: bun-obj-darwin-x64-baseline
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
@@ -197,8 +201,8 @@ jobs:
|
||||
HOMEBREW_NO_INSTALL_CLEANUP: 1
|
||||
run: |
|
||||
brew install sccache ccache rust llvm@$LLVM_VERSION pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config --force
|
||||
echo "$(brew --prefix sccache)/bin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
|
||||
# echo "$(brew --prefix sccache)/bin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix llvm@$LLVM_VERSION)/bin" >> $GITHUB_PATH
|
||||
brew link --overwrite llvm@$LLVM_VERSION
|
||||
@@ -255,7 +259,7 @@ jobs:
|
||||
tag: bun-darwin-x64-baseline
|
||||
obj: bun-obj-darwin-x64-baseline
|
||||
package: bun-darwin-x64
|
||||
runner: macos-12
|
||||
runner: macos-12-large
|
||||
artifact: bun-obj-darwin-x64-baseline
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
@@ -375,7 +379,8 @@ jobs:
|
||||
name: macOS Test
|
||||
runs-on: ${{ matrix.runner }}
|
||||
needs: [macOS]
|
||||
if: github.event_name == 'pull_request' && github.repository_owner == 'oven-sh'
|
||||
# if: github.event_name == 'pull_request' && github.repository_owner == 'oven-sh'
|
||||
if: false
|
||||
permissions:
|
||||
pull-requests: write
|
||||
timeout-minutes: 30
|
||||
@@ -387,7 +392,7 @@ jobs:
|
||||
matrix:
|
||||
include:
|
||||
- tag: bun-darwin-x64-baseline
|
||||
runner: macos-12
|
||||
runner: macos-12-large
|
||||
steps:
|
||||
- id: checkout
|
||||
name: Checkout
|
||||
@@ -408,7 +413,11 @@ jobs:
|
||||
cd ${{matrix.tag}}
|
||||
chmod +x bun
|
||||
pwd >> $GITHUB_PATH
|
||||
./bun --version
|
||||
- id: bun-version-check
|
||||
name: Bun version check
|
||||
run: |
|
||||
# If this hangs, it means something is seriously wrong with the build
|
||||
bun --version
|
||||
- id: install
|
||||
name: Install dependencies
|
||||
run: |
|
||||
|
||||
23
.github/workflows/bun-mac-x64.yml
vendored
23
.github/workflows/bun-mac-x64.yml
vendored
@@ -15,6 +15,8 @@ on:
|
||||
- "src/**/*"
|
||||
- "test/**/*"
|
||||
- "packages/bun-usockets/src/**/*"
|
||||
- "packages/bun-uws/src/**/*"
|
||||
- "CMakeLists.txt"
|
||||
- "build.zig"
|
||||
- "Makefile"
|
||||
- "Dockerfile"
|
||||
@@ -24,6 +26,8 @@ on:
|
||||
- "src/**/*"
|
||||
- "test/**/*"
|
||||
- "packages/bun-usockets/src/**/*"
|
||||
- "packages/bun-uws/src/**/*"
|
||||
- "CMakeLists.txt"
|
||||
- "build.zig"
|
||||
- "Makefile"
|
||||
- "Dockerfile"
|
||||
@@ -107,7 +111,7 @@ jobs:
|
||||
arch: x86_64
|
||||
tag: bun-darwin-x64
|
||||
obj: bun-obj-darwin-x64
|
||||
runner: macos-12
|
||||
runner: macos-12-large
|
||||
artifact: bun-obj-darwin-x64
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
@@ -122,7 +126,7 @@ jobs:
|
||||
run: |
|
||||
brew install sccache ccache rust llvm@$LLVM_VERSION pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config --force
|
||||
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix sccache)/bin" >> $GITHUB_PATH
|
||||
# echo "$(brew --prefix sccache)/bin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix llvm@$LLVM_VERSION)/bin" >> $GITHUB_PATH
|
||||
brew link --overwrite llvm@$LLVM_VERSION
|
||||
@@ -181,7 +185,7 @@ jobs:
|
||||
arch: x86_64
|
||||
tag: bun-darwin-x64
|
||||
obj: bun-obj-darwin-x64
|
||||
runner: macos-12
|
||||
runner: macos-12-large
|
||||
artifact: bun-obj-darwin-x64
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
@@ -195,7 +199,7 @@ jobs:
|
||||
HOMEBREW_NO_INSTALL_CLEANUP: 1
|
||||
run: |
|
||||
brew install sccache ccache rust llvm@$LLVM_VERSION pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config --force
|
||||
echo "$(brew --prefix sccache)/bin" >> $GITHUB_PATH
|
||||
# echo "$(brew --prefix sccache)/bin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix llvm@$LLVM_VERSION)/bin" >> $GITHUB_PATH
|
||||
@@ -253,9 +257,8 @@ jobs:
|
||||
tag: bun-darwin-x64
|
||||
obj: bun-obj-darwin-x64
|
||||
package: bun-darwin-x64
|
||||
runner: macos-12
|
||||
runner: macos-12-large
|
||||
artifact: bun-obj-darwin-x64
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3-4/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Checkout submodules
|
||||
@@ -386,7 +389,7 @@ jobs:
|
||||
matrix:
|
||||
include:
|
||||
- tag: bun-darwin-x64
|
||||
runner: macos-12
|
||||
runner: macos-12-large
|
||||
steps:
|
||||
- id: checkout
|
||||
name: Checkout
|
||||
@@ -407,7 +410,11 @@ jobs:
|
||||
cd ${{matrix.tag}}
|
||||
chmod +x bun
|
||||
pwd >> $GITHUB_PATH
|
||||
./bun --version
|
||||
- id: bun-version-check
|
||||
name: Bun version check
|
||||
run: |
|
||||
# If this hangs, it means something is seriously wrong with the build
|
||||
bun --version
|
||||
- id: install
|
||||
name: Install dependencies
|
||||
run: |
|
||||
|
||||
8
.github/workflows/bun-release.yml
vendored
8
.github/workflows/bun-release.yml
vendored
@@ -60,7 +60,7 @@ jobs:
|
||||
- name: Setup Bun
|
||||
uses: oven-sh/setup-bun@v1
|
||||
with:
|
||||
bun-version: latest
|
||||
bun-version: "1.0.21"
|
||||
- name: Install Dependencies
|
||||
run: bun install
|
||||
- name: Sign Release
|
||||
@@ -85,7 +85,7 @@ jobs:
|
||||
- name: Setup Bun
|
||||
uses: oven-sh/setup-bun@v1
|
||||
with:
|
||||
bun-version: latest
|
||||
bun-version: "1.0.21"
|
||||
- name: Install Dependencies
|
||||
run: bun install
|
||||
- name: Release
|
||||
@@ -113,7 +113,7 @@ jobs:
|
||||
- name: Setup Bun
|
||||
uses: oven-sh/setup-bun@v1
|
||||
with:
|
||||
bun-version: latest
|
||||
bun-version: "1.0.21"
|
||||
- name: Install Dependencies
|
||||
run: bun install
|
||||
- name: Setup Tag
|
||||
@@ -250,7 +250,7 @@ jobs:
|
||||
- name: Setup Bun
|
||||
uses: oven-sh/setup-bun@v1
|
||||
with:
|
||||
bun-version: latest
|
||||
bun-version: "1.0.21"
|
||||
- name: Install Dependencies
|
||||
run: bun install
|
||||
- name: Release
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
name: bun-windows-x64
|
||||
name: bun-windows
|
||||
|
||||
concurrency:
|
||||
group: bun-windows-x64-${{ github.ref }}
|
||||
group: bun-windows-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
@@ -9,9 +9,7 @@ env:
|
||||
LLVM_VERSION: 16.0.6
|
||||
BUN_DOWNLOAD_URL_BASE: https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/latest
|
||||
|
||||
cpu: native
|
||||
arch: x86_64
|
||||
tag: bun-windows-x64
|
||||
tag: bun-windows
|
||||
# TODO: wire this up to workflow_dispatch.
|
||||
# github's expression syntax makes this hard to set a default to true
|
||||
canary: true
|
||||
@@ -23,6 +21,8 @@ on:
|
||||
- "src/**/*"
|
||||
- "test/**/*"
|
||||
- "packages/bun-usockets/src/**/*"
|
||||
- "packages/bun-uws/src/**/*"
|
||||
- "CMakeLists.txt"
|
||||
- "build.zig"
|
||||
- "Makefile"
|
||||
- "Dockerfile"
|
||||
@@ -32,6 +32,8 @@ on:
|
||||
- "src/**/*"
|
||||
- "test/**/*"
|
||||
- "packages/bun-usockets/src/**/*"
|
||||
- "packages/bun-uws/src/**/*"
|
||||
- "CMakeLists.txt"
|
||||
- "build.zig"
|
||||
- "Makefile"
|
||||
- "Dockerfile"
|
||||
@@ -45,6 +47,11 @@ on:
|
||||
|
||||
jobs:
|
||||
windows-zig:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
cpu: [haswell, nehalem]
|
||||
arch: [x86_64]
|
||||
name: Zig Build
|
||||
runs-on: med-ubuntu
|
||||
timeout-minutes: 60
|
||||
@@ -82,11 +89,13 @@ jobs:
|
||||
build-args: |
|
||||
BUILDARCH=${{ runner.arch == 'X64' && 'amd64' || 'arm64' }}
|
||||
BUILD_MACHINE_ARCH=${{ runner.arch == 'X64' && 'x86_64' || 'aarch64' }}
|
||||
ARCH=${{ env.arch }}
|
||||
CPU_TARGET=${{ env.cpu }}
|
||||
TRIPLET=${{ env.arch }}-windows-msvc
|
||||
ARCH=${{ matrix.arch }}
|
||||
CPU_TARGET=${{ matrix.cpu }}
|
||||
TRIPLET=${{ matrix.arch }}-windows-msvc
|
||||
GIT_SHA=${{ github.sha }}
|
||||
CANARY=${{ env.canary == 'true' && steps.canary.outputs.canary_revision || '0' }}
|
||||
ZIG_OPTIMIZE=ReleaseSafe
|
||||
# TODO(@paperdave): enable ASSERTIONS=1
|
||||
platforms: linux/${{ runner.arch == 'X64' && 'amd64' || 'arm64' }}
|
||||
target: build_release_obj
|
||||
outputs: type=local,dest=${{runner.temp}}/release
|
||||
@@ -94,13 +103,18 @@ jobs:
|
||||
- name: Upload Zig Object
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ${{ env.tag }}-zig
|
||||
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-zig${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}
|
||||
path: ${{runner.temp}}/release/bun-zig.o
|
||||
|
||||
windows-dependencies:
|
||||
name: Dependencies
|
||||
runs-on: windows
|
||||
timeout-minutes: 60
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
cpu: [haswell, nehalem]
|
||||
arch: [x86_64]
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
@@ -113,7 +127,8 @@ jobs:
|
||||
git submodule | Where-Object { $_ -notmatch 'WebKit' }
|
||||
clang --version
|
||||
rustc --version
|
||||
Get-Content -Path (Get-ChildItem -Path 'scripts/build*.sh', 'scripts/all-dependencies.sh' | Sort-Object -Property Name).FullName | Out-String
|
||||
Get-Content -Path (Get-ChildItem -Path 'scripts/build*.ps1', 'scripts/all-dependencies.ps1', 'scripts/env.ps1' | Sort-Object -Property Name).FullName | Out-String
|
||||
echo 1
|
||||
})"
|
||||
$hash = ( -join ((New-Object -TypeName System.Security.Cryptography.SHA1CryptoServiceProvider).ComputeHash([System.Text.Encoding]::UTF8.GetBytes($data)) | ForEach-Object { $_.ToString("x2") } )).Substring(0, 10)
|
||||
echo "sha=${hash}" >> $env:GITHUB_OUTPUT
|
||||
@@ -124,7 +139,7 @@ jobs:
|
||||
uses: actions/cache/restore@v3
|
||||
with:
|
||||
path: bun-deps
|
||||
key: bun-deps-${{ env.tag }}-${{ steps.submodule-versions.outputs.sha }}
|
||||
key: bun-deps-${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}-${{ steps.submodule-versions.outputs.sha }}
|
||||
|
||||
- name: Install LLVM ${{ env.LLVM_VERSION }}
|
||||
if: ${{ !steps.cache-deps-restore.outputs.cache-hit }}
|
||||
@@ -139,7 +154,7 @@ jobs:
|
||||
- name: Build Dependencies
|
||||
if: ${{ !steps.cache-deps-restore.outputs.cache-hit }}
|
||||
run: |
|
||||
.\scripts\env.ps1
|
||||
.\scripts\env.ps1 ${{ matrix.cpu == 'nehalem' && '-Baseline' || '' }}
|
||||
Invoke-WebRequest -Uri "https://www.nasm.us/pub/nasm/releasebuilds/2.16.01/win64/nasm-2.16.01-win64.zip" -OutFile nasm.zip
|
||||
Expand-Archive nasm.zip (mkdir -Force "nasm")
|
||||
$Nasm = (Get-ChildItem "nasm")
|
||||
@@ -150,7 +165,7 @@ jobs:
|
||||
- name: Upload Dependencies
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ${{ env.tag }}-deps
|
||||
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-deps${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}
|
||||
path: bun-deps/
|
||||
|
||||
- name: Cache Dependencies
|
||||
@@ -166,6 +181,10 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 10
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
arch: [x86_64]
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- run: |
|
||||
@@ -180,7 +199,7 @@ jobs:
|
||||
echo "canary_revision=$(GITHUB_TOKEN="${{ secrets.GITHUB_TOKEN }}" bash ./scripts/calculate-canary-revision.sh --raw)" > build-codegen-win32-x64/.canary_revision
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ${{ env.tag }}-codegen
|
||||
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-codegen
|
||||
path: build-codegen-win32-x64/
|
||||
|
||||
windows-cpp:
|
||||
@@ -189,6 +208,11 @@ jobs:
|
||||
runs-on: windows
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
timeout-minutes: 90
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
cpu: [haswell, nehalem]
|
||||
arch: [x86_64]
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: KyleMayes/install-llvm-action@1a3da29f56261a1e1f937ec88f0856a9b8321d7e
|
||||
@@ -198,13 +222,12 @@ jobs:
|
||||
- name: Download Codegen
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: ${{ env.tag }}-codegen
|
||||
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-codegen
|
||||
path: build
|
||||
- name: Build C++
|
||||
run: |
|
||||
# Using SCCache is blocked by
|
||||
# https://github.com/mozilla/sccache/issues/1843
|
||||
# https://github.com/mozilla/sccache/pull/1856
|
||||
# Using SCCache was blocked by an issue that is fixed in a newer version.
|
||||
# TODO UPDATE
|
||||
# $sczip = "sccache-v0.6.0-x86_64-pc-windows-msvc"
|
||||
|
||||
# Invoke-WebRequest -Uri "https://github.com/mozilla/sccache/releases/download/v0.6.0/${sczip}.zip" -OutFile "${sczip}.zip"
|
||||
@@ -219,28 +242,30 @@ jobs:
|
||||
|
||||
$CANARY_REVISION = if (Test-Path build/.canary_revision) { Get-Content build/.canary_revision } else { "0" }
|
||||
|
||||
.\scripts\env.ps1
|
||||
.\scripts\env.ps1 ${{ matrix.cpu == 'nehalem' && '-Baseline' || '' }}
|
||||
.\scripts\update-submodules.ps1
|
||||
.\scripts\build-libuv.ps1 -CloneOnly $True
|
||||
cd build
|
||||
# "-DCCACHE_PROGRAM=${SCCACHE}"
|
||||
# TODO(@paperdave): pass the proper revision of canary here. without it,
|
||||
# the properties window will display the wrong version.
|
||||
# not really a big deal for time being. should be resolved before release
|
||||
cmake .. -G Ninja -DCMAKE_BUILD_TYPE=Release `
|
||||
-DNO_CODEGEN=1 `
|
||||
-DNO_CONFIGURE_DEPENDS=1 `
|
||||
"-DCANARY=${CANARY_REVISION}" `
|
||||
-DBUN_CPP_ONLY=1
|
||||
-DBUN_CPP_ONLY=1 ${{ matrix.cpu == 'nehalem' && '-DUSE_BASELINE_BUILD=1' || '' }}
|
||||
if ($LASTEXITCODE -ne 0) { throw "CMake configuration failed" }
|
||||
.\compile-cpp-only.ps1 -v
|
||||
if ($LASTEXITCODE -ne 0) { throw "C++ compilation failed" }
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ${{ env.tag }}-cpp
|
||||
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-cpp${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}
|
||||
path: build/bun-cpp-objects.a
|
||||
|
||||
windows-link:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
cpu: [haswell, nehalem]
|
||||
arch: [x86_64]
|
||||
name: Link
|
||||
needs: [windows-dependencies, windows-codegen, windows-cpp, windows-zig]
|
||||
runs-on: windows-latest
|
||||
@@ -256,27 +281,27 @@ jobs:
|
||||
- name: Download Codegen
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: ${{ env.tag }}-codegen
|
||||
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-codegen
|
||||
path: build
|
||||
- name: Download Dependencies
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: ${{ env.tag }}-deps
|
||||
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-deps${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}
|
||||
path: bun-deps
|
||||
- name: Download Zig Object
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: ${{ env.tag }}-zig
|
||||
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-zig${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}
|
||||
path: bun-zig
|
||||
- name: Download C++ Objects
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: ${{ env.tag }}-cpp
|
||||
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-cpp${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}
|
||||
path: bun-cpp
|
||||
- name: Link
|
||||
run: |
|
||||
.\scripts\update-submodules.ps1
|
||||
.\scripts\env.ps1
|
||||
.\scripts\env.ps1 ${{ matrix.cpu == 'nehalem' && '-Baseline' || '' }}
|
||||
Set-Location build
|
||||
$CANARY_REVISION = if (Test-Path build/.canary_revision) { Get-Content build/.canary_revision } else { "0" }
|
||||
cmake .. -G Ninja -DCMAKE_BUILD_TYPE=Release `
|
||||
@@ -286,19 +311,20 @@ jobs:
|
||||
-DBUN_LINK_ONLY=1 `
|
||||
"-DBUN_DEPS_OUT_DIR=$(Resolve-Path ../bun-deps)" `
|
||||
"-DBUN_CPP_ARCHIVE=$(Resolve-Path ../bun-cpp/bun-cpp-objects.a)" `
|
||||
"-DBUN_ZIG_OBJ=$(Resolve-Path ../bun-zig/bun-zig.o)"
|
||||
"-DBUN_ZIG_OBJ=$(Resolve-Path ../bun-zig/bun-zig.o)" `
|
||||
${{ matrix.cpu == 'nehalem' && '-DUSE_BASELINE_BUILD=1' || '' }}
|
||||
if ($LASTEXITCODE -ne 0) { throw "CMake configuration failed" }
|
||||
ninja -v
|
||||
if ($LASTEXITCODE -ne 0) { throw "Link failed!" }
|
||||
- name: Package
|
||||
run: |
|
||||
$Dist = mkdir -Force "${{ env.tag }}"
|
||||
$Dist = mkdir -Force "${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}"
|
||||
cp -r build\bun.exe "$Dist\bun.exe"
|
||||
Compress-Archive $Dist ${{ env.tag }}.zip
|
||||
Compress-Archive $Dist ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}.zip
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ${{ env.tag }}
|
||||
path: ${{ env.tag }}.zip
|
||||
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}
|
||||
path: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}.zip
|
||||
- name: Release
|
||||
id: release
|
||||
uses: ncipollo/release-action@v1
|
||||
@@ -315,7 +341,7 @@ jobs:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
name: "Canary (${{github.sha}})"
|
||||
tag: "canary"
|
||||
artifacts: "${{env.tag}}.zip"
|
||||
artifacts: "${{env.tag}}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}.zip"
|
||||
- uses: sarisia/actions-status-discord@v1
|
||||
if: failure() && github.repository_owner == 'oven-sh' && github.event_name == 'pull_request'
|
||||
with:
|
||||
@@ -325,13 +351,111 @@ jobs:
|
||||
noprefix: true
|
||||
nocontext: true
|
||||
description: |
|
||||
Pull Request
|
||||
### [${{github.event.pull_request.title}}](https://github.com/oven-sh/bun/pull/${{github.event.number}})
|
||||
|
||||
@${{ github.actor }}
|
||||
|
||||
Build failed on ${{ env.tag }}:
|
||||
Build failed on Windows ${{ matrix.arch }}${{ matrix.cpu == 'nehalem' && ' Baseline' || '' }}
|
||||
|
||||
**[View build output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
|
||||
**[Build Output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})** | [Commit](https://github.com/oven-sh/bun/commits/${{github.sha}})
|
||||
windows-test:
|
||||
name: Test
|
||||
runs-on: windows-latest
|
||||
needs: [windows-link]
|
||||
if: github.event_name == 'pull_request' && github.repository_owner == 'oven-sh'
|
||||
permissions:
|
||||
pull-requests: write
|
||||
timeout-minutes: 30
|
||||
outputs:
|
||||
failing_tests: ${{ steps.test.outputs.failing_tests }}
|
||||
failing_tests_count: ${{ steps.test.outputs.failing_tests_count }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
# TODO: test baseline, disabled due to noise
|
||||
cpu: [haswell]
|
||||
arch: [x86_64]
|
||||
steps:
|
||||
- id: checkout
|
||||
name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
submodules: false
|
||||
- id: download
|
||||
name: Download Release
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}
|
||||
path: ${{runner.temp}}/release
|
||||
- name: Install Bun
|
||||
run: |
|
||||
cd ${{runner.temp}}/release
|
||||
unzip ${{env.tag}}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}.zip
|
||||
cd ${{env.tag}}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}
|
||||
pwd >> $env:GITHUB_PATH
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
# bun install --verbose
|
||||
# bun install --cwd=test --verbose
|
||||
# bun install --cwd=packages/bun-internal-test --verbose
|
||||
|
||||
[Commit ${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})
|
||||
npm install
|
||||
cd test && npm install
|
||||
cd ../packages/bun-internal-test && npm install
|
||||
cd ../..
|
||||
- id: test
|
||||
name: Run tests
|
||||
env:
|
||||
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
|
||||
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
|
||||
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
|
||||
run: |
|
||||
try {
|
||||
$ErrorActionPreference = "SilentlyContinue"
|
||||
$null = node packages/bun-internal-test/src/runner.node.mjs ${{runner.temp}}/release/${{env.tag}}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}/bun.exe || $true
|
||||
} catch {}
|
||||
$ErrorActionPreference = "Stop"
|
||||
- uses: sarisia/actions-status-discord@v1
|
||||
if: always() && steps.test.outputs.regressing_tests != '' && github.event_name == 'pull_request'
|
||||
with:
|
||||
title: ""
|
||||
webhook: ${{ secrets.DISCORD_WEBHOOK }}
|
||||
status: "failure"
|
||||
noprefix: true
|
||||
nocontext: true
|
||||
description: |
|
||||
### ❌🪟 [${{github.event.pull_request.title}}](https://github.com/oven-sh/bun/pull/${{github.event.number}})
|
||||
|
||||
@${{ github.actor }}, there are **${{ steps.test.outputs.regressing_test_count }} test regressions** on Windows ${{ matrix.arch }}${{ matrix.cpu == 'nehalem' && ' Baseline' || '' }}
|
||||
|
||||
${{ steps.test.outputs.regressing_tests }}
|
||||
|
||||
[Full Test Output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})
|
||||
- name: Comment on PR
|
||||
if: always() && steps.test.outputs.regressing_tests != '' && github.event_name == 'pull_request'
|
||||
uses: thollander/actions-comment-pull-request@v2
|
||||
with:
|
||||
comment_tag: test-windows-${{ matrix.arch }}-${{ matrix.cpu }}
|
||||
message: |
|
||||
### ❌🪟 @${{ github.actor }}, there are **${{ steps.test.outputs.regressing_test_count }} test regressions** on Windows ${{ matrix.arch }}${{ matrix.cpu == 'nehalem' && ' Baseline' || '' }}
|
||||
|
||||
${{ steps.test.outputs.regressing_tests }}
|
||||
|
||||
[Full Test Output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})
|
||||
- name: Uncomment on PR
|
||||
if: steps.test.outputs.regressing_tests == '' && github.event_name == 'pull_request'
|
||||
uses: thollander/actions-comment-pull-request@v2
|
||||
with:
|
||||
comment_tag: test-windows-${{ matrix.arch }}-${{ matrix.cpu }}
|
||||
mode: upsert
|
||||
create_if_not_exists: false
|
||||
message: |
|
||||
✅🪟 Test regressions on Windows ${{ matrix.arch }}${{ matrix.cpu == 'nehalem' && ' Baseline' || '' }} have been resolved.
|
||||
- id: fail
|
||||
name: Fail the build
|
||||
if: steps.test.outputs.regressing_tests != '' && github.event_name == 'pull_request'
|
||||
run: exit 1
|
||||
45
.github/workflows/format.yml
vendored
Normal file
45
.github/workflows/format.yml
vendored
Normal file
@@ -0,0 +1,45 @@
|
||||
name: autofix.ci # Must be named this for autofix.ci to work
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
pull_request:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
env:
|
||||
ZIG_VERSION: 0.12.0-dev.1828+225fe6ddb
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
format:
|
||||
name: format
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
sparse-checkout: |
|
||||
src
|
||||
packages
|
||||
test
|
||||
bench
|
||||
- name: Setup Bun
|
||||
uses: oven-sh/setup-bun@v1
|
||||
with:
|
||||
bun-version: "1.0.21"
|
||||
- name: Setup Zig
|
||||
uses: goto-bus-stop/setup-zig@c7b6cdd3adba8f8b96984640ff172c37c93f73ee
|
||||
with:
|
||||
version: ${{ env.ZIG_VERSION }}
|
||||
- name: Install Dependencies
|
||||
run: |
|
||||
bun install
|
||||
- name: Format
|
||||
run: |
|
||||
bun fmt
|
||||
bun fmt:zig
|
||||
- name: Commit # https://autofix.ci/
|
||||
uses: autofix-ci/action@d3e591514b99d0fca6779455ff8338516663f7cc
|
||||
78
.github/workflows/prettier-fmt.yml
vendored
78
.github/workflows/prettier-fmt.yml
vendored
@@ -1,78 +0,0 @@
|
||||
name: prettier
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches:
|
||||
- main
|
||||
- jarred/test-actions
|
||||
# Allows you to run this workflow manually from the Actions tab
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
prettier-fmt:
|
||||
name: prettier
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
pull-requests: write
|
||||
outputs:
|
||||
prettier_fmt_errs: ${{ steps.fmt.outputs.prettier_fmt_errs }}
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
submodules: recursive
|
||||
|
||||
- id: setup
|
||||
name: Setup
|
||||
uses: oven-sh/setup-bun@v1
|
||||
with:
|
||||
bun-version: latest
|
||||
- id: install
|
||||
name: Install prettier
|
||||
run: bun install
|
||||
- name: Run prettier
|
||||
id: fmt
|
||||
run: |
|
||||
rm -f .failed
|
||||
bun prettier --check "./bench/**/*.{ts,tsx,js,jsx,mjs}" "./test/**/*.{ts,tsx,js,jsx,mjs}" "./src/**/*.{ts,tsx,js,jsx}" --config .prettierrc.cjs 2> prettier-fmt.err > prettier-fmt1.err || echo 'failed' > .failed
|
||||
|
||||
if [ -s .failed ]; then
|
||||
delimiter="$(openssl rand -hex 8)"
|
||||
echo "prettier_fmt_errs<<${delimiter}" >> "${GITHUB_OUTPUT}"
|
||||
cat prettier-fmt.err >> "${GITHUB_OUTPUT}"
|
||||
cat prettier-fmt1.err >> "${GITHUB_OUTPUT}"
|
||||
echo "${delimiter}" >> "${GITHUB_OUTPUT}"
|
||||
fi
|
||||
- name: Comment on PR
|
||||
if: steps.fmt.outputs.prettier_fmt_errs != ''
|
||||
uses: thollander/actions-comment-pull-request@v2
|
||||
with:
|
||||
comment_tag: prettier-fmt
|
||||
message: |
|
||||
❌ @${{ github.actor }} `prettier` reported errors
|
||||
|
||||
```js
|
||||
${{ steps.fmt.outputs.prettier_fmt_errs }}
|
||||
```
|
||||
|
||||
To one-off fix this manually, run:
|
||||
```sh
|
||||
bun fmt
|
||||
```
|
||||
|
||||
You might need to run `bun install` locally and configure your text editor to [auto-format on save](https://marketplace.visualstudio.com/items?itemName=esbenp.prettier-vscode).
|
||||
|
||||
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
|
||||
- name: Uncomment on PR
|
||||
if: steps.fmt.outputs.prettier_fmt_errs == ''
|
||||
uses: thollander/actions-comment-pull-request@v2
|
||||
with:
|
||||
comment_tag: prettier-fmt
|
||||
mode: upsert
|
||||
create_if_not_exists: false
|
||||
message: |
|
||||
✅ `prettier` errors have been resolved. Thank you.
|
||||
|
||||
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
|
||||
- name: Fail the job
|
||||
if: steps.fmt.outputs.prettier_fmt_errs != ''
|
||||
run: exit 1
|
||||
89
.github/workflows/zig-fmt.yml
vendored
89
.github/workflows/zig-fmt.yml
vendored
@@ -1,89 +0,0 @@
|
||||
name: zig-fmt
|
||||
|
||||
env:
|
||||
ZIG_VERSION: 0.12.0-dev.1297+a9e66ed73
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches:
|
||||
- main
|
||||
- jarred/test-actions
|
||||
paths:
|
||||
- "src/**/*.zig"
|
||||
- "src/*.zig"
|
||||
# Allows you to run this workflow manually from the Actions tab
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
zig-fmt:
|
||||
name: zig fmt
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
pull-requests: write
|
||||
outputs:
|
||||
zig_fmt_errs: ${{ steps.fmt.outputs.zig_fmt_errs }}
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
submodules: recursive
|
||||
- name: Install zig
|
||||
run: |
|
||||
curl https://ziglang.org/builds/zig-linux-x86_64-${{env.ZIG_VERSION}}.tar.xz -L -o zig.tar.xz
|
||||
tar -xf zig.tar.xz
|
||||
echo "$(pwd)/zig-linux-x86_64-${{env.ZIG_VERSION}}" >> $GITHUB_PATH
|
||||
- name: Run zig fmt
|
||||
id: fmt
|
||||
run: |
|
||||
zig fmt --check src/*.zig src/**/*.zig 2> zig-fmt.err > zig-fmt.err2 || echo "Failed"
|
||||
delimiter="$(openssl rand -hex 8)"
|
||||
echo "zig_fmt_errs<<${delimiter}" >> "${GITHUB_OUTPUT}"
|
||||
|
||||
if [ -s zig-fmt.err ]; then
|
||||
echo "// The following errors occurred:" >> "${GITHUB_OUTPUT}"
|
||||
cat zig-fmt.err >> "${GITHUB_OUTPUT}"
|
||||
fi
|
||||
|
||||
if [ -s zig-fmt.err2 ]; then
|
||||
echo "// The following files were not formatted:" >> "${GITHUB_OUTPUT}"
|
||||
cat zig-fmt.err2 >> "${GITHUB_OUTPUT}"
|
||||
fi
|
||||
|
||||
echo "${delimiter}" >> "${GITHUB_OUTPUT}"
|
||||
- name: Comment on PR
|
||||
if: steps.fmt.outputs.zig_fmt_errs != ''
|
||||
uses: thollander/actions-comment-pull-request@v2
|
||||
with:
|
||||
comment_tag: zig-fmt
|
||||
message: |
|
||||
❌ @${{ github.actor }} `zig fmt` reported errors. Consider configuring your text editor to [auto-format on save](https://github.com/ziglang/vscode-zig)
|
||||
|
||||
```zig
|
||||
// # zig fmt --check src/*.zig src/**/*.zig
|
||||
${{ steps.fmt.outputs.zig_fmt_errs }}
|
||||
```
|
||||
|
||||
To one-off fix this manually, run:
|
||||
|
||||
```sh
|
||||
zig fmt src/*.zig src/**/*.zig
|
||||
```
|
||||
|
||||
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
|
||||
<sup>zig v${{env.ZIG_VERSION}}</sup>
|
||||
|
||||
- name: Uncomment on PR
|
||||
if: steps.fmt.outputs.zig_fmt_errs == ''
|
||||
uses: thollander/actions-comment-pull-request@v2
|
||||
with:
|
||||
comment_tag: zig-fmt
|
||||
mode: upsert
|
||||
create_if_not_exists: false
|
||||
message: |
|
||||
✅ `zig fmt` errors have been resolved. Thank you.
|
||||
|
||||
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
|
||||
<sup>zig v${{env.ZIG_VERSION}}</sup>
|
||||
|
||||
- name: Fail the job
|
||||
if: steps.fmt.outputs.zig_fmt_errs != ''
|
||||
run: exit 1
|
||||
7
.gitignore
vendored
7
.gitignore
vendored
@@ -136,6 +136,7 @@ make-dev-stats.csv
|
||||
.uuid
|
||||
tsconfig.tsbuildinfo
|
||||
|
||||
test/js/bun/glob/fixtures
|
||||
*.lib
|
||||
*.pdb
|
||||
CMakeFiles
|
||||
@@ -158,4 +159,8 @@ x64
|
||||
/src/deps/libuv
|
||||
/build-*/
|
||||
|
||||
.vs
|
||||
.vs
|
||||
|
||||
**/.verdaccio-db.json
|
||||
/test-report.md
|
||||
/test-report.json
|
||||
7
.gitmodules
vendored
7
.gitmodules
vendored
@@ -76,3 +76,10 @@ ignore = dirty
|
||||
depth = 1
|
||||
shallow = true
|
||||
fetchRecurseSubmodules = false
|
||||
[submodule "src/deps/ls-hpack"]
|
||||
path = src/deps/ls-hpack
|
||||
url = https://github.com/litespeedtech/ls-hpack.git
|
||||
ignore = dirty
|
||||
depth = 1
|
||||
shallow = true
|
||||
fetchRecurseSubmodules = false
|
||||
@@ -11,3 +11,4 @@ test/snapshots-no-hmr
|
||||
test/js/deno/*.test.ts
|
||||
test/js/deno/**/*.test.ts
|
||||
bench/react-hello-world/react-hello-world.node.js
|
||||
test/cli/run/encoding-utf16-le-bom.ts
|
||||
|
||||
45
.vscode/launch.json
generated
vendored
45
.vscode/launch.json
generated
vendored
@@ -3,9 +3,22 @@
|
||||
// It will force the garbage collector to run after every test and every call to expect()
|
||||
// it makes our tests very slow
|
||||
// But it helps catch memory bugs
|
||||
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "sharp",
|
||||
"program": "bun-debug",
|
||||
"args": ["install", "sharp"],
|
||||
// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
|
||||
"cwd": "/tmp/scratchpad_20230911T213851",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
|
||||
},
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
@@ -15,9 +28,9 @@
|
||||
// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
|
||||
"cwd": "${workspaceFolder}/test",
|
||||
"env": {
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
|
||||
"BUN_DEBUG_QUIET_LOGS": "1"
|
||||
},
|
||||
"console": "internalConsole"
|
||||
},
|
||||
@@ -35,7 +48,6 @@
|
||||
},
|
||||
"console": "internalConsole"
|
||||
},
|
||||
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
@@ -124,7 +136,7 @@
|
||||
"request": "launch",
|
||||
"name": "bun run [file]",
|
||||
"program": "bun-debug",
|
||||
"args": ["run", "${file}", "${file}"],
|
||||
"args": ["run", "${file}"],
|
||||
"cwd": "${fileDirname}",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
@@ -307,7 +319,7 @@
|
||||
"name": "bun install",
|
||||
"program": "bun-debug",
|
||||
"args": ["install"],
|
||||
"cwd": "/Users/jarred/Build/worky",
|
||||
"cwd": "${fileDirname}",
|
||||
"console": "internalConsole",
|
||||
"env": {}
|
||||
},
|
||||
@@ -337,6 +349,27 @@
|
||||
"args": ["abc"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "Debug REPL",
|
||||
"program": "${workspaceFolder}/build/bun-debug",
|
||||
"args": ["/Users/dave/.bun/bin/bun-repl"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"console": "internalConsole",
|
||||
"env": {
|
||||
"BUN_DEBUG_QUIET_LOGS": "1"
|
||||
},
|
||||
"terminal": "integrated"
|
||||
},
|
||||
{
|
||||
"type": "cppvsdbg",
|
||||
"request": "launch",
|
||||
"name": "Windows: bun run [file]",
|
||||
"program": "${workspaceFolder}/build/bun-debug.exe",
|
||||
"args": ["run", "${file}"],
|
||||
"cwd": "${fileDirname}"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
13
.vscode/settings.json
vendored
13
.vscode/settings.json
vendored
@@ -3,7 +3,12 @@
|
||||
"search.quickOpen.includeSymbols": false,
|
||||
"search.seedWithNearestWord": true,
|
||||
"search.smartCase": true,
|
||||
"search.exclude": {},
|
||||
"search.exclude": {
|
||||
"node_modules": true,
|
||||
"src/bun.js/WebKit": true,
|
||||
".git": true,
|
||||
"src/deps/*/**": true
|
||||
},
|
||||
"search.followSymlinks": false,
|
||||
"search.useIgnoreFiles": true,
|
||||
"zig.buildOnSave": false,
|
||||
@@ -24,6 +29,7 @@
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
},
|
||||
"zig.zls.enableInlayHints": false,
|
||||
"zig.path": "${workspaceFolder}/.cache/zig/zig.exe",
|
||||
"git.ignoreSubmodules": true,
|
||||
"[jsx]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
@@ -68,6 +74,7 @@
|
||||
"src/deps/c-ares": true,
|
||||
"src/deps/tinycc": true,
|
||||
"src/deps/zstd": true,
|
||||
"**/*.i": true,
|
||||
"packages/bun-uws/fuzzing/seed-corpus/**/*": true
|
||||
},
|
||||
"C_Cpp.files.exclude": {
|
||||
@@ -218,5 +225,7 @@
|
||||
},
|
||||
"C_Cpp.errorSquiggles": "enabled",
|
||||
"eslint.workingDirectories": ["packages/bun-types"],
|
||||
"typescript.tsdk": "node_modules/typescript/lib"
|
||||
"typescript.tsdk": "node_modules/typescript/lib",
|
||||
"cmake.configureOnOpen": false,
|
||||
"git.ignoreLimitWarning": true
|
||||
}
|
||||
|
||||
209
CMakeLists.txt
209
CMakeLists.txt
@@ -2,8 +2,8 @@ cmake_minimum_required(VERSION 3.22)
|
||||
cmake_policy(SET CMP0091 NEW)
|
||||
cmake_policy(SET CMP0067 NEW)
|
||||
|
||||
set(Bun_VERSION "1.0.11")
|
||||
set(WEBKIT_TAG 7cd84abfa787e4b96b27c3ef1e28a4eb1aa49aa1)
|
||||
set(Bun_VERSION "1.0.23")
|
||||
set(WEBKIT_TAG 9c501b9aa712b7959f80dc99491e8758c151c20e)
|
||||
|
||||
set(BUN_WORKDIR "${CMAKE_CURRENT_BINARY_DIR}")
|
||||
message(STATUS "Configuring Bun ${Bun_VERSION} in ${BUN_WORKDIR}")
|
||||
@@ -24,15 +24,19 @@ endif()
|
||||
|
||||
if(CMAKE_BUILD_TYPE STREQUAL "Debug")
|
||||
set(DEBUG ON)
|
||||
set(ZIG_OPTIMIZE "Debug")
|
||||
set(DEFAULT_ZIG_OPTIMIZE "Debug")
|
||||
set(bun "bun-debug")
|
||||
elseif(CMAKE_BUILD_TYPE STREQUAL "Release")
|
||||
set(DEBUG OFF)
|
||||
set(ZIG_OPTIMIZE "ReleaseFast")
|
||||
set(DEFAULT_ZIG_OPTIMIZE "ReleaseFast")
|
||||
|
||||
if(WIN32)
|
||||
# lld-link will strip it for you, so we can build directly to bun.exe
|
||||
set(bun "bun")
|
||||
|
||||
# TODO(@paperdave): Remove this
|
||||
# it is enabled for the time being to make sure to catch more bugs in the experimental windows builds
|
||||
set(DEFAULT_ZIG_OPTIMIZE "ReleaseSafe")
|
||||
else()
|
||||
set(bun "bun-profile")
|
||||
endif()
|
||||
@@ -216,11 +220,20 @@ if(DEFINED ENV{CI} OR DEFINED ENV{GITHUB_ACTIONS})
|
||||
endif()
|
||||
|
||||
set(DEFAULT_USE_STATIC_LIBATOMIC ON)
|
||||
set(DEFAULT_USE_DEBUG_JSC, OFF)
|
||||
|
||||
if(CMAKE_BUILD_TYPE STREQUAL "Debug")
|
||||
set(DEFAULT_USE_DEBUG_JSC ON)
|
||||
endif()
|
||||
|
||||
if(WIN32)
|
||||
set(DEFAULT_USE_DEBUG_JSC OFF)
|
||||
endif()
|
||||
|
||||
if(UNIX AND NOT APPLE)
|
||||
execute_process(COMMAND cat /etc/os-release COMMAND head -n1 OUTPUT_VARIABLE LINUX_DISTRO)
|
||||
|
||||
if(${LINUX_DISTRO} STREQUAL "NAME=\"Arch Linux\"\n")
|
||||
if(${LINUX_DISTRO} MATCHES "NAME=\"(Arch|Manjaro) Linux\"\n")
|
||||
set(DEFAULT_USE_STATIC_LIBATOMIC OFF)
|
||||
endif()
|
||||
endif()
|
||||
@@ -237,16 +250,35 @@ option(USE_CUSTOM_BASE64 "Use Bun's recommended version of libbase64" ON)
|
||||
option(USE_CUSTOM_LOLHTML "Use Bun's recommended version of lolhtml" ON)
|
||||
option(USE_CUSTOM_TINYCC "Use Bun's recommended version of tinycc" ON)
|
||||
option(USE_CUSTOM_LIBUV "Use Bun's recommended version of libuv (Windows only)" ON)
|
||||
option(USE_CUSTOM_LSHPACK "Use Bun's recommended version of ls-hpack" ON)
|
||||
option(USE_BASELINE_BUILD "Build Bun for baseline (older) CPUs" OFF)
|
||||
|
||||
option(USE_DEBUG_JSC "Enable assertions and use a debug build of JavaScriptCore" OFF)
|
||||
option(USE_VALGRIND "Build Bun with Valgrind support (Linux only)" OFF)
|
||||
|
||||
option(ZIG_OPTIMIZE "Optimization level for Zig" ${DEFAULT_ZIG_OPTIMIZE})
|
||||
option(USE_DEBUG_JSC "Enable assertions and use a debug build of JavaScriptCore" ${DEFAULT_USE_DEBUG_JSC})
|
||||
option(USE_UNIFIED_SOURCES "Use unified sources to speed up the build" OFF)
|
||||
option(USE_STATIC_LIBATOMIC "Statically link libatomic, requires the presence of libatomic.a" ${DEFAULT_USE_STATIC_LIBATOMIC})
|
||||
|
||||
if(USE_VALGRIND)
|
||||
# Disable SIMD
|
||||
set(USE_BASELINE_BUILD ON)
|
||||
|
||||
if(ARCH STREQUAL "x86_64")
|
||||
# This is for picohttpparser
|
||||
# Valgrind cannot handle SSE4.2 instructions
|
||||
add_compile_definitions("__SSE4_2__=0")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if(NOT CANARY)
|
||||
set(CANARY 0)
|
||||
endif()
|
||||
|
||||
if(NOT ZIG_OPTIMIZE)
|
||||
set(ZIG_OPTIMIZE ${DEFAULT_ZIG_OPTIMIZE})
|
||||
endif()
|
||||
|
||||
set(ERROR_LIMIT 100 CACHE STRING "Maximum number of errors to show when compiling C++ code")
|
||||
|
||||
set(ARCH x86_64)
|
||||
@@ -309,8 +341,34 @@ function(validate_zig validator_result_var item)
|
||||
endif()
|
||||
endfunction()
|
||||
|
||||
find_program(ZIG_COMPILER zig ${REQUIRED_IF_NOT_ONLY_CPP_OR_LINK} DOC "Path to the Zig compiler" VALIDATOR validate_zig)
|
||||
message(STATUS "Found Zig Compiler: ${ZIG_COMPILER}")
|
||||
if(ZIG_COMPILER)
|
||||
if(ZIG_COMPILER STREQUAL "system")
|
||||
message(STATUS "Using system Zig compiler")
|
||||
unset(ZIG_COMPILER_)
|
||||
endif()
|
||||
|
||||
find_program(ZIG_COMPILER_ zig ${REQUIRED_IF_NOT_ONLY_CPP_OR_LINK} DOC "Path to the Zig compiler" VALIDATOR validate_zig)
|
||||
set(ZIG_COMPILER "${ZIG_COMPILER_}")
|
||||
message(STATUS "Found Zig Compiler: ${ZIG_COMPILER}")
|
||||
elseif(NOT BUN_CPP_ONLY AND NOT BUN_LINK_ONLY)
|
||||
execute_process(
|
||||
COMMAND "${SHELL}"
|
||||
"${CMAKE_CURRENT_SOURCE_DIR}/scripts/download-zig.${SCRIPT_EXTENSION}"
|
||||
)
|
||||
set(ZIG_COMPILER "${CMAKE_CURRENT_SOURCE_DIR}/.cache/zig/zig")
|
||||
|
||||
if(WIN32)
|
||||
set(ZIG_COMPILER "${ZIG_COMPILER}.exe")
|
||||
endif()
|
||||
|
||||
if(NOT EXISTS "${ZIG_COMPILER}")
|
||||
unset(ZIG_COMPILER)
|
||||
message(FATAL_ERROR "Auto-installation of Zig failed. Please pass -DZIG_COMPILER=system or a path to the Zig")
|
||||
endif()
|
||||
|
||||
message(STATUS "Installed Zig Compiler: ${ZIG_COMPILER}")
|
||||
set_property(DIRECTORY APPEND PROPERTY CMAKE_CONFIGURE_DEPENDS "build.zig")
|
||||
endif()
|
||||
|
||||
# Bun
|
||||
if(NOT WIN32)
|
||||
@@ -362,6 +420,7 @@ if(NOT WEBKIT_DIR)
|
||||
set(ASSERT_ENABLED "0")
|
||||
|
||||
if(USE_DEBUG_JSC)
|
||||
add_compile_definitions("BUN_DEBUG=1")
|
||||
set(BUN_WEBKIT_PACKAGE_NAME_SUFFIX "-debug")
|
||||
set(ASSERT_ENABLED "1")
|
||||
elseif(NOT DEBUG AND NOT WIN32)
|
||||
@@ -404,34 +463,23 @@ if(NOT WEBKIT_DIR)
|
||||
elseif(WEBKIT_DIR STREQUAL "omit")
|
||||
message(STATUS "Not using WebKit. This is only valid if you are only trying to build Zig code")
|
||||
else()
|
||||
# Setting WEBKIT_DIR means you either have a path to the WebKit repo, or you have a path to packaged webkit
|
||||
# Non-packaged webkit has CMakeLists.txt
|
||||
if(EXISTS "${WEBKIT_DIR}/CMakeLists.txt")
|
||||
# Since we may be doing a Debug build of Bun but with a Release build of JSC, we can't
|
||||
# include their CMakeLists directly here, but rather we need to run `cmake` as a dependency
|
||||
# of our build. It'll still have decent caching which is what really matters.
|
||||
# Expected to be WebKit/WebKitBuild/${CMAKE_BUILD_TYPE}
|
||||
if(EXISTS "${WEBKIT_DIR}/cmakeconfig.h")
|
||||
# You may need to run:
|
||||
# make jsc-compile-debug jsc-copy-headers
|
||||
include_directories(
|
||||
"${WEBKIT_DIR}/"
|
||||
"${WEBKIT_DIR}/JavaScriptCore/Headers/JavaScriptCore"
|
||||
"${WEBKIT_DIR}/JavaScriptCore/PrivateHeaders"
|
||||
"${WEBKIT_DIR}/bmalloc/Headers"
|
||||
"${WEBKIT_DIR}/WTF/Headers"
|
||||
)
|
||||
set(WEBKIT_LIB_DIR "${WEBKIT_DIR}/lib")
|
||||
|
||||
# cmake WEBKIT_DIR -B WEBKIT_DIR/WebKitBuild/WEBKIT_BUILD_TYPE
|
||||
# -DPORT=JSCOnly
|
||||
# -DENABLE_STATIC_JSC=ON
|
||||
# -DENABLE_SINGLE_THREADED_VM_ENTRY_SCOPE=ON
|
||||
# -DCMAKE_BUILD_TYPE=Debug
|
||||
# -DENABLE_BUN_SKIP_FAILING_ASSERTIONS=ON
|
||||
# -DUSE_THIN_ARCHIVES=OFF
|
||||
# -DENABLE_FTL_JIT=ON
|
||||
# -DCMAKE_C_COMPILER=(which clang-16)
|
||||
# -DDCMAKE_CXX_COMPILER=(which clang++-16)
|
||||
# -DDUSE_BUN_JSC_ADDITIONS=1
|
||||
# -DCMAKE_EXE_LINKER_FLAGS="-fuse-ld=lld"
|
||||
# -DCMAKE_AR=$(which llvm-ar)
|
||||
# -DCMAKE_RANLIB=$(which llvm-ranlib)
|
||||
# -DALLOW_LINE_AND_COLUMN_NUMBER_IN_BUILTINS=ON
|
||||
# -G Ninja
|
||||
# -DCMAKE_OSX_DEPLOYMENT_TARGET=11.0
|
||||
# -DPTHREAD_JIT_PERMISSIONS_API=1
|
||||
# -DUSE_PTHREAD_JIT_PERMISSIONS_API=ON
|
||||
# -DENABLE_REMOTE_INSPECTOR=ON
|
||||
message(FATAL_ERROR "TODO: Setting WEBKIT_DIR to the WebKit repository to enable automatic builds. For now you need to run the release script, and point to the packaged directory.")
|
||||
if(USE_DEBUG_JSC)
|
||||
add_compile_definitions("BUN_DEBUG=1")
|
||||
set(ASSERT_ENABLED "1")
|
||||
endif()
|
||||
else()
|
||||
if(NOT EXISTS "${WEBKIT_DIR}/lib/${libWTF}.${STATIC_LIB_EXT}" OR NOT EXISTS "${WEBKIT_DIR}/lib/${libJavaScriptCore}.${STATIC_LIB_EXT}")
|
||||
if(WEBKIT_DIR MATCHES "src/bun.js/WebKit$")
|
||||
@@ -494,6 +542,17 @@ file(GLOB BUN_CPP ${CONFIGURE_DEPENDS}
|
||||
)
|
||||
list(APPEND BUN_RAW_SOURCES ${BUN_CPP})
|
||||
|
||||
# -- Brotli --
|
||||
set(BROTLI_SRC "${CMAKE_CURRENT_SOURCE_DIR}/src/deps/brotli")
|
||||
file(GLOB BROTLI_FILES ${CONFIGURE_DEPENDS}
|
||||
"${BROTLI_SRC}/common/*.c"
|
||||
"${BROTLI_SRC}/enc/*.c"
|
||||
"${BROTLI_SRC}/dec/*.c"
|
||||
)
|
||||
list(APPEND BUN_RAW_SOURCES ${BROTLI_FILES})
|
||||
include_directories("${BUN_DEPS_DIR}/brotli/include")
|
||||
|
||||
# -- uSockets --
|
||||
set(USOCKETS_SRC "${CMAKE_CURRENT_SOURCE_DIR}/packages/bun-usockets/src")
|
||||
file(GLOB USOCKETS_FILES ${CONFIGURE_DEPENDS}
|
||||
"${USOCKETS_SRC}/*.c"
|
||||
@@ -698,9 +757,10 @@ endif()
|
||||
# --- Zig Object ---
|
||||
file(GLOB ZIG_FILES
|
||||
"${BUN_SRC}/*.zig"
|
||||
"${BUN_SRC}/**/*.zig"
|
||||
"${BUN_SRC}/**/**/*.zig"
|
||||
"${BUN_SRC}/**/**/**/*.zig"
|
||||
"${BUN_SRC}/*/*.zig"
|
||||
"${BUN_SRC}/*/*/*.zig"
|
||||
"${BUN_SRC}/*/*/*/*.zig"
|
||||
"${BUN_SRC}/*/*/*/*/*.zig"
|
||||
)
|
||||
|
||||
if(NOT BUN_ZIG_OBJ)
|
||||
@@ -805,12 +865,13 @@ add_compile_definitions(
|
||||
"NOMINMAX"
|
||||
"IS_BUILD"
|
||||
"BUILDING_JSCONLY__"
|
||||
"ASSERT_ENABLED=$<IF:$<CONFIG:ASSERT_ENABLED>,1,0>"
|
||||
"BUN_DYNAMIC_JS_LOAD_PATH=\"${BUN_WORKDIR}/js\""
|
||||
)
|
||||
|
||||
if(NOT ASSERT_ENABLED)
|
||||
add_compile_definitions("NDEBUG=1")
|
||||
else()
|
||||
add_compile_definitions("ASSERT_ENABLED=1")
|
||||
endif()
|
||||
|
||||
include_directories(
|
||||
@@ -868,15 +929,23 @@ endif()
|
||||
# --- clang and linker flags ---
|
||||
if(CMAKE_BUILD_TYPE STREQUAL "Debug")
|
||||
if(NOT WIN32)
|
||||
target_compile_options(${bun} PUBLIC -g3 -O1 -gdwarf-4)
|
||||
target_compile_options(${bun} PUBLIC -g3 -O0 -gdwarf-4
|
||||
-Werror=return-type
|
||||
-Werror=return-stack-address
|
||||
-Werror=implicit-function-declaration
|
||||
)
|
||||
else()
|
||||
target_compile_options(${bun} PUBLIC -O1)
|
||||
target_compile_options(${bun} PUBLIC /Od)
|
||||
endif()
|
||||
|
||||
add_compile_definitions("BUN_DEBUG=1")
|
||||
elseif(CMAKE_BUILD_TYPE STREQUAL "Release")
|
||||
if(NOT WIN32)
|
||||
target_compile_options(${bun} PUBLIC -O3 -flto=full -emit-llvm)
|
||||
target_compile_options(${bun} PUBLIC -O3 -flto=full -emit-llvm -g1
|
||||
-Werror=return-type
|
||||
-Werror=return-stack-address
|
||||
-Werror=implicit-function-declaration
|
||||
)
|
||||
else()
|
||||
target_compile_options(${bun} PUBLIC /O2 -flto=full)
|
||||
target_link_options(${bun} PUBLIC /LTCG)
|
||||
@@ -910,9 +979,11 @@ if(WIN32)
|
||||
"BORINGSSL_NO_CXX=1" # lol
|
||||
)
|
||||
|
||||
# set_property(TARGET ${bun} PROPERTY MSVC_RUNTIME_LIBRARY "MultiThreaded")
|
||||
set_property(TARGET ${bun} PROPERTY MSVC_RUNTIME_LIBRARY "MultiThreadedDLL")
|
||||
|
||||
target_compile_options(${bun} PUBLIC "/EHsc" "/GR-")
|
||||
target_link_options(${bun} PUBLIC "/STACK:4194304,2097152")
|
||||
target_link_options(${bun} PUBLIC "/STACK:0x1200000,0x100000")
|
||||
else()
|
||||
target_compile_options(${bun} PUBLIC
|
||||
-fPIC
|
||||
@@ -1019,7 +1090,7 @@ if(APPLE)
|
||||
endif()
|
||||
|
||||
# --- Stripped Binary "bun"
|
||||
if(CMAKE_BUILD_TYPE STREQUAL "Release" AND NOT WIN32)
|
||||
if(CMAKE_BUILD_TYPE STREQUAL "Release" AND NOT WIN32 AND NOT ASSERT_ENABLED)
|
||||
# add_custom_command(
|
||||
# TARGET ${bun}
|
||||
# POST_BUILD
|
||||
@@ -1100,11 +1171,21 @@ if(USE_CUSTOM_MIMALLOC)
|
||||
if(WIN32)
|
||||
target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/mimalloc.lib")
|
||||
elseif(APPLE)
|
||||
# https://github.com/microsoft/mimalloc/issues/512
|
||||
# Linking mimalloc via object file on macOS x64 can cause heap corruption
|
||||
target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/libmimalloc.a")
|
||||
if(USE_DEBUG_JSC OR CMAKE_BUILD_TYPE STREQUAL "Debug")
|
||||
message(STATUS "Using debug mimalloc")
|
||||
target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/libmimalloc-debug.a")
|
||||
else()
|
||||
# https://github.com/microsoft/mimalloc/issues/512
|
||||
# Linking mimalloc via object file on macOS x64 can cause heap corruption
|
||||
target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/libmimalloc.a")
|
||||
endif()
|
||||
else()
|
||||
target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/libmimalloc.o")
|
||||
if(USE_DEBUG_JSC OR CMAKE_BUILD_TYPE STREQUAL "Debug")
|
||||
message(STATUS "Using debug mimalloc")
|
||||
target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/libmimalloc-debug.a")
|
||||
else()
|
||||
target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/libmimalloc.o")
|
||||
endif()
|
||||
endif()
|
||||
else()
|
||||
find_package(mimalloc REQUIRED)
|
||||
@@ -1150,17 +1231,15 @@ else()
|
||||
target_link_libraries(${bun} PRIVATE base64::base64)
|
||||
endif()
|
||||
|
||||
if(NOT WIN32)
|
||||
if(USE_CUSTOM_TINYCC)
|
||||
if(WIN32)
|
||||
target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/tcc.lib")
|
||||
else()
|
||||
target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/libtcc.a")
|
||||
endif()
|
||||
if(USE_CUSTOM_TINYCC)
|
||||
if(WIN32)
|
||||
target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/tcc.lib")
|
||||
else()
|
||||
find_package(tinycc REQUIRED)
|
||||
target_link_libraries(${bun} PRIVATE tinycc::tinycc)
|
||||
target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/libtcc.a")
|
||||
endif()
|
||||
else()
|
||||
find_package(tinycc REQUIRED)
|
||||
target_link_libraries(${bun} PRIVATE tinycc::tinycc)
|
||||
endif()
|
||||
|
||||
if(USE_CUSTOM_LOLHTML)
|
||||
@@ -1204,6 +1283,17 @@ else()
|
||||
target_compile_definitions(${bun} PRIVATE "LAZY_LOAD_SQLITE=1")
|
||||
endif()
|
||||
|
||||
if(USE_CUSTOM_LSHPACK)
|
||||
if(WIN32)
|
||||
target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/lshpack.lib")
|
||||
else()
|
||||
target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/liblshpack.a")
|
||||
endif()
|
||||
else()
|
||||
find_package(lshpack REQUIRED)
|
||||
target_link_libraries(${bun} PRIVATE lshpack)
|
||||
endif()
|
||||
|
||||
if(NOT WIN32)
|
||||
target_link_libraries(${bun} PRIVATE "${WEBKIT_LIB_DIR}/libWTF.a")
|
||||
target_link_libraries(${bun} PRIVATE "${WEBKIT_LIB_DIR}/libJavaScriptCore.a")
|
||||
@@ -1222,9 +1312,14 @@ else()
|
||||
ucrt
|
||||
userenv
|
||||
dbghelp
|
||||
wsock32 # ws2_32 required by TransmitFile aka sendfile on windows
|
||||
)
|
||||
endif()
|
||||
|
||||
if(WIN32)
|
||||
# delayimp -delayload:shell32.dll -delayload:ole32.dll
|
||||
endif()
|
||||
|
||||
if(BUN_LINK_ONLY)
|
||||
message(STATUS "NOTE: BUN_LINK_ONLY is ON, this build config will only link the Bun executable")
|
||||
endif()
|
||||
|
||||
@@ -18,7 +18,7 @@ The JavaScript transpiler & module resolver is mostly independent from the runti
|
||||
|
||||
## Getting started
|
||||
|
||||
Please refer to [Bun's Development Guide](https://bun.sh/docs/project/development) to get your dev environment setup!
|
||||
Please refer to [Bun's Development Guide](https://bun.sh/docs/project/contributing) to get your dev environment setup!
|
||||
|
||||
## Memory management in Bun
|
||||
|
||||
|
||||
116
Dockerfile
116
Dockerfile
@@ -19,10 +19,13 @@ ARG GIT_SHA=""
|
||||
ARG BUN_VERSION="bun-v1.0.7"
|
||||
ARG BUN_DOWNLOAD_URL_BASE="https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/${BUN_VERSION}"
|
||||
ARG CANARY=0
|
||||
ARG ASSERTIONS=OFF
|
||||
ARG ZIG_OPTIMIZE=ReleaseFast
|
||||
ARG CMAKE_BUILD_TYPE=Release
|
||||
|
||||
ARG NODE_VERSION="20"
|
||||
ARG LLVM_VERSION="16"
|
||||
ARG ZIG_VERSION="0.12.0-dev.1297+a9e66ed73"
|
||||
ARG ZIG_VERSION="0.12.0-dev.1828+225fe6ddb"
|
||||
|
||||
ARG SCCACHE_BUCKET
|
||||
ARG SCCACHE_REGION
|
||||
@@ -178,6 +181,7 @@ FROM bun-base as mimalloc
|
||||
|
||||
ARG BUN_DIR
|
||||
ARG CPU_TARGET
|
||||
ARG ASSERTIONS
|
||||
ENV CPU_TARGET=${CPU_TARGET}
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
@@ -185,8 +189,23 @@ COPY src/deps/mimalloc ${BUN_DIR}/src/deps/mimalloc
|
||||
|
||||
ENV CCACHE_DIR=/ccache
|
||||
|
||||
RUN --mount=type=cache,target=/ccache cd ${BUN_DIR} && \
|
||||
make mimalloc && rm -rf src/deps/mimalloc Makefile
|
||||
RUN --mount=type=cache,target=/ccache cd ${BUN_DIR} && \
|
||||
make mimalloc && rm -rf src/deps/mimalloc Makefile;
|
||||
|
||||
FROM bun-base as mimalloc-debug
|
||||
|
||||
ARG BUN_DIR
|
||||
ARG CPU_TARGET
|
||||
ARG ASSERTIONS
|
||||
ENV CPU_TARGET=${CPU_TARGET}
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY src/deps/mimalloc ${BUN_DIR}/src/deps/mimalloc
|
||||
|
||||
ENV CCACHE_DIR=/ccache
|
||||
|
||||
RUN --mount=type=cache,target=/ccache cd ${BUN_DIR} && \
|
||||
make mimalloc-debug && rm -rf src/deps/mimalloc Makefile;
|
||||
|
||||
FROM bun-base as zlib
|
||||
|
||||
@@ -277,6 +296,22 @@ WORKDIR $BUN_DIR
|
||||
|
||||
RUN --mount=type=cache,target=/ccache cd $BUN_DIR && make zstd
|
||||
|
||||
FROM bun-base as ls-hpack
|
||||
|
||||
ARG BUN_DIR
|
||||
|
||||
ARG CPU_TARGET
|
||||
ENV CPU_TARGET=${CPU_TARGET}
|
||||
|
||||
ENV CCACHE_DIR=/ccache
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY src/deps/ls-hpack ${BUN_DIR}/src/deps/ls-hpack
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
RUN --mount=type=cache,target=/ccache cd $BUN_DIR && make lshpack
|
||||
|
||||
FROM bun-base-with-zig as bun-identifier-cache
|
||||
|
||||
ARG DEBIAN_FRONTEND
|
||||
@@ -310,18 +345,21 @@ RUN cd $BUN_DIR/src/node-fallbacks \
|
||||
FROM bun-base as bun-webkit
|
||||
|
||||
ARG BUILDARCH
|
||||
ARG ASSERTIONS
|
||||
|
||||
COPY CMakeLists.txt ${BUN_DIR}/CMakeLists.txt
|
||||
|
||||
RUN mkdir ${BUN_DIR}/bun-webkit \
|
||||
&& WEBKIT_TAG=$(grep 'set(WEBKIT_TAG' "${BUN_DIR}/CMakeLists.txt" | awk '{print $2}' | cut -f 1 -d ')') \
|
||||
&& WEBKIT_URL="https://github.com/oven-sh/WebKit/releases/download/autobuild-${WEBKIT_TAG}/bun-webkit-linux-${BUILDARCH}-lto.tar.gz" \
|
||||
&& WEBKIT_SUFFIX=$(if [ "${ASSERTIONS}" = "ON" ]; then echo "debug"; else echo "lto"; fi) \
|
||||
&& WEBKIT_URL="https://github.com/oven-sh/WebKit/releases/download/autobuild-${WEBKIT_TAG}/bun-webkit-linux-${BUILDARCH}-${WEBKIT_SUFFIX}.tar.gz" \
|
||||
&& echo "Downloading ${WEBKIT_URL}" \
|
||||
&& curl -fsSL "${WEBKIT_URL}" | tar -xz -C ${BUN_DIR}/bun-webkit --strip-components=1
|
||||
|
||||
FROM bun-base as bun-cpp-objects
|
||||
|
||||
ARG CANARY
|
||||
ARG ASSERTIONS
|
||||
|
||||
COPY --from=bun-webkit ${BUN_DIR}/bun-webkit ${BUN_DIR}/bun-webkit
|
||||
|
||||
@@ -335,14 +373,14 @@ ENV CCACHE_DIR=/ccache
|
||||
RUN --mount=type=cache,target=/ccache mkdir ${BUN_DIR}/build \
|
||||
&& cd ${BUN_DIR}/build \
|
||||
&& mkdir -p tmp_modules tmp_functions js codegen \
|
||||
&& cmake .. -GNinja -DCMAKE_BUILD_TYPE=Release -DBUN_CPP_ONLY=1 -DWEBKIT_DIR=/build/bun/bun-webkit -DCANARY=${CANARY} \
|
||||
&& cmake .. -GNinja -DCMAKE_BUILD_TYPE=Release -DUSE_DEBUG_JSC=${ASSERTIONS} -DBUN_CPP_ONLY=1 -DWEBKIT_DIR=/build/bun/bun-webkit -DCANARY=${CANARY} -DZIG_COMPILER=system \
|
||||
&& bash compile-cpp-only.sh -v
|
||||
|
||||
FROM bun-base-with-zig as bun-codegen-for-zig
|
||||
|
||||
COPY package.json bun.lockb Makefile .gitmodules .prettierrc.cjs ${BUN_DIR}/
|
||||
COPY src/runtime ${BUN_DIR}/src/runtime
|
||||
COPY src/runtime.js src/runtime.footer*.js src/react-refresh.js ${BUN_DIR}/src/
|
||||
COPY src/runtime.js src/runtime.bun.js ${BUN_DIR}/src/
|
||||
COPY packages/bun-error ${BUN_DIR}/packages/bun-error
|
||||
COPY src/fallback.ts ${BUN_DIR}/src/fallback.ts
|
||||
COPY src/api ${BUN_DIR}/src/api
|
||||
@@ -361,6 +399,8 @@ ARG TRIPLET
|
||||
ARG GIT_SHA
|
||||
ARG CPU_TARGET
|
||||
ARG CANARY=0
|
||||
ARG ASSERTIONS=OFF
|
||||
ARG ZIG_OPTIMIZE=ReleaseFast
|
||||
|
||||
COPY *.zig package.json CMakeLists.txt ${BUN_DIR}/
|
||||
COPY completions ${BUN_DIR}/completions
|
||||
@@ -380,6 +420,7 @@ RUN mkdir -p build \
|
||||
&& cmake .. \
|
||||
-G Ninja \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DZIG_OPTIMIZE="${ZIG_OPTIMIZE}" \
|
||||
-DCPU_TARGET="${CPU_TARGET}" \
|
||||
-DZIG_TARGET="${TRIPLET}" \
|
||||
-DWEBKIT_DIR="omit" \
|
||||
@@ -387,6 +428,7 @@ RUN mkdir -p build \
|
||||
-DNO_CODEGEN=1 \
|
||||
-DBUN_ZIG_OBJ="/tmp/bun-zig.o" \
|
||||
-DCANARY="${CANARY}" \
|
||||
-DZIG_COMPILER=system \
|
||||
&& ONLY_ZIG=1 ninja "/tmp/bun-zig.o" -v
|
||||
|
||||
FROM scratch as build_release_obj
|
||||
@@ -400,6 +442,7 @@ FROM bun-base as bun-link
|
||||
|
||||
ARG CPU_TARGET
|
||||
ARG CANARY
|
||||
ARG ASSERTIONS
|
||||
|
||||
ENV CPU_TARGET=${CPU_TARGET}
|
||||
|
||||
@@ -422,6 +465,7 @@ COPY --from=mimalloc ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=zstd ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=tinycc ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=c-ares ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=ls-hpack ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=bun-compile-zig-obj /tmp/bun-zig.o ${BUN_DIR}/build/bun-zig.o
|
||||
COPY --from=bun-cpp-objects ${BUN_DIR}/build/bun-cpp-objects.a ${BUN_DIR}/build/bun-cpp-objects.a
|
||||
COPY --from=bun-cpp-objects ${BUN_DIR}/bun-webkit/lib ${BUN_DIR}/bun-webkit/lib
|
||||
@@ -433,12 +477,14 @@ RUN cmake .. \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DBUN_LINK_ONLY=1 \
|
||||
-DBUN_ZIG_OBJ="${BUN_DIR}/build/bun-zig.o" \
|
||||
-DUSE_DEBUG_JSC=${ASSERTIONS} \
|
||||
-DBUN_CPP_ARCHIVE="${BUN_DIR}/build/bun-cpp-objects.a" \
|
||||
-DWEBKIT_DIR="${BUN_DIR}/bun-webkit" \
|
||||
-DBUN_DEPS_OUT_DIR="${BUN_DEPS_OUT_DIR}" \
|
||||
-DCPU_TARGET="${CPU_TARGET}" \
|
||||
-DNO_CONFIGURE_DEPENDS=1 \
|
||||
-DCANARY="${CANARY}" \
|
||||
-DZIG_COMPILER=system \
|
||||
&& ninja -v \
|
||||
&& ./bun --revision \
|
||||
&& mkdir -p /build/out \
|
||||
@@ -447,4 +493,60 @@ RUN cmake .. \
|
||||
|
||||
FROM scratch as artifact
|
||||
|
||||
COPY --from=bun-link /build/out /
|
||||
COPY --from=bun-link /build/out /
|
||||
|
||||
FROM bun-base as bun-link-assertions
|
||||
|
||||
ARG CPU_TARGET
|
||||
ARG CANARY
|
||||
ARG ASSERTIONS
|
||||
|
||||
ENV CPU_TARGET=${CPU_TARGET}
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
RUN mkdir -p build bun-webkit
|
||||
|
||||
# lol
|
||||
COPY src/bun.js/bindings/sqlite/sqlite3.c ${BUN_DIR}/src/bun.js/bindings/sqlite/sqlite3.c
|
||||
|
||||
COPY src/symbols.dyn src/linker.lds ${BUN_DIR}/src/
|
||||
|
||||
COPY CMakeLists.txt ${BUN_DIR}/CMakeLists.txt
|
||||
COPY --from=zlib ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=base64 ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=libarchive ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=boringssl ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=lolhtml ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=mimalloc-debug ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=zstd ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=tinycc ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=c-ares ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=bun-compile-zig-obj /tmp/bun-zig.o ${BUN_DIR}/build/bun-zig.o
|
||||
COPY --from=bun-cpp-objects ${BUN_DIR}/build/bun-cpp-objects.a ${BUN_DIR}/build/bun-cpp-objects.a
|
||||
COPY --from=bun-cpp-objects ${BUN_DIR}/bun-webkit/lib ${BUN_DIR}/bun-webkit/lib
|
||||
|
||||
WORKDIR $BUN_DIR/build
|
||||
|
||||
RUN cmake .. \
|
||||
-G Ninja \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DBUN_LINK_ONLY=1 \
|
||||
-DBUN_ZIG_OBJ="${BUN_DIR}/build/bun-zig.o" \
|
||||
-DUSE_DEBUG_JSC=ON \
|
||||
-DBUN_CPP_ARCHIVE="${BUN_DIR}/build/bun-cpp-objects.a" \
|
||||
-DWEBKIT_DIR="${BUN_DIR}/bun-webkit" \
|
||||
-DBUN_DEPS_OUT_DIR="${BUN_DEPS_OUT_DIR}" \
|
||||
-DCPU_TARGET="${CPU_TARGET}" \
|
||||
-DNO_CONFIGURE_DEPENDS=1 \
|
||||
-DCANARY="${CANARY}" \
|
||||
-DZIG_COMPILER=system \
|
||||
&& ninja -v \
|
||||
&& ./bun --revision \
|
||||
&& mkdir -p /build/out \
|
||||
&& mv bun bun-profile /build/out \
|
||||
&& rm -rf ${BUN_DIR} ${BUN_DEPS_OUT_DIR}
|
||||
|
||||
FROM scratch as artifact-assertions
|
||||
|
||||
COPY --from=bun-link-assertions /build/out /
|
||||
34
Makefile
34
Makefile
@@ -455,7 +455,8 @@ ARCHIVE_FILES_WITHOUT_LIBCRYPTO = $(MINIMUM_ARCHIVE_FILES) \
|
||||
-lusockets \
|
||||
-lcares \
|
||||
-lzstd \
|
||||
$(BUN_DEPS_OUT_DIR)/libuwsockets.o
|
||||
$(BUN_DEPS_OUT_DIR)/libuwsockets.o \
|
||||
$(BUN_DEPS_OUT_DIR)/liblshpack.a
|
||||
|
||||
ARCHIVE_FILES = $(ARCHIVE_FILES_WITHOUT_LIBCRYPTO)
|
||||
|
||||
@@ -672,7 +673,7 @@ assert-deps:
|
||||
@echo "You have the dependencies installed! Woo"
|
||||
|
||||
# the following allows you to run `make submodule` to update or init submodules. but we will exclude webkit
|
||||
# unless you explicity clone it yourself (a huge download)
|
||||
# unless you explicitly clone it yourself (a huge download)
|
||||
SUBMODULE_NAMES=$(shell cat .gitmodules | grep 'path = ' | awk '{print $$3}')
|
||||
ifeq ("$(wildcard src/bun.js/WebKit/.git)", "")
|
||||
SUBMODULE_NAMES := $(filter-out src/bun.js/WebKit, $(SUBMODULE_NAMES))
|
||||
@@ -749,12 +750,22 @@ wasm: api mimalloc-wasm build-obj-wasm-small
|
||||
build-obj-safe:
|
||||
$(ZIG) build obj -Doptimize=ReleaseSafe -Dcpu="$(CPU_TARGET)"
|
||||
|
||||
UWS_CC_FLAGS = -pthread -DLIBUS_USE_OPENSSL=1 -DUWS_HTTPRESPONSE_NO_WRITEMARK=1 -DLIBUS_USE_BORINGSSL=1 -DWITH_BORINGSSL=1 -Wpedantic -Wall -Wextra -Wsign-conversion -Wconversion $(UWS_INCLUDE) -DUWS_WITH_PROXY
|
||||
UWS_CC_FLAGS = -pthread -DLIBUS_USE_OPENSSL=1 -DUWS_HTTPRESPONSE_NO_WRITEMARK=1 -DLIBUS_USE_BORINGSSL=1 -DWITH_BORINGSSL=1 -Wpedantic -Wall -Wextra -Wsign-conversion -Wconversion $(UWS_INCLUDE) -DUWS_WITH_PROXY
|
||||
UWS_CXX_FLAGS = $(UWS_CC_FLAGS) -std=$(CXX_VERSION) -fno-exceptions -fno-rtti
|
||||
UWS_LDFLAGS = -I$(BUN_DEPS_DIR)/boringssl/include -I$(ZLIB_INCLUDE_DIR)
|
||||
USOCKETS_DIR = $(BUN_DIR)/packages/bun-usockets
|
||||
USOCKETS_SRC_DIR = $(USOCKETS_DIR)/src
|
||||
|
||||
|
||||
LSHPACK_SRC_DIR = $(BUN_DEPS_DIR)/ls-hpack
|
||||
LSHPACK_CC_FLAGS = -DXXH_HEADER_NAME="<xxhash.h>"
|
||||
LSHPACK_LDFLAGS = -I$(LSHPACK_SRC_DIR) -I$(LSHPACK_SRC_DIR)/deps/xxhash
|
||||
|
||||
lshpack:
|
||||
rm -rf $(LSHPACK_SRC_DIR)/*.i $(LSHPACK_SRC_DIR)/*.bc $(LSHPACK_SRC_DIR)/*.o $(LSHPACK_SRC_DIR)/*.s $(LSHPACK_SRC_DIR)/*.ii $(LSHPACK_SRC_DIR)/*.s
|
||||
cd $(LSHPACK_SRC_DIR) && $(CC_WITH_CCACHE) -I$(LSHPACK_SRC_DIR) -fno-builtin-malloc -fno-builtin-free -fno-builtin-realloc $(EMIT_LLVM_FOR_RELEASE) $(MACOS_MIN_FLAG) -fPIC $(CFLAGS) $(LSHPACK_CC_FLAGS) -save-temps -I$(BUN_DEPS_DIR)/uws/lshpack/src $(LSHPACK_LDFLAGS) -g $(DEFAULT_LINKER_FLAGS) $(PLATFORM_LINKER_FLAGS) $(OPTIMIZATION_LEVEL) -c $(wildcard $(LSHPACK_SRC_DIR)/lshpack.c) $(wildcard $(LSHPACK_SRC_DIR)/deps/**/*.c)
|
||||
cd $(LSHPACK_SRC_DIR) && $(AR) rcvs $(BUN_DEPS_OUT_DIR)/liblshpack.a $(LSHPACK_SRC_DIR)/*.{o,bc}
|
||||
|
||||
usockets:
|
||||
rm -rf $(USOCKETS_DIR)/*.i $(USOCKETS_DIR)/*.bc $(USOCKETS_DIR)/*.o $(USOCKETS_DIR)/*.s $(USOCKETS_DIR)/*.ii $(USOCKETS_DIR)/*.s $(BUN_DEPS_OUT_DIR)/libusockets.a
|
||||
cd $(USOCKETS_DIR) && $(CC_WITH_CCACHE) -I$(USOCKETS_SRC_DIR) -fno-builtin-malloc -fno-builtin-free -fno-builtin-realloc $(EMIT_LLVM_FOR_RELEASE) $(MACOS_MIN_FLAG) -fPIC $(CFLAGS) $(UWS_CC_FLAGS) -save-temps -I$(BUN_DEPS_DIR)/uws/uSockets/src $(UWS_LDFLAGS) -g $(DEFAULT_LINKER_FLAGS) $(PLATFORM_LINKER_FLAGS) $(OPTIMIZATION_LEVEL) -c $(wildcard $(USOCKETS_SRC_DIR)/*.c) $(wildcard $(USOCKETS_SRC_DIR)/**/*.c)
|
||||
@@ -825,17 +836,7 @@ fallback_decoder:
|
||||
|
||||
.PHONY: runtime_js
|
||||
runtime_js:
|
||||
@NODE_ENV=production $(ESBUILD) --define:process.env.NODE_ENV=\"production\" --target=esnext --bundle src/runtime/index.ts --format=iife --platform=browser --global-name=BUN_RUNTIME --minify --external:/bun:* > src/runtime.out.js; cat src/runtime.footer.js >> src/runtime.out.js
|
||||
@NODE_ENV=production $(ESBUILD) --define:process.env.NODE_ENV=\"production\" --target=esnext --bundle src/runtime/index-with-refresh.ts --format=iife --platform=browser --global-name=BUN_RUNTIME --minify --external:/bun:* > src/runtime.out.refresh.js; cat src/runtime.footer.with-refresh.js >> src/runtime.out.refresh.js
|
||||
@NODE_ENV=production $(ESBUILD) --define:process.env.NODE_ENV=\"production\" --target=esnext --bundle src/runtime/index-without-hmr.ts --format=iife --platform=node --global-name=BUN_RUNTIME --minify --external:/bun:* > src/runtime.node.pre.out.js; cat src/runtime.node.pre.out.js src/runtime.footer.node.js > src/runtime.node.out.js
|
||||
@NODE_ENV=production $(ESBUILD) --define:process.env.NODE_ENV=\"production\" --target=esnext --bundle src/runtime/index-without-hmr.ts --format=iife --platform=node --global-name=BUN_RUNTIME --minify --external:/bun:* > src/runtime.bun.pre.out.js; cat src/runtime.bun.pre.out.js src/runtime.footer.bun.js > src/runtime.bun.out.js
|
||||
|
||||
.PHONY: runtime_js_dev
|
||||
runtime_js_dev:
|
||||
@NODE_ENV=development $(ESBUILD) --define:process.env.NODE_ENV="development" --target=esnext --bundle src/runtime/index.ts --format=iife --platform=browser --global-name=BUN_RUNTIME --external:/bun:* > src/runtime.out.js; cat src/runtime.footer.js >> src/runtime.out.js
|
||||
@NODE_ENV=development $(ESBUILD) --define:process.env.NODE_ENV="development" --target=esnext --bundle src/runtime/index-with-refresh.ts --format=iife --platform=browser --global-name=BUN_RUNTIME --external:/bun:* > src/runtime.out.refresh.js; cat src/runtime.footer.with-refresh.js >> src/runtime.out.refresh.js
|
||||
@NODE_ENV=development $(ESBUILD) --define:process.env.NODE_ENV="development" --target=esnext --bundle src/runtime/index-without-hmr.ts --format=iife --platform=node --global-name=BUN_RUNTIME --external:/bun:* > src/runtime.node.pre.out.js; cat src/runtime.node.pre.out.js src/runtime.footer.node.js > src/runtime.node.out.js
|
||||
@NODE_ENV=development $(ESBUILD) --define:process.env.NODE_ENV="development" --target=esnext --bundle src/runtime/index-without-hmr.ts --format=iife --platform=node --global-name=BUN_RUNTIME --external:/bun:* > src/runtime.bun.pre.out.js; cat src/runtime.bun.pre.out.js src/runtime.footer.bun.js > src/runtime.bun.out.js
|
||||
@NODE_ENV=production $(ESBUILD) --define:process.env.NODE_ENV=\"production\" --target=esnext --bundle src/runtime.bun.js --format=esm --platform=node --minify --external:/bun:* > src/runtime.out.js
|
||||
|
||||
.PHONY: bun_error
|
||||
bun_error:
|
||||
@@ -1246,6 +1247,7 @@ jsc-build-mac-compile-debug:
|
||||
-DENABLE_FTL_JIT=ON \
|
||||
-DCMAKE_EXPORT_COMPILE_COMMANDS=ON \
|
||||
-DUSE_BUN_JSC_ADDITIONS=ON \
|
||||
-DENABLE_BUN_SKIP_FAILING_ASSERTIONS=ON \
|
||||
-DALLOW_LINE_AND_COLUMN_NUMBER_IN_BUILTINS=ON \
|
||||
-G Ninja \
|
||||
$(CMAKE_FLAGS_WITHOUT_RELEASE) \
|
||||
@@ -1355,7 +1357,7 @@ mimalloc-debug:
|
||||
-GNinja \
|
||||
. \
|
||||
&& ninja
|
||||
cp $(BUN_DEPS_DIR)/mimalloc/$(_MIMALLOC_DEBUG_FILE) $(BUN_DEPS_OUT_DIR)/$(MIMALLOC_FILE)
|
||||
cp $(BUN_DEPS_DIR)/mimalloc/$(_MIMALLOC_DEBUG_FILE) $(BUN_DEPS_OUT_DIR)/$(_MIMALLOC_DEBUG_FILE)
|
||||
|
||||
|
||||
# mimalloc is built as object files so that it can overload the system malloc on linux
|
||||
@@ -1895,7 +1897,7 @@ cold-jsc-start:
|
||||
misctools/cold-jsc-start.cpp -o cold-jsc-start
|
||||
|
||||
.PHONY: vendor-without-npm
|
||||
vendor-without-npm: node-fallbacks runtime_js fallback_decoder bun_error mimalloc picohttp zlib boringssl libarchive lolhtml sqlite usockets uws tinycc c-ares zstd base64
|
||||
vendor-without-npm: node-fallbacks runtime_js fallback_decoder bun_error mimalloc picohttp zlib boringssl libarchive lolhtml sqlite usockets uws lshpack tinycc c-ares zstd base64
|
||||
|
||||
|
||||
.PHONY: vendor-without-check
|
||||
|
||||
@@ -128,7 +128,7 @@ bun upgrade --canary
|
||||
|
||||
## Contributing
|
||||
|
||||
Refer to the [Project > Development](https://bun.sh/docs/project/development) guide to start contributing to Bun.
|
||||
Refer to the [Project > Contributing](https://bun.sh/docs/project/contributing) guide to start contributing to Bun.
|
||||
|
||||
## License
|
||||
|
||||
|
||||
BIN
bench/bun.lockb
BIN
bench/bun.lockb
Binary file not shown.
@@ -1,7 +1,7 @@
|
||||
import { bench, run } from "mitata";
|
||||
import { groupForEmitter } from "./implementations.mjs";
|
||||
|
||||
// Psuedo RNG is derived from https://stackoverflow.com/a/424445
|
||||
// Pseudo RNG is derived from https://stackoverflow.com/a/424445
|
||||
let rngState = 123456789;
|
||||
function nextInt() {
|
||||
const m = 0x80000000; // 2**31;
|
||||
|
||||
19
bench/glob/match.mjs
Normal file
19
bench/glob/match.mjs
Normal file
@@ -0,0 +1,19 @@
|
||||
import micromatch from "micromatch";
|
||||
import { bench, run } from "mitata";
|
||||
|
||||
const Glob = typeof Bun !== "undefined" ? Bun.Glob : undefined;
|
||||
const doMatch = typeof Bun === "undefined" ? micromatch.isMatch : (a, b) => new Glob(b).match(a);
|
||||
|
||||
bench((Glob ? "Bun.Glob - " : "micromatch - ") + "**/*.js", () => {
|
||||
doMatch("foo/bar.js", "**/*.js");
|
||||
});
|
||||
|
||||
bench((Glob ? "Bun.Glob - " : "micromatch - ") + "*.js", () => {
|
||||
doMatch("bar.js", "*.js");
|
||||
});
|
||||
|
||||
await run({
|
||||
avg: true,
|
||||
min_max: true,
|
||||
percentiles: true,
|
||||
});
|
||||
113
bench/glob/scan.mjs
Normal file
113
bench/glob/scan.mjs
Normal file
@@ -0,0 +1,113 @@
|
||||
import { run, bench, group } from "mitata";
|
||||
import fg from "fast-glob";
|
||||
import { fdir } from "fdir";
|
||||
|
||||
const normalPattern = "*.ts";
|
||||
const recursivePattern = "**/*.ts";
|
||||
const nodeModulesPattern = "**/node_modules/**/*.js";
|
||||
|
||||
const benchFdir = false;
|
||||
const cwd = undefined;
|
||||
|
||||
const bunOpts = {
|
||||
cwd,
|
||||
followSymlinks: false,
|
||||
absolute: true,
|
||||
};
|
||||
|
||||
const fgOpts = {
|
||||
cwd,
|
||||
followSymbolicLinks: false,
|
||||
onlyFiles: false,
|
||||
absolute: true,
|
||||
};
|
||||
|
||||
const Glob = "Bun" in globalThis ? globalThis.Bun.Glob : undefined;
|
||||
|
||||
group({ name: `async pattern="${normalPattern}"`, summary: true }, () => {
|
||||
bench("fast-glob", async () => {
|
||||
const entries = await fg.glob([normalPattern], fgOpts);
|
||||
});
|
||||
|
||||
if (Glob)
|
||||
bench("Bun.Glob", async () => {
|
||||
const entries = await Array.fromAsync(new Glob(normalPattern).scan(bunOpts));
|
||||
});
|
||||
|
||||
if (benchFdir)
|
||||
bench("fdir", async () => {
|
||||
const entries = await new fdir().withFullPaths().glob(normalPattern).crawl(process.cwd()).withPromise();
|
||||
});
|
||||
});
|
||||
|
||||
group({ name: `async-recursive pattern="${recursivePattern}"`, summary: true }, () => {
|
||||
bench("fast-glob", async () => {
|
||||
const entries = await fg.glob([recursivePattern], fgOpts);
|
||||
});
|
||||
|
||||
if (Glob)
|
||||
bench("Bun.Glob", async () => {
|
||||
const entries = await Array.fromAsync(new Glob(recursivePattern).scan(bunOpts));
|
||||
});
|
||||
|
||||
if (benchFdir)
|
||||
bench("fdir", async () => {
|
||||
const entries = await new fdir().withFullPaths().glob(recursivePattern).crawl(process.cwd()).withPromise();
|
||||
});
|
||||
});
|
||||
|
||||
group({ name: `sync pattern="${normalPattern}"`, summary: true }, () => {
|
||||
bench("fast-glob", () => {
|
||||
const entries = fg.globSync([normalPattern], fgOpts);
|
||||
});
|
||||
|
||||
if (Glob)
|
||||
bench("Bun.Glob", () => {
|
||||
const entries = [...new Glob(normalPattern).scanSync(bunOpts)];
|
||||
});
|
||||
|
||||
if (benchFdir)
|
||||
bench("fdir", async () => {
|
||||
const entries = new fdir().withFullPaths().glob(normalPattern).crawl(process.cwd()).sync();
|
||||
});
|
||||
});
|
||||
|
||||
group({ name: `sync-recursive pattern="${recursivePattern}"`, summary: true }, () => {
|
||||
bench("fast-glob", () => {
|
||||
const entries = fg.globSync([recursivePattern], fgOpts);
|
||||
});
|
||||
|
||||
if (Glob)
|
||||
bench("Bun.Glob", () => {
|
||||
const entries = [...new Glob(recursivePattern).scanSync(bunOpts)];
|
||||
});
|
||||
|
||||
if (benchFdir)
|
||||
bench("fdir", async () => {
|
||||
const entries = new fdir().withFullPaths().glob(recursivePattern).crawl(process.cwd()).sync();
|
||||
});
|
||||
});
|
||||
|
||||
group({ name: `node_modules pattern="${nodeModulesPattern}"`, summary: true }, () => {
|
||||
bench("fast-glob", async () => {
|
||||
const entries = await fg.glob([nodeModulesPattern], fgOpts);
|
||||
});
|
||||
|
||||
if (Glob)
|
||||
bench("Bun.Glob", async () => {
|
||||
const entries = await Array.fromAsync(new Glob(nodeModulesPattern).scan(bunOpts));
|
||||
});
|
||||
|
||||
if (benchFdir)
|
||||
bench("fdir", async () => {
|
||||
const entries = await new fdir().withFullPaths().glob(nodeModulesPattern).crawl(process.cwd()).withPromise();
|
||||
});
|
||||
});
|
||||
|
||||
await run({
|
||||
avg: true,
|
||||
colors: false,
|
||||
min_max: true,
|
||||
collect: true,
|
||||
percentiles: true,
|
||||
});
|
||||
@@ -7,6 +7,8 @@
|
||||
"benchmark": "^2.1.4",
|
||||
"esbuild": "^0.14.12",
|
||||
"eventemitter3": "^5.0.0",
|
||||
"fast-glob": "3.3.1",
|
||||
"fdir": "^6.1.0",
|
||||
"mitata": "^0.1.6"
|
||||
},
|
||||
"scripts": {
|
||||
|
||||
54
bench/snippets/buffer-concat.mjs
Normal file
54
bench/snippets/buffer-concat.mjs
Normal file
@@ -0,0 +1,54 @@
|
||||
import { bench, run } from "./runner.mjs";
|
||||
|
||||
for (let size of [32, 2048, 1024 * 16, 1024 * 1024 * 2, 1024 * 1024 * 16]) {
|
||||
const first = Buffer.allocUnsafe(size);
|
||||
const second = Buffer.allocUnsafe(size);
|
||||
const third = Buffer.allocUnsafe(size);
|
||||
first.fill(1);
|
||||
second.fill(2);
|
||||
third.fill(3);
|
||||
|
||||
const check = true;
|
||||
|
||||
const buffers = [first, second, third];
|
||||
|
||||
const fmt =
|
||||
size > 1024 * 1024
|
||||
? new Intl.NumberFormat(undefined, { unit: "megabyte", style: "unit" })
|
||||
: size > 1024
|
||||
? new Intl.NumberFormat(undefined, { unit: "kilobyte", style: "unit" })
|
||||
: new Intl.NumberFormat(undefined, { unit: "byte", style: "unit" });
|
||||
|
||||
bench(
|
||||
`Buffer.concat(${fmt.format(
|
||||
Number((size > 1024 * 1024 ? size / 1024 / 1024 : size > 1024 ? size / 1024 : size).toFixed(2)),
|
||||
)} x 3)`,
|
||||
() => {
|
||||
const result = Buffer.concat(buffers);
|
||||
if (check) {
|
||||
if (result.byteLength != size * 3) throw new Error("Wrong length");
|
||||
if (result[0] != 1) throw new Error("Wrong first byte");
|
||||
if (result[size] != 2) throw new Error("Wrong second byte");
|
||||
if (result[size * 2] != 3) throw new Error("Wrong third byte");
|
||||
|
||||
result[0] = 10;
|
||||
if (first[0] != 1) throw new Error("First buffer was modified");
|
||||
|
||||
result[size] = 20;
|
||||
if (second[0] != 2) throw new Error("Second buffer was modified");
|
||||
|
||||
result[size * 2] = 30;
|
||||
if (third[0] != 3) throw new Error("Third buffer was modified");
|
||||
}
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
const chunk = Buffer.alloc(16);
|
||||
chunk.fill("3");
|
||||
const array = Array.from({ length: 100 }, () => chunk);
|
||||
bench("Buffer.concat 100 tiny chunks", () => {
|
||||
return Buffer.concat(array);
|
||||
});
|
||||
|
||||
await run();
|
||||
7
bench/snippets/crypto-randomUUID.mjs
Normal file
7
bench/snippets/crypto-randomUUID.mjs
Normal file
@@ -0,0 +1,7 @@
|
||||
import { bench, run } from "./runner.mjs";
|
||||
|
||||
bench("crypto.randomUUID()", () => {
|
||||
return crypto.randomUUID();
|
||||
});
|
||||
|
||||
await run();
|
||||
@@ -1,9 +1,9 @@
|
||||
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
|
||||
|
||||
// pure JS implementation will optimze this out
|
||||
// bench("new Headers", function () {
|
||||
// return new Headers();
|
||||
// });
|
||||
bench("new Headers", function () {
|
||||
return new Headers();
|
||||
});
|
||||
|
||||
var big = new Headers({
|
||||
"Content-Type": "text/plain",
|
||||
@@ -17,17 +17,128 @@ var big = new Headers({
|
||||
"X-Yet-Another-Custom-Headz": "Hello Worlda",
|
||||
});
|
||||
|
||||
// bench("Header.get", function () {
|
||||
// return big.get("Content-Type");
|
||||
// });
|
||||
bench("new Headers([])", () => {
|
||||
return new Headers([]);
|
||||
});
|
||||
|
||||
// bench("Header.set (standard)", function () {
|
||||
// return big.set("Content-Type", "text/html");
|
||||
// });
|
||||
bench("new Headers({})", () => {
|
||||
return new Headers({});
|
||||
});
|
||||
|
||||
// bench("Header.set (non-standard)", function () {
|
||||
// return big.set("X-My-Custom", "text/html123");
|
||||
// });
|
||||
bench("new Headers(object)", () => {
|
||||
return new Headers({
|
||||
"Content-Type": "text/plain",
|
||||
"Content-Length": "123",
|
||||
"User-Agent": "node-fetch/1.0",
|
||||
});
|
||||
});
|
||||
|
||||
bench("new Headers(hugeObject)", () => {
|
||||
return new Headers({
|
||||
"Accept": "123",
|
||||
"Accept-Charset": "123",
|
||||
"Accept-Language": "123",
|
||||
"Accept-Encoding": "123",
|
||||
"Accept-Ranges": "123",
|
||||
"Access-Control-Allow-Credentials": "123",
|
||||
"Access-Control-Allow-Headers": "123",
|
||||
"Access-Control-Allow-Methods": "123",
|
||||
"Access-Control-Allow-Origin": "123",
|
||||
"Access-Control-Expose-Headers": "123",
|
||||
"Access-Control-Max-Age": "123",
|
||||
"Access-Control-Request-Headers": "123",
|
||||
"Access-Control-Request-Method": "123",
|
||||
"Age": "123",
|
||||
"Authorization": "123",
|
||||
"Cache-Control": "123",
|
||||
"Connection": "123",
|
||||
"Content-Disposition": "123",
|
||||
"Content-Encoding": "123",
|
||||
"Content-Language": "123",
|
||||
"Content-Length": "123",
|
||||
"Content-Location": "123",
|
||||
"Content-Security-Policy": "123",
|
||||
"Content-Security-Policy-Report-Only": "123",
|
||||
"Content-Type": "123",
|
||||
"Content-Range": "123",
|
||||
"Cookie": "123",
|
||||
"Cookie2": "123",
|
||||
"Cross-Origin-Embedder-Policy": "123",
|
||||
"Cross-Origin-Embedder-Policy-Report-Only": "123",
|
||||
"Cross-Origin-Opener-Policy": "123",
|
||||
"Cross-Origin-Opener-Policy-Report-Only": "123",
|
||||
"Cross-Origin-Resource-Policy": "123",
|
||||
"Date": "123",
|
||||
"DNT": "123",
|
||||
"Default-Style": "123",
|
||||
"ETag": "123",
|
||||
"Expect": "123",
|
||||
"Expires": "123",
|
||||
"Host": "123",
|
||||
"If-Match": "123",
|
||||
"If-Modified-Since": "123",
|
||||
"If-None-Match": "123",
|
||||
"If-Range": "123",
|
||||
"If-Unmodified-Since": "123",
|
||||
"Keep-Alive": "123",
|
||||
"Last-Event-ID": "123",
|
||||
"Last-Modified": "123",
|
||||
"Link": "123",
|
||||
"Location": "123",
|
||||
"Origin": "123",
|
||||
"Ping-From": "123",
|
||||
"Ping-To": "123",
|
||||
"Purpose": "123",
|
||||
"Pragma": "123",
|
||||
"Proxy-Authorization": "123",
|
||||
"Range": "123",
|
||||
"Referer": "123",
|
||||
"Referrer-Policy": "123",
|
||||
"Refresh": "123",
|
||||
"Report-To": "123",
|
||||
"Sec-Fetch-Dest": "123",
|
||||
"Sec-Fetch-Mode": "123",
|
||||
"Sec-WebSocket-Accept": "123",
|
||||
"Sec-WebSocket-Extensions": "123",
|
||||
"Sec-WebSocket-Key": "123",
|
||||
"Sec-WebSocket-Protocol": "123",
|
||||
"Sec-WebSocket-Version": "123",
|
||||
"Server-Timing": "123",
|
||||
"Service-Worker": "123",
|
||||
"Service-Worker-Allowed": "123",
|
||||
"Service-Worker-Navigation-Preload": "123",
|
||||
"Set-Cookie": "123",
|
||||
"Set-Cookie2": "123",
|
||||
"SourceMap": "123",
|
||||
"TE": "123",
|
||||
"Timing-Allow-Origin": "123",
|
||||
"Trailer": "123",
|
||||
"Transfer-Encoding": "123",
|
||||
"Upgrade": "123",
|
||||
"Upgrade-Insecure-Requests": "123",
|
||||
"User-Agent": "123",
|
||||
"Vary": "123",
|
||||
"Via": "123",
|
||||
"X-Content-Type-Options": "123",
|
||||
"X-DNS-Prefetch-Control": "123",
|
||||
"X-Frame-Options": "123",
|
||||
"X-SourceMap": "123",
|
||||
"X-XSS-Protection": "123",
|
||||
"X-Temp-Tablet": "123",
|
||||
});
|
||||
});
|
||||
|
||||
bench("Header.get", function () {
|
||||
return big.get("Content-Type");
|
||||
});
|
||||
|
||||
bench("Header.set (standard)", function () {
|
||||
return big.set("Content-Type", "text/html");
|
||||
});
|
||||
|
||||
bench("Header.set (non-standard)", function () {
|
||||
return big.set("X-My-Custom", "text/html123");
|
||||
});
|
||||
|
||||
if (big.toJSON)
|
||||
bench("Headers.toJSON", function () {
|
||||
@@ -42,4 +153,4 @@ bench("Object.fromEntries(headers)", function () {
|
||||
return Object.fromEntries(big);
|
||||
});
|
||||
|
||||
run();
|
||||
await run();
|
||||
|
||||
31
bench/snippets/peek-promise.mjs
Normal file
31
bench/snippets/peek-promise.mjs
Normal file
@@ -0,0 +1,31 @@
|
||||
import { bench, run } from "mitata";
|
||||
import { peek } from "bun";
|
||||
|
||||
let pending = Bun.sleep(1000);
|
||||
let resolved = Promise.resolve(1);
|
||||
|
||||
bench("Bun.peek - pending", () => {
|
||||
return peek(pending);
|
||||
});
|
||||
|
||||
bench("Bun.peek - resolved", () => {
|
||||
return peek(resolved);
|
||||
});
|
||||
|
||||
bench("Bun.peek - non-promise", () => {
|
||||
return peek(1);
|
||||
});
|
||||
|
||||
bench("Bun.peek.status - resolved", () => {
|
||||
return peek.status(pending);
|
||||
});
|
||||
|
||||
bench("Bun.peek.status - pending", () => {
|
||||
return peek.status(resolved);
|
||||
});
|
||||
|
||||
bench("Bun.peek.status - non-promise", () => {
|
||||
return peek.status(1);
|
||||
});
|
||||
|
||||
await run();
|
||||
@@ -1,13 +1,50 @@
|
||||
import { readdirSync } from "fs";
|
||||
import { readdirSync, readdir as readdirCb } from "fs";
|
||||
import { readdir } from "fs/promises";
|
||||
import { bench, run } from "./runner.mjs";
|
||||
import { argv } from "process";
|
||||
import { fileURLToPath } from "url";
|
||||
import { relative, resolve } from "path";
|
||||
import { createHash } from "crypto";
|
||||
|
||||
const dir = argv.length > 2 ? argv[2] : "/tmp";
|
||||
let dir = resolve(argv.length > 2 ? argv[2] : fileURLToPath(new URL("../../node_modules", import.meta.url)));
|
||||
if (dir.includes(process.cwd())) {
|
||||
dir = relative(process.cwd(), dir);
|
||||
}
|
||||
|
||||
const count = readdirSync(dir).length;
|
||||
bench(`readdir("${dir}")`, () => {
|
||||
readdirSync(dir, { withFileTypes: true });
|
||||
const result = await readdir(dir, { recursive: true });
|
||||
const count = result.length;
|
||||
const syncCount = readdirSync(dir, { recursive: true }).length;
|
||||
|
||||
const hash = createHash("sha256").update(result.sort().join("\n")).digest("hex");
|
||||
|
||||
bench(`await readdir("${dir}", {recursive: true})`, async () => {
|
||||
await readdir(dir, { recursive: true });
|
||||
});
|
||||
|
||||
bench(`await readdir("${dir}", {recursive: true}) x 10`, async () => {
|
||||
const promises = [
|
||||
readdir(dir, { recursive: true }),
|
||||
readdir(dir, { recursive: true }),
|
||||
readdir(dir, { recursive: true }),
|
||||
readdir(dir, { recursive: true }),
|
||||
readdir(dir, { recursive: true }),
|
||||
readdir(dir, { recursive: true }),
|
||||
readdir(dir, { recursive: true }),
|
||||
readdir(dir, { recursive: true }),
|
||||
readdir(dir, { recursive: true }),
|
||||
readdir(dir, { recursive: true }),
|
||||
readdir(dir, { recursive: true }),
|
||||
];
|
||||
await Promise.all(promises);
|
||||
});
|
||||
|
||||
bench(`await readdir("${dir}", {recursive: false})`, async () => {
|
||||
await readdir(dir, { recursive: false });
|
||||
});
|
||||
|
||||
await run();
|
||||
console.log("\n\nFor", count, "files/dirs in", dir);
|
||||
console.log("\n", count, "files/dirs in", dir, "\n", "SHA256:", hash, "\n");
|
||||
|
||||
if (count !== syncCount) {
|
||||
throw new Error(`Mismatched file counts: ${count} async !== ${syncCount} sync`);
|
||||
}
|
||||
|
||||
113
bench/snippets/urlsearchparams.mjs
Normal file
113
bench/snippets/urlsearchparams.mjs
Normal file
@@ -0,0 +1,113 @@
|
||||
import { bench, run } from "./runner.mjs";
|
||||
|
||||
// bench("new URLSearchParams({})", () => {
|
||||
// return new URLSearchParams({});
|
||||
// });
|
||||
|
||||
bench("new URLSearchParams(obj)", () => {
|
||||
return new URLSearchParams({
|
||||
"Content-Type": "text/plain",
|
||||
"Content-Length": "123",
|
||||
"User-Agent": "node-fetch/1.0",
|
||||
"Accept-Encoding": "gzip,deflate",
|
||||
"Content-Length": "0",
|
||||
"Content-Range": "bytes 0-9/10",
|
||||
});
|
||||
});
|
||||
|
||||
bench("new URLSearchParams(absurdlyHugeObject)", () => {
|
||||
return new URLSearchParams({
|
||||
"Accept": "123",
|
||||
"Accept-Charset": "123",
|
||||
"Accept-Language": "123",
|
||||
"Accept-Encoding": "123",
|
||||
"Accept-Ranges": "123",
|
||||
"Access-Control-Allow-Credentials": "123",
|
||||
"Access-Control-Allow-Headers": "123",
|
||||
"Access-Control-Allow-Methods": "123",
|
||||
"Access-Control-Allow-Origin": "123",
|
||||
"Access-Control-Expose-Headers": "123",
|
||||
"Access-Control-Max-Age": "123",
|
||||
"Access-Control-Request-Headers": "123",
|
||||
"Access-Control-Request-Method": "123",
|
||||
"Age": "123",
|
||||
"Authorization": "123",
|
||||
"Cache-Control": "123",
|
||||
"Connection": "123",
|
||||
"Content-Disposition": "123",
|
||||
"Content-Encoding": "123",
|
||||
"Content-Language": "123",
|
||||
"Content-Length": "123",
|
||||
"Content-Location": "123",
|
||||
"Content-Security-Policy": "123",
|
||||
"Content-Security-Policy-Report-Only": "123",
|
||||
"Content-Type": "123",
|
||||
"Content-Range": "123",
|
||||
"Cookie": "123",
|
||||
"Cookie2": "123",
|
||||
"Cross-Origin-Embedder-Policy": "123",
|
||||
"Cross-Origin-Embedder-Policy-Report-Only": "123",
|
||||
"Cross-Origin-Opener-Policy": "123",
|
||||
"Cross-Origin-Opener-Policy-Report-Only": "123",
|
||||
"Cross-Origin-Resource-Policy": "123",
|
||||
"Date": "123",
|
||||
"DNT": "123",
|
||||
"Default-Style": "123",
|
||||
"ETag": "123",
|
||||
"Expect": "123",
|
||||
"Expires": "123",
|
||||
"Host": "123",
|
||||
"If-Match": "123",
|
||||
"If-Modified-Since": "123",
|
||||
"If-None-Match": "123",
|
||||
"If-Range": "123",
|
||||
"If-Unmodified-Since": "123",
|
||||
"Keep-Alive": "123",
|
||||
"Last-Event-ID": "123",
|
||||
"Last-Modified": "123",
|
||||
"Link": "123",
|
||||
"Location": "123",
|
||||
"Origin": "123",
|
||||
"Ping-From": "123",
|
||||
"Ping-To": "123",
|
||||
"Purpose": "123",
|
||||
"Pragma": "123",
|
||||
"Proxy-Authorization": "123",
|
||||
"Range": "123",
|
||||
"Referer": "123",
|
||||
"Referrer-Policy": "123",
|
||||
"Refresh": "123",
|
||||
"Report-To": "123",
|
||||
"Sec-Fetch-Dest": "123",
|
||||
"Sec-Fetch-Mode": "123",
|
||||
"Sec-WebSocket-Accept": "123",
|
||||
"Sec-WebSocket-Extensions": "123",
|
||||
"Sec-WebSocket-Key": "123",
|
||||
"Sec-WebSocket-Protocol": "123",
|
||||
"Sec-WebSocket-Version": "123",
|
||||
"Server-Timing": "123",
|
||||
"Service-Worker": "123",
|
||||
"Service-Worker-Allowed": "123",
|
||||
"Service-Worker-Navigation-Preload": "123",
|
||||
"Set-Cookie": "123",
|
||||
"Set-Cookie2": "123",
|
||||
"SourceMap": "123",
|
||||
"TE": "123",
|
||||
"Timing-Allow-Origin": "123",
|
||||
"Trailer": "123",
|
||||
"Transfer-Encoding": "123",
|
||||
"Upgrade": "123",
|
||||
"Upgrade-Insecure-Requests": "123",
|
||||
"User-Agent": "123",
|
||||
"Vary": "123",
|
||||
"Via": "123",
|
||||
"X-Content-Type-Options": "123",
|
||||
"X-DNS-Prefetch-Control": "123",
|
||||
"X-Frame-Options": "123",
|
||||
"X-SourceMap": "123",
|
||||
"X-XSS-Protection": "123",
|
||||
"X-Temp-Tablet": "123",
|
||||
});
|
||||
});
|
||||
|
||||
await run();
|
||||
@@ -1,7 +1,8 @@
|
||||
import { run, bench } from "mitata";
|
||||
import { Database } from "bun:sqlite";
|
||||
import { join } from "path";
|
||||
|
||||
const db = Database.open("./src/northwind.sqlite");
|
||||
const db = Database.open(join(import.meta.dir, "src", "northwind.sqlite"));
|
||||
|
||||
{
|
||||
const sql = db.prepare(`SELECT * FROM "Order"`);
|
||||
|
||||
156
build.zig
156
build.zig
@@ -1,39 +1,22 @@
|
||||
const recommended_zig_version = "0.12.0-dev.1297+a9e66ed73";
|
||||
const zig_version = @import("builtin").zig_version;
|
||||
const std = @import("std");
|
||||
|
||||
const pathRel = std.fs.path.relative;
|
||||
const builtin = @import("builtin");
|
||||
const Wyhash = @import("./src/wyhash.zig").Wyhash;
|
||||
|
||||
const zig_version = builtin.zig_version;
|
||||
|
||||
/// Do not rename this constant. It is scanned by some scripts to determine which zig version to install.
|
||||
const recommended_zig_version = "0.12.0-dev.1828+225fe6ddb";
|
||||
|
||||
var is_debug_build = false;
|
||||
fn moduleSource(comptime out: []const u8) FileSource {
|
||||
if (comptime std.fs.path.dirname(@src().file)) |base| {
|
||||
const outpath = comptime base ++ std.fs.path.sep_str ++ out;
|
||||
return FileSource.relative(outpath);
|
||||
} else {
|
||||
return FileSource.relative(out);
|
||||
}
|
||||
}
|
||||
|
||||
fn exists(path: []const u8) bool {
|
||||
_ = std.fs.openFileAbsolute(path, .{ .mode = .read_only }) catch return false;
|
||||
return true;
|
||||
}
|
||||
|
||||
const color_map = std.ComptimeStringMap([]const u8, .{
|
||||
&.{ "black", "30m" },
|
||||
&.{ "blue", "34m" },
|
||||
&.{ "b", "1m" },
|
||||
&.{ "d", "2m" },
|
||||
&.{ "cyan", "36m" },
|
||||
&.{ "green", "32m" },
|
||||
&.{ "magenta", "35m" },
|
||||
&.{ "red", "31m" },
|
||||
&.{ "white", "37m" },
|
||||
&.{ "yellow", "33m" },
|
||||
});
|
||||
|
||||
fn addInternalPackages(b: *Build, step: *CompileStep, _: std.mem.Allocator, _: []const u8, target: anytype) !void {
|
||||
var io: *Module = brk: {
|
||||
const io: *Module = brk: {
|
||||
if (target.isDarwin()) {
|
||||
break :brk b.createModule(.{
|
||||
.source_file = FileSource.relative("src/io/io_darwin.zig"),
|
||||
@@ -63,7 +46,7 @@ fn addInternalPackages(b: *Build, step: *CompileStep, _: std.mem.Allocator, _: [
|
||||
break :brk b.createModule(.{ .source_file = FileSource.relative("src/deps/zlib.posix.zig") });
|
||||
});
|
||||
|
||||
var async_: *Module = brk: {
|
||||
const async_: *Module = brk: {
|
||||
if (target.isDarwin() or target.isLinux() or target.isFreeBSD()) {
|
||||
break :brk b.createModule(.{
|
||||
.source_file = FileSource.relative("src/async/posix_event_loop.zig"),
|
||||
@@ -90,6 +73,7 @@ const BunBuildOptions = struct {
|
||||
bindgen: bool = false,
|
||||
sizegen: bool = false,
|
||||
base_path: [:0]const u8 = "",
|
||||
tracy_callstack_depth: u16,
|
||||
|
||||
runtime_js_version: u64 = 0,
|
||||
fallback_html_version: u64 = 0,
|
||||
@@ -147,29 +131,8 @@ const BunBuildOptions = struct {
|
||||
|
||||
// relative to the prefix
|
||||
var output_dir: []const u8 = "";
|
||||
fn panicIfNotFound(comptime filepath: []const u8) []const u8 {
|
||||
var file = std.fs.cwd().openFile(filepath, .{ .optimize = .read_only }) catch |err| {
|
||||
std.debug.panic("error: {s} opening {s}. Please ensure you've downloaded git submodules, and ran `make vendor`, `make jsc`.", .{ filepath, @errorName(err) });
|
||||
};
|
||||
file.close();
|
||||
|
||||
return filepath;
|
||||
}
|
||||
|
||||
const fmt = struct {
|
||||
pub usingnamespace @import("std").fmt;
|
||||
|
||||
pub fn hexInt(value: anytype) @TypeOf(std.fmt.fmtSliceHexLower("")) {
|
||||
return std.fmt.fmtSliceHexLower(std.mem.asBytes(&value));
|
||||
}
|
||||
|
||||
pub fn hexIntUp(value: anytype) @TypeOf(std.fmt.fmtSliceHexUpper("")) {
|
||||
return std.fmt.fmtSliceHexUpper(std.mem.asBytes(&value));
|
||||
}
|
||||
};
|
||||
|
||||
var x64 = "x64";
|
||||
var optimize: std.builtin.OptimizeMode = undefined;
|
||||
var optimize: std.builtin.OptimizeMode = .Debug;
|
||||
|
||||
const Build = std.Build;
|
||||
const CrossTarget = std.zig.CrossTarget;
|
||||
@@ -193,16 +156,16 @@ pub fn build_(b: *Build) !void {
|
||||
switch (comptime zig_version.order(std.SemanticVersion.parse(recommended_zig_version) catch unreachable)) {
|
||||
.eq => {},
|
||||
.lt => {
|
||||
@compileError("The minimum version of Zig required to compile Bun is " ++ recommended_zig_version ++ ", found " ++ @import("builtin").zig_version_string);
|
||||
@compileError("The minimum version of Zig required to compile Bun is " ++ recommended_zig_version ++ ", found " ++ @import("builtin").zig_version_string ++ ". Please follow the instructions at https://bun.sh/docs/project/contributing. You may need to re-run `bun setup`.");
|
||||
},
|
||||
.gt => {
|
||||
const colors = std.io.getStdErr().supportsAnsiEscapeCodes();
|
||||
std.debug.print(
|
||||
"{s}WARNING:\nBun recommends Zig version '{s}', but found '{s}', build may fail...\nMake sure you installed the right version as per https://bun.sh/docs/project/contributing#install-zig\n{s}You can update to the right version using 'zigup {s}'\n\n",
|
||||
"{s}WARNING:\nBun recommends Zig version '{s}', but found '{s}', build may fail...\nMake sure you are following the instructions at https://bun.sh/docs/project/contributing\n{s}You can update to the right version using 'zigup {s}'\n\n",
|
||||
.{
|
||||
if (colors) "\x1b[1;33m" else "",
|
||||
recommended_zig_version,
|
||||
@import("builtin").zig_version_string,
|
||||
builtin.zig_version_string,
|
||||
if (colors) "\x1b[0m" else "",
|
||||
recommended_zig_version,
|
||||
},
|
||||
@@ -219,39 +182,53 @@ pub fn build_(b: *Build) !void {
|
||||
// between Debug, ReleaseSafe, ReleaseFast, and ReleaseSmall.
|
||||
optimize = b.standardOptimizeOption(.{});
|
||||
|
||||
const generated_code_directory = b.option([]const u8, "generated-code", "Set the generated code directory") orelse "./build";
|
||||
var generated_code_directory = b.option([]const u8, "generated-code", "Set the generated code directory") orelse "";
|
||||
|
||||
if (generated_code_directory.len == 0) {
|
||||
generated_code_directory = b.pathFromRoot("build/codegen");
|
||||
}
|
||||
|
||||
var output_dir_buf = std.mem.zeroes([4096]u8);
|
||||
var bin_label = if (optimize == std.builtin.OptimizeMode.Debug) "packages/debug-bun-" else "packages/bun-";
|
||||
const bin_label = if (optimize == std.builtin.OptimizeMode.Debug) "packages/debug-bun-" else "packages/bun-";
|
||||
|
||||
var triplet_buf: [64]u8 = undefined;
|
||||
var os_tagname = @tagName(target.getOs().tag);
|
||||
|
||||
const arch: std.Target.Cpu.Arch = target.getCpuArch();
|
||||
|
||||
if (std.mem.eql(u8, os_tagname, "macos")) {
|
||||
os_tagname = "darwin";
|
||||
target.os_version_min = std.zig.CrossTarget.OsVersion{ .semver = .{ .major = 11, .minor = 0, .patch = 0 } };
|
||||
} else if (target.isLinux()) {
|
||||
target.setGnuLibCVersion(2, 27, 0);
|
||||
var os_tagname = @tagName(target.getOs().tag);
|
||||
|
||||
switch (target.getOs().tag) {
|
||||
.macos => {
|
||||
os_tagname = "darwin";
|
||||
target.os_version_min = std.zig.CrossTarget.OsVersion{ .semver = .{ .major = 11, .minor = 0, .patch = 0 } };
|
||||
},
|
||||
.windows => {
|
||||
target.os_version_min = std.zig.CrossTarget.OsVersion{
|
||||
// Windows 1809
|
||||
// Minimum version for a syscall related to bun.sys.renameat
|
||||
// if you update this please update install.ps1
|
||||
.windows = .win10_rs5,
|
||||
};
|
||||
},
|
||||
.linux => {
|
||||
target.setGnuLibCVersion(2, 27, 0);
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
std.mem.copy(
|
||||
u8,
|
||||
&triplet_buf,
|
||||
os_tagname,
|
||||
);
|
||||
var osname = triplet_buf[0..os_tagname.len];
|
||||
|
||||
@memcpy(triplet_buf[0..].ptr, os_tagname);
|
||||
const osname = triplet_buf[0..os_tagname.len];
|
||||
triplet_buf[osname.len] = '-';
|
||||
|
||||
std.mem.copy(u8, triplet_buf[osname.len + 1 ..], @tagName(target.getCpuArch()));
|
||||
@memcpy(triplet_buf[osname.len + 1 ..].ptr, @tagName(target.getCpuArch()));
|
||||
var cpuArchName = triplet_buf[osname.len + 1 ..][0..@tagName(target.getCpuArch()).len];
|
||||
std.mem.replaceScalar(u8, cpuArchName, '_', '-');
|
||||
if (std.mem.eql(u8, cpuArchName, "x86-64")) {
|
||||
std.mem.copy(u8, cpuArchName, "x64");
|
||||
@memcpy(cpuArchName.ptr, "x64");
|
||||
cpuArchName = cpuArchName[0..3];
|
||||
}
|
||||
|
||||
var triplet = triplet_buf[0 .. osname.len + cpuArchName.len + 1];
|
||||
const triplet = triplet_buf[0 .. osname.len + cpuArchName.len + 1];
|
||||
|
||||
const outfile_maybe = b.option([]const u8, "output-file", "target to install to");
|
||||
|
||||
@@ -350,6 +327,7 @@ pub fn build_(b: *Build) !void {
|
||||
.baseline = is_baseline,
|
||||
.bindgen = false,
|
||||
.base_path = try b.allocator.dupeZ(u8, b.pathFromRoot(".")),
|
||||
.tracy_callstack_depth = b.option(u16, "tracy_callstack_depth", "") orelse 10,
|
||||
};
|
||||
};
|
||||
|
||||
@@ -366,10 +344,12 @@ pub fn build_(b: *Build) !void {
|
||||
obj.target.cpu_model = .{ .explicit = &std.Target.x86.cpu.x86_64_v2 };
|
||||
} else if (arch.isX86()) {
|
||||
obj.target.cpu_model = .{ .explicit = &std.Target.x86.cpu.haswell };
|
||||
} else if (arch.isAARCH64() and target.isDarwin()) {
|
||||
obj.target.cpu_model = .{ .explicit = &std.Target.aarch64.cpu.apple_m1 };
|
||||
} else if (arch.isAARCH64() and target.isLinux()) {
|
||||
obj.target.cpu_model = .{ .explicit = &std.Target.aarch64.cpu.generic };
|
||||
} else if (arch.isAARCH64()) {
|
||||
if (target.isDarwin()) {
|
||||
obj.target.cpu_model = .{ .explicit = &std.Target.aarch64.cpu.apple_m1 };
|
||||
} else {
|
||||
obj.target.cpu_model = .{ .explicit = &std.Target.aarch64.cpu.generic };
|
||||
}
|
||||
}
|
||||
|
||||
try default_build_options.updateRuntime();
|
||||
@@ -381,8 +361,7 @@ pub fn build_(b: *Build) !void {
|
||||
max_version,
|
||||
obj.target.getCpuModel().name,
|
||||
}) catch {};
|
||||
std.io.getStdErr().writer().print("Zig {s}\n", .{@import("builtin").zig_version_string}) catch {};
|
||||
// std.io.getStdErr().writer().print("Output: {s}/{s}\n\n", .{ output_dir, bun_executable_name }) catch unreachable;
|
||||
std.io.getStdErr().writer().print("Zig v{s}\n", .{builtin.zig_version_string}) catch {};
|
||||
|
||||
defer obj_step.dependOn(&obj.step);
|
||||
|
||||
@@ -403,11 +382,11 @@ pub fn build_(b: *Build) !void {
|
||||
|
||||
// Generated Code
|
||||
// TODO: exit with a better error early if these files do not exist. it is an indication someone ran `zig build` directly without the code generators.
|
||||
obj.addModule("generated/ZigGeneratedClasses.zig", b.createModule(.{
|
||||
.source_file = .{ .path = b.fmt("{s}/ZigGeneratedClasses.zig", .{generated_code_directory}) },
|
||||
obj.addModule("ZigGeneratedClasses", b.createModule(.{
|
||||
.source_file = .{ .path = b.pathJoin(&.{ generated_code_directory, "ZigGeneratedClasses.zig" }) },
|
||||
}));
|
||||
obj.addModule("generated/ResolvedSourceTag.zig", b.createModule(.{
|
||||
.source_file = .{ .path = b.fmt("{s}/ResolvedSourceTag.zig", .{generated_code_directory}) },
|
||||
obj.addModule("ResolvedSourceTag", b.createModule(.{
|
||||
.source_file = .{ .path = b.pathJoin(&.{ generated_code_directory, "ResolvedSourceTag.zig" }) },
|
||||
}));
|
||||
|
||||
obj.linkLibC();
|
||||
@@ -415,6 +394,7 @@ pub fn build_(b: *Build) !void {
|
||||
obj.strip = false;
|
||||
obj.omit_frame_pointer = optimize != .Debug;
|
||||
obj.subsystem = .Console;
|
||||
|
||||
// Disable stack probing on x86 so we don't need to include compiler_rt
|
||||
if (target.getCpuArch().isX86() or target.isWindows()) obj.disable_stack_probing = true;
|
||||
|
||||
@@ -568,9 +548,9 @@ pub fn build_(b: *Build) !void {
|
||||
{
|
||||
const headers_step = b.step("test", "Build test");
|
||||
|
||||
var test_file = b.option([]const u8, "test-file", "Input file for test");
|
||||
var test_bin_ = b.option([]const u8, "test-bin", "Emit bin to");
|
||||
var test_filter = b.option([]const u8, "test-filter", "Filter for test");
|
||||
const test_file = b.option([]const u8, "test-file", "Input file for test");
|
||||
const test_bin_ = b.option([]const u8, "test-bin", "Emit bin to");
|
||||
const test_filter = b.option([]const u8, "test-filter", "Filter for test");
|
||||
|
||||
var headers_obj: *CompileStep = b.addTest(.{
|
||||
.root_source_file = FileSource.relative(test_file orelse "src/main.zig"),
|
||||
@@ -597,7 +577,19 @@ pub fn build_(b: *Build) !void {
|
||||
headers_obj.addOptions("build_options", default_build_options.step(b));
|
||||
}
|
||||
|
||||
b.default_step.dependOn(obj_step);
|
||||
// Running `zig build` with no arguments is almost always a mistake.
|
||||
const mistake_message = b.addSystemCommand(&.{
|
||||
"echo",
|
||||
\\
|
||||
\\error: To build Bun from source, please use `bun run setup` instead of `zig build`"
|
||||
\\
|
||||
\\If you want to build the zig code only, run:
|
||||
\\ 'zig build obj -Dgenerated-code=./build/codegen [...opts]'
|
||||
\\
|
||||
\\For more info, see https://bun.sh/docs/project/contributing
|
||||
\\
|
||||
});
|
||||
b.default_step.dependOn(&mistake_message.step);
|
||||
}
|
||||
|
||||
pub var original_make_fn: ?*const fn (step: *std.build.Step) anyerror!void = null;
|
||||
|
||||
@@ -1,15 +1,17 @@
|
||||
#/usr/bin/env bash
|
||||
|
||||
_file_arguments() {
|
||||
shopt -s extglob globstar
|
||||
local extensions="${1}";
|
||||
local extensions="${1}"
|
||||
local reset=$(shopt -p globstar)
|
||||
shopt -s globstar
|
||||
|
||||
if [[ -z "${cur_word}" ]]; then
|
||||
COMPREPLY=( $(compgen -fG -X "${extensions}" -- "${cur_word}") );
|
||||
else
|
||||
COMPREPLY=( $(compgen -f -X "${extensions}" -- "${cur_word}") );
|
||||
fi
|
||||
shopt -u extglob globstar
|
||||
|
||||
$reset
|
||||
}
|
||||
|
||||
_long_short_completion() {
|
||||
@@ -43,9 +45,7 @@ _read_scripts_in_package_json() {
|
||||
local package_json_compreply;
|
||||
local matched="${BASH_REMATCH[@]:1}";
|
||||
local scripts="${matched%%\}*}";
|
||||
shopt -s extglob;
|
||||
scripts="${scripts//@(\"|\')/}";
|
||||
shopt -u extglob;
|
||||
readarray -td, scripts <<<"${scripts}";
|
||||
for completion in "${scripts[@]}"; do
|
||||
package_json_compreply+=( "${completion%:*}" );
|
||||
@@ -84,7 +84,7 @@ _bun_completions() {
|
||||
|
||||
local SUBCOMMANDS="dev bun create run install add remove upgrade completions discord help init pm x";
|
||||
|
||||
GLOBAL_OPTIONS[LONG_OPTIONS]="--use --cwd --bunfile --server-bunfile --config --disable-react-fast-refresh --disable-hmr --extension-order --jsx-factory --jsx-fragment --extension-order --jsx-factory --jsx-fragment --jsx-import-source --jsx-production --jsx-runtime --main-fields --no-summary --version --platform --public-dir --tsconfig-override --define --external --help --inject --loader --origin --port --dump-environment-variables --dump-limits --disable-bun-js";
|
||||
GLOBAL_OPTIONS[LONG_OPTIONS]="--use --cwd --bunfile --server-bunfile --config --disable-react-fast-refresh --disable-hmr --env-file --extension-order --jsx-factory --jsx-fragment --extension-order --jsx-factory --jsx-fragment --jsx-import-source --jsx-production --jsx-runtime --main-fields --no-summary --version --platform --public-dir --tsconfig-override --define --external --help --inject --loader --origin --port --dump-environment-variables --dump-limits --disable-bun-js";
|
||||
GLOBAL_OPTIONS[SHORT_OPTIONS]="-c -v -d -e -h -i -l -u -p";
|
||||
|
||||
PACKAGE_OPTIONS[ADD_OPTIONS_LONG]="--development --optional";
|
||||
|
||||
@@ -51,7 +51,7 @@ function __bun_last_cmd --argument-names n
|
||||
end
|
||||
|
||||
set -l bun_install_boolean_flags yarn production optional development no-save dry-run force no-cache silent verbose global
|
||||
set -l bun_install_boolean_flags_descriptions "Write a yarn.lock file (yarn v1)" "Don't install devDependencies" "Add dependency to optionalDependencies" "Add dependency to devDependencies" "Don't install devDependencies" "Don't install anything" "Always request the latest versions from the registry & reinstall all dependenices" "Ignore manifest cache entirely" "Don't output anything" "Excessively verbose logging" "Use global folder"
|
||||
set -l bun_install_boolean_flags_descriptions "Write a yarn.lock file (yarn v1)" "Don't install devDependencies" "Add dependency to optionalDependencies" "Add dependency to devDependencies" "Don't install devDependencies" "Don't install anything" "Always request the latest versions from the registry & reinstall all dependencies" "Ignore manifest cache entirely" "Don't output anything" "Excessively verbose logging" "Use global folder"
|
||||
|
||||
set -l bun_builtin_cmds dev create help bun upgrade discord run install remove add init link unlink pm x
|
||||
set -l bun_builtin_cmds_without_run dev create help bun upgrade discord install remove add init pm x
|
||||
|
||||
@@ -406,6 +406,7 @@ _bun_run_completion() {
|
||||
'--cwd[Absolute path to resolve files & entry points from. This just changes the process cwd]:cwd' \
|
||||
'--config[Config file to load bun from (e.g. -c bunfig.toml)]: :->config' \
|
||||
'-c[Config file to load bun from (e.g. -c bunfig.toml)]: :->config' \
|
||||
'--env-file[Load environment variables from the specified file(s)]:env-file' \
|
||||
'--extension-order[Defaults to: .tsx,.ts,.jsx,.js,.json]:extension-order' \
|
||||
'--jsx-factory[Changes the function called when compiling JSX elements using the classic JSX runtime]:jsx-factory' \
|
||||
'--jsx-fragment[Changes the function called when compiling JSX fragments]:jsx-fragment' \
|
||||
@@ -572,6 +573,7 @@ _bun_test_completion() {
|
||||
'--cwd[Set a specific cwd]:cwd' \
|
||||
'-c[Load config(bunfig.toml)]: :->config' \
|
||||
'--config[Load config(bunfig.toml)]: :->config' \
|
||||
'--env-file[Load environment variables from the specified file(s)]:env-file' \
|
||||
'--extension-order[Defaults to: .tsx,.ts,.jsx,.js,.json]:extension-order' \
|
||||
'--jsx-factory[Changes the function called when compiling JSX elements using the classic JSX runtime]:jsx-factory' \
|
||||
'--jsx-fragment[Changes the function called when compiling JSX fragments]:jsx-fragment' \
|
||||
|
||||
@@ -78,6 +78,9 @@ subcommands:
|
||||
- name: server-bunfile
|
||||
type: string
|
||||
summary: "Use a specific .bun file for SSR in bun dev (default: node_modules.server.bun)"
|
||||
- name: env-file
|
||||
type: string
|
||||
summary: "Load environment variables from the specified file(s)"
|
||||
- name: extension-order
|
||||
type: string
|
||||
summary: "defaults to: .tsx,.ts,.jsx,.js,.json"
|
||||
@@ -118,7 +121,7 @@ subcommands:
|
||||
- frozen-lockfile -- "Disallow changes to lockfile"
|
||||
- no-save --
|
||||
- dry-run -- "Don't install anything"
|
||||
- force -- "Always request the latest versions from the registry & reinstall all dependenices"
|
||||
- force -- "Always request the latest versions from the registry & reinstall all dependencies"
|
||||
- name: cache-dir
|
||||
type: string
|
||||
summary: "Store & load cached data from a specific directory path"
|
||||
@@ -153,7 +156,7 @@ subcommands:
|
||||
- frozen-lockfile -- "Disallow changes to lockfile"
|
||||
- no-save --
|
||||
- dry-run -- "Don't install anything"
|
||||
- force -- "Always request the latest versions from the registry & reinstall all dependenices"
|
||||
- force -- "Always request the latest versions from the registry & reinstall all dependencies"
|
||||
- no-cache -- "Ignore manifest cache entirely"
|
||||
- silent -- "Don't output anything"
|
||||
- verbose -- "Excessively verbose logging"
|
||||
@@ -191,7 +194,7 @@ subcommands:
|
||||
- frozen-lockfile -- "Disallow changes to lockfile"
|
||||
- no-save --
|
||||
- dry-run -- "Don't install anything"
|
||||
- force -- "Always request the latest versions from the registry & reinstall all dependenices"
|
||||
- force -- "Always request the latest versions from the registry & reinstall all dependencies"
|
||||
- name: cache-dir
|
||||
type: string
|
||||
summary: "Store & load cached data from a specific directory path"
|
||||
|
||||
@@ -91,6 +91,11 @@ RUN apk --no-cache add \
|
||||
|
||||
FROM alpine:3.18
|
||||
|
||||
# Disable the runtime transpiler cache by default inside Docker containers.
|
||||
# On ephemeral containers, the cache is not useful
|
||||
ARG BUN_RUNTIME_TRANSPILER_CACHE_PATH=0
|
||||
ENV BUN_RUNTIME_TRANSPILER_CACHE_PATH=${BUN_RUNTIME_TRANSPILER_CACHE_PATH}
|
||||
|
||||
COPY --from=build /tmp/glibc.apk /tmp/
|
||||
COPY --from=build /tmp/glibc-bin.apk /tmp/
|
||||
COPY --from=build /usr/local/bin/bun /usr/local/bin/
|
||||
|
||||
@@ -57,6 +57,11 @@ RUN apt-get update -qq \
|
||||
|
||||
FROM debian:bullseye-slim
|
||||
|
||||
# Disable the runtime transpiler cache by default inside Docker containers.
|
||||
# On ephemeral containers, the cache is not useful
|
||||
ARG BUN_RUNTIME_TRANSPILER_CACHE_PATH=0
|
||||
ENV BUN_RUNTIME_TRANSPILER_CACHE_PATH=${BUN_RUNTIME_TRANSPILER_CACHE_PATH}
|
||||
|
||||
COPY docker-entrypoint.sh /usr/local/bin
|
||||
COPY --from=build /usr/local/bin/bun /usr/local/bin/bun
|
||||
|
||||
|
||||
@@ -58,6 +58,11 @@ FROM debian:bullseye
|
||||
COPY docker-entrypoint.sh /usr/local/bin
|
||||
COPY --from=build /usr/local/bin/bun /usr/local/bin/bun
|
||||
|
||||
# Disable the runtime transpiler cache by default inside Docker containers.
|
||||
# On ephemeral containers, the cache is not useful
|
||||
ARG BUN_RUNTIME_TRANSPILER_CACHE_PATH=0
|
||||
ENV BUN_RUNTIME_TRANSPILER_CACHE_PATH=${BUN_RUNTIME_TRANSPILER_CACHE_PATH}
|
||||
|
||||
RUN groupadd bun \
|
||||
--gid 1000 \
|
||||
&& useradd bun \
|
||||
|
||||
@@ -57,6 +57,11 @@ RUN apt-get update -qq \
|
||||
|
||||
FROM gcr.io/distroless/base-nossl-debian11
|
||||
|
||||
# Disable the runtime transpiler cache by default inside Docker containers.
|
||||
# On ephemeral containers, the cache is not useful
|
||||
ARG BUN_RUNTIME_TRANSPILER_CACHE_PATH=0
|
||||
ENV BUN_RUNTIME_TRANSPILER_CACHE_PATH=${BUN_RUNTIME_TRANSPILER_CACHE_PATH}
|
||||
|
||||
COPY --from=build /usr/local/bin/bun /usr/local/bin/
|
||||
|
||||
# Temporarily use the `build`-stage image binaries to create a symlink:
|
||||
|
||||
@@ -49,7 +49,6 @@ Despite the name, it isn't an array and supports none of the array methods and o
|
||||
|
||||
```ts
|
||||
const buf = new ArrayBuffer(8);
|
||||
|
||||
buf.byteLength; // => 8
|
||||
|
||||
const slice = buf.slice(0, 4); // returns new ArrayBuffer
|
||||
|
||||
@@ -249,6 +249,42 @@ writer.unref();
|
||||
writer.ref();
|
||||
```
|
||||
|
||||
## Directories
|
||||
|
||||
Bun's implementation of `node:fs` is fast, and we haven't implemented a Bun-specific API for reading directories just yet. For now, you should use `node:fs` for working with directories in Bun.
|
||||
|
||||
### Reading directories (readdir)
|
||||
|
||||
To read a directory in Bun, use `readdir` from `node:fs`.
|
||||
|
||||
```ts
|
||||
import { readdir } from "node:fs/promises";
|
||||
|
||||
// read all the files in the current directory
|
||||
const files = await readdir(import.meta.dir);
|
||||
```
|
||||
|
||||
#### Reading directories recursively
|
||||
|
||||
To recursively read a directory in Bun, use `readdir` with `recursive: true`.
|
||||
|
||||
```ts
|
||||
import { readdir } from "node:fs/promises";
|
||||
|
||||
// read all the files in the current directory, recursively
|
||||
const files = await readdir("../", { recursive: true });
|
||||
```
|
||||
|
||||
### Creating directories (mkdir)
|
||||
|
||||
To recursively create a directory, use `mkdir` in `node:fs`:
|
||||
|
||||
```ts
|
||||
import { mkdir } from "node:fs/promises";
|
||||
|
||||
await mkdir("path/to/dir", { recursive: true });
|
||||
```
|
||||
|
||||
## Benchmarks
|
||||
|
||||
The following is a 3-line implementation of the Linux `cat` command.
|
||||
@@ -300,7 +336,7 @@ interface BunFile {
|
||||
readonly type: string;
|
||||
|
||||
text(): Promise<string>;
|
||||
stream(): Promise<ReadableStream>;
|
||||
stream(): ReadableStream;
|
||||
arrayBuffer(): Promise<ArrayBuffer>;
|
||||
json(): Promise<any>;
|
||||
writer(params: { highWaterMark?: number }): FileSink;
|
||||
|
||||
@@ -46,7 +46,7 @@ router.match("/settings?foo=bar");
|
||||
kind: "dynamic",
|
||||
name: "/settings",
|
||||
pathname: "/settings?foo=bar",
|
||||
src: "https://mydomain.com/_next/static/pages/settings.tsx"
|
||||
src: "https://mydomain.com/_next/static/pages/settings.tsx",
|
||||
query: {
|
||||
foo: "bar"
|
||||
}
|
||||
@@ -64,7 +64,7 @@ router.match("/blog/my-cool-post");
|
||||
kind: "dynamic",
|
||||
name: "/blog/[slug]",
|
||||
pathname: "/blog/my-cool-post",
|
||||
src: "https://mydomain.com/_next/static/pages/blog/[slug].tsx"
|
||||
src: "https://mydomain.com/_next/static/pages/blog/[slug].tsx",
|
||||
params: {
|
||||
slug: "my-cool-post"
|
||||
}
|
||||
|
||||
157
docs/api/glob.md
Normal file
157
docs/api/glob.md
Normal file
@@ -0,0 +1,157 @@
|
||||
Bun includes a fast native implementation of file globbing.
|
||||
|
||||
## Quickstart
|
||||
|
||||
**Scan a directory for files matching `*.ts`**:
|
||||
|
||||
```ts
|
||||
import { Glob } from "bun";
|
||||
|
||||
const glob = new Glob("*.ts");
|
||||
|
||||
// Scans the current working directory and each of its sub-directories recursively
|
||||
for await (const file of glob.scan(".")) {
|
||||
console.log(file); // => "index.ts"
|
||||
}
|
||||
```
|
||||
|
||||
**Match a string against a glob pattern**:
|
||||
|
||||
```ts
|
||||
import { Glob } from "bun";
|
||||
|
||||
const glob = new Glob("*.ts");
|
||||
|
||||
glob.match("index.ts"); // => true
|
||||
glob.match("index.js"); // => false
|
||||
```
|
||||
|
||||
`Glob` is a class which implements the following interface:
|
||||
|
||||
```ts
|
||||
class Glob {
|
||||
scan(root: string | ScanOptions): AsyncIterable<string>;
|
||||
scanSync(root: string | ScanOptions): Iterable<string>;
|
||||
|
||||
match(path: string): boolean;
|
||||
}
|
||||
|
||||
interface ScanOptions {
|
||||
/**
|
||||
* The root directory to start matching from. Defaults to `process.cwd()`
|
||||
*/
|
||||
cwd?: string;
|
||||
|
||||
/**
|
||||
* Allow patterns to match entries that begin with a period (`.`).
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
dot?: boolean;
|
||||
|
||||
/**
|
||||
* Return the absolute path for entries.
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
absolute?: boolean;
|
||||
|
||||
/**
|
||||
* Indicates whether to traverse descendants of symbolic link directories.
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
followSymlinks?: boolean;
|
||||
|
||||
/**
|
||||
* Throw an error when symbolic link is broken
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
throwErrorOnBrokenSymlink?: boolean;
|
||||
|
||||
/**
|
||||
* Return only files.
|
||||
*
|
||||
* @default true
|
||||
*/
|
||||
onlyFiles?: boolean;
|
||||
}
|
||||
```
|
||||
|
||||
## Supported Glob Patterns
|
||||
|
||||
Bun supports the following glob patterns:
|
||||
|
||||
### `?` - Match any single character
|
||||
|
||||
```ts
|
||||
const glob = new Glob("???.ts");
|
||||
glob.match("foo.ts"); // => true
|
||||
glob.match("foobar.ts"); // => false
|
||||
```
|
||||
|
||||
### `*` - Matches zero or more characters, except for path separators (`/` or `\`)
|
||||
|
||||
```ts
|
||||
const glob = new Glob("*.ts");
|
||||
glob.match("index.ts"); // => true
|
||||
glob.match("src/index.ts"); // => false
|
||||
```
|
||||
|
||||
### `**` - Match any number of characters including `/`
|
||||
|
||||
```ts
|
||||
const glob = new Glob("**/*.ts");
|
||||
glob.match("index.ts"); // => true
|
||||
glob.match("src/index.ts"); // => true
|
||||
glob.match("src/index.js"); // => false
|
||||
```
|
||||
|
||||
### `[ab]` - Matches one of the characters contained in the brackets, as well as character ranges
|
||||
|
||||
```ts
|
||||
const glob = new Glob("ba[rz].ts");
|
||||
glob.match("bar.ts"); // => true
|
||||
glob.match("baz.ts"); // => true
|
||||
glob.match("bat.ts"); // => false
|
||||
```
|
||||
|
||||
You can use character ranges (e.g. `[0-9]`, `[a-z]`) as well as the negation operators `^` or `!` to match anything _except_ the characters contained within the brackets (e.g. `[^ab]`, `[!a-z]`)
|
||||
|
||||
```ts
|
||||
const glob = new Glob("ba[a-z][0-9][^4-9].ts");
|
||||
glob.match("bar01.ts"); // => true
|
||||
glob.match("baz83.ts"); // => true
|
||||
glob.match("bat22.ts"); // => true
|
||||
glob.match("bat24.ts"); // => false
|
||||
glob.match("ba0a8.ts"); // => false
|
||||
```
|
||||
|
||||
### `{a,b,c}` - Match any of the given patterns
|
||||
|
||||
```ts
|
||||
const glob = new Glob("{a,b,c}.ts");
|
||||
glob.match("a.ts"); // => true
|
||||
glob.match("b.ts"); // => true
|
||||
glob.match("c.ts"); // => true
|
||||
glob.match("d.ts"); // => false
|
||||
```
|
||||
|
||||
These match patterns can be deeply nested (up to 10 levels), and contain any of the wildcards from above.
|
||||
|
||||
### `!` - Negates the result at the start of a pattern
|
||||
|
||||
```ts
|
||||
const glob = new Glob("!index.ts");
|
||||
glob.match("index.ts"); // => false
|
||||
glob.match("foo.ts"); // => true
|
||||
```
|
||||
|
||||
### `\` - Escapes any of the special characters above
|
||||
|
||||
```ts
|
||||
const glob = new Glob("\\!index.ts");
|
||||
glob.match("!index.ts"); // => true
|
||||
glob.match("index.ts"); // => false
|
||||
```
|
||||
@@ -171,14 +171,14 @@ Optionally, you can override the trusted CA certificates by passing a value for
|
||||
});
|
||||
```
|
||||
|
||||
To override Diffie-Helman parameters:
|
||||
To override Diffie-Hellman parameters:
|
||||
|
||||
```ts
|
||||
Bun.serve({
|
||||
// ...
|
||||
tls: {
|
||||
// other config
|
||||
dhParamsFile: "/path/to/dhparams.pem", // path to Diffie Helman parameters
|
||||
dhParamsFile: "/path/to/dhparams.pem", // path to Diffie Hellman parameters
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
@@ -23,6 +23,11 @@ import.meta.resolveSync("zod")
|
||||
|
||||
---
|
||||
|
||||
- `import.meta.dirname`
|
||||
- An alias to `import.meta.dir`, for Node.js compatibility
|
||||
|
||||
---
|
||||
|
||||
- `import.meta.file`
|
||||
- The name of the current file, e.g. `index.tsx`
|
||||
|
||||
@@ -33,11 +38,26 @@ import.meta.resolveSync("zod")
|
||||
|
||||
---
|
||||
|
||||
- `import.meta.filename`
|
||||
- An alias to `import.meta.path`, for Node.js compatibility
|
||||
|
||||
---
|
||||
|
||||
- `import.meta.url`
|
||||
- A string URL to the current file, e.g. `file:///path/to/project/index.tsx`
|
||||
|
||||
---
|
||||
|
||||
- `import.meta.main`
|
||||
- `boolean` Indicates whether the current file is the entrypoint to the current `bun` process. Is the file being directly executed by `bun run` or is it being imported?
|
||||
|
||||
---
|
||||
|
||||
- `import.meta.env`
|
||||
- An alias to `process.env`.
|
||||
|
||||
---
|
||||
|
||||
- `import.meta.resolve{Sync}`
|
||||
- Resolve a module specifier (e.g. `"zod"` or `"./file.tsx"`) to an absolute path. Which file would be imported if the specifier were imported from this file?
|
||||
|
||||
|
||||
@@ -57,6 +57,23 @@ import { Database } from "bun:sqlite";
|
||||
const db = new Database("mydb.sqlite", { create: true });
|
||||
```
|
||||
|
||||
### Load via ES module import
|
||||
|
||||
You can also use an import attribute to load a database.
|
||||
|
||||
```ts
|
||||
import db from "./mydb.sqlite" with {"type": "sqlite"};
|
||||
|
||||
console.log(db.query("select * from users LIMIT 1").get());
|
||||
```
|
||||
|
||||
This is equivalent to the following:
|
||||
|
||||
```ts
|
||||
import { Database } from "bun:sqlite";
|
||||
const db = new Database("./mydb.sqlite");
|
||||
```
|
||||
|
||||
### `.close()`
|
||||
|
||||
To close a database:
|
||||
|
||||
@@ -73,10 +73,11 @@ sink.end();
|
||||
// ArrayBuffer(5) [ 104, 101, 108, 108, 111 ]
|
||||
```
|
||||
|
||||
To instead retrieve the data as a `Uint8Array`, pass the `asUint8Array` option to the constructor.
|
||||
To instead retrieve the data as a `Uint8Array`, pass the `asUint8Array` option to the `start` method.
|
||||
|
||||
```ts-diff
|
||||
const sink = new Bun.ArrayBufferSink({
|
||||
const sink = new Bun.ArrayBufferSink();
|
||||
sink.start({
|
||||
+ asUint8Array: true
|
||||
});
|
||||
|
||||
@@ -103,7 +104,8 @@ sink.end();
|
||||
Once `.end()` is called, no more data can be written to the `ArrayBufferSink`. However, in the context of buffering a stream, it's useful to continuously write data and periodically `.flush()` the contents (say, into a `WriteableStream`). To support this, pass `stream: true` to the constructor.
|
||||
|
||||
```ts
|
||||
const sink = new Bun.ArrayBufferSink({
|
||||
const sink = new Bun.ArrayBufferSink();
|
||||
sink.start({
|
||||
stream: true,
|
||||
});
|
||||
|
||||
@@ -124,7 +126,8 @@ The `.flush()` method returns the buffered data as an `ArrayBuffer` (or `Uint8Ar
|
||||
To manually set the size of the internal buffer in bytes, pass a value for `highWaterMark`:
|
||||
|
||||
```ts
|
||||
const sink = new Bun.ArrayBufferSink({
|
||||
const sink = new Bun.ArrayBufferSink();
|
||||
sink.start({
|
||||
highWaterMark: 1024 * 1024, // 1 MB
|
||||
});
|
||||
```
|
||||
|
||||
@@ -195,7 +195,10 @@ socket.write("hello");
|
||||
To simplify this for now, consider using Bun's `ArrayBufferSink` with the `{stream: true}` option:
|
||||
|
||||
```ts
|
||||
const sink = new ArrayBufferSink({ stream: true, highWaterMark: 1024 });
|
||||
import { ArrayBufferSink } from "bun";
|
||||
|
||||
const sink = new ArrayBufferSink();
|
||||
sink.start({ stream: true, highWaterMark: 1024 });
|
||||
|
||||
sink.write("h");
|
||||
sink.write("e");
|
||||
@@ -204,10 +207,11 @@ sink.write("l");
|
||||
sink.write("o");
|
||||
|
||||
queueMicrotask(() => {
|
||||
var data = sink.flush();
|
||||
if (!socket.write(data)) {
|
||||
const data = sink.flush();
|
||||
const wrote = socket.write(data);
|
||||
if (wrote < data.byteLength) {
|
||||
// put it back in the sink if the socket is full
|
||||
sink.write(data);
|
||||
sink.write(data.subarray(wrote));
|
||||
}
|
||||
});
|
||||
```
|
||||
|
||||
@@ -378,10 +378,10 @@ Decompresses a `Uint8Array` using zlib's GUNZIP algorithm.
|
||||
|
||||
```ts
|
||||
const buf = Buffer.from("hello".repeat(100)); // Buffer extends Uint8Array
|
||||
const compressed = Bun.gunzipSync(buf);
|
||||
const compressed = Bun.gzipSync(buf);
|
||||
|
||||
const dec = new TextDecoder();
|
||||
const uncompressed = Bun.inflateSync(compressed);
|
||||
const uncompressed = Bun.gunzipSync(compressed);
|
||||
dec.decode(uncompressed);
|
||||
// => "hellohellohello..."
|
||||
```
|
||||
|
||||
@@ -245,11 +245,35 @@ The `.send(message)` method of `ServerWebSocket` returns a `number` indicating t
|
||||
|
||||
This gives you better control over backpressure in your server.
|
||||
|
||||
## Connect to a `Websocket` server
|
||||
### Timeouts and limits
|
||||
|
||||
{% callout %}
|
||||
**🚧** — The `WebSocket` client still does not pass the full [Autobahn test suite](https://github.com/crossbario/autobahn-testsuite) and should not be considered ready for production.
|
||||
{% /callout %}
|
||||
By default, Bun will close a WebSocket connection if it is idle for 120 seconds. This can be configured with the `idleTimeout` parameter.
|
||||
|
||||
```ts
|
||||
Bun.serve({
|
||||
fetch(req, server) {}, // upgrade logic
|
||||
websocket: {
|
||||
idleTimeout: 60, // 60 seconds
|
||||
|
||||
// ...
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
Bun will also close a WebSocket connection if it receives a message that is larger than 16 MB. This can be configured with the `maxPayloadLength` parameter.
|
||||
|
||||
```ts
|
||||
Bun.serve({
|
||||
fetch(req, server) {}, // upgrade logic
|
||||
websocket: {
|
||||
maxPayloadLength: 1024 * 1024, // 1 MB
|
||||
|
||||
// ...
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
## Connect to a `WebSocket` server
|
||||
|
||||
Bun implements the `WebSocket` class. To create a WebSocket client that connects to a `ws://` or `wss://` server, create an instance of `WebSocket`, as you would in the browser.
|
||||
|
||||
@@ -300,6 +324,14 @@ namespace Bun {
|
||||
close?: (ws: ServerWebSocket) => void;
|
||||
error?: (ws: ServerWebSocket, error: Error) => void;
|
||||
drain?: (ws: ServerWebSocket) => void;
|
||||
|
||||
maxPayloadLength?: number; // default: 16 * 1024 * 1024 = 16 MB
|
||||
idleTimeout?: number; // default: 120 (seconds)
|
||||
backpressureLimit?: number; // default: 1024 * 1024 = 1 MB
|
||||
closeOnBackpressureLimit?: boolean; // default: false
|
||||
sendPings?: boolean; // default: true
|
||||
publishToSelf?: boolean; // default: false
|
||||
|
||||
perMessageDeflate?:
|
||||
| boolean
|
||||
| {
|
||||
|
||||
@@ -32,6 +32,25 @@ All imported files and packages are bundled into the executable, along with a co
|
||||
|
||||
{% /callout %}
|
||||
|
||||
## SQLite
|
||||
|
||||
You can use `bun:sqlite` imports with `bun build --compile`.
|
||||
|
||||
By default, the database is resolved relative to the current working directory of the process.
|
||||
|
||||
```js
|
||||
import db from './my.db' with {type: "sqlite"};
|
||||
|
||||
console.log(db.query("select * from users LIMIT 1").get());
|
||||
```
|
||||
|
||||
That means if the executable is located at `/usr/bin/hello`, the user's terminal is located at `/home/me/Desktop`, it will look for `/home/me/Desktop/my.db`.
|
||||
|
||||
```
|
||||
$ cd /home/me/Desktop
|
||||
$ ./hello
|
||||
```
|
||||
|
||||
## Embedding files
|
||||
|
||||
Standalone executables support embedding files.
|
||||
@@ -55,6 +74,30 @@ You may need to specify a `--loader` for it to be treated as a `"file"` loader (
|
||||
|
||||
Embedded files can be read using `Bun.file`'s functions or the Node.js `fs.readFile` function (in `"node:fs"`).
|
||||
|
||||
### Embedding SQLite databases
|
||||
|
||||
If your application wants to embed a SQLite database, set `type: "sqlite"` in the import attribute and the `embed` attribute to `"true"`.
|
||||
|
||||
```js
|
||||
import myEmbeddedDb from "./my.db" with {type: "sqlite", embed: "true"};
|
||||
|
||||
console.log(myEmbeddedDb.query("select * from users LIMIT 1").get());
|
||||
```
|
||||
|
||||
This database is read-write, but all changes are lost when the executable exits (since it's stored in memory).
|
||||
|
||||
### Embedding N-API Addons
|
||||
|
||||
As of Bun v1.0.23, you can embed `.node` files into executables.
|
||||
|
||||
```js
|
||||
const addon = require("./addon.node");
|
||||
|
||||
console.log(addon.hello());
|
||||
```
|
||||
|
||||
Unfortunately, if you're using `@mapbox/node-pre-gyp` or other similar tools, you'll need to make sure the `.node` file is directly required or it won't bundle correctly.
|
||||
|
||||
## Minification
|
||||
|
||||
To trim down the size of the executable a little, pass `--minify` to `bun build --compile`. This uses Bun's minifier to reduce the code size. Overall though, Bun's binary is still way too big and we need to make it smaller.
|
||||
|
||||
@@ -276,15 +276,15 @@ const result = await Bun.build({
|
||||
entrypoints: ["./index.ts"],
|
||||
});
|
||||
|
||||
for (const result of result.outputs) {
|
||||
for (const res of result.outputs) {
|
||||
// Can be consumed as blobs
|
||||
await result.text();
|
||||
await res.text();
|
||||
|
||||
// Bun will set Content-Type and Etag headers
|
||||
new Response(result);
|
||||
new Response(res);
|
||||
|
||||
// Can be written manually, but you should use `outdir` in this case.
|
||||
Bun.write(path.join("out", result.path), result);
|
||||
Bun.write(path.join("out", res.path), res);
|
||||
}
|
||||
```
|
||||
|
||||
@@ -337,10 +337,6 @@ Depending on the target, Bun will apply different module resolution rules and op
|
||||
|
||||
{% /table %}
|
||||
|
||||
{% callout %}
|
||||
|
||||
{% /callout %}
|
||||
|
||||
### `format`
|
||||
|
||||
Specifies the module format to be used in the generated bundles.
|
||||
@@ -972,8 +968,6 @@ By specifying `.` as `root`, the generated file structure will look like this:
|
||||
|
||||
A prefix to be appended to any import paths in bundled code.
|
||||
|
||||
<!-- $ bun build ./index.tsx --outdir ./out --public-path https://cdn.example.com -->
|
||||
|
||||
In many cases, generated bundles will contain no `import` statements. After all, the goal of bundling is to combine all of the code into a single file. However, there are a number of cases in which the generated bundles will contain `import` statements.
|
||||
|
||||
- **Asset imports** — When importing an unrecognized file type like `*.svg`, the bundler defers to the [`file` loader](/docs/bundler/loaders#file), which copies the file into `outdir` as is. The import is converted into a variable
|
||||
@@ -1011,7 +1005,7 @@ await Bun.build({
|
||||
```
|
||||
|
||||
```bash#CLI
|
||||
n/a
|
||||
$ bun build ./index.tsx --outdir ./out --public-path https://cdn.example.com/
|
||||
```
|
||||
|
||||
{% /codetabs %}
|
||||
@@ -1052,7 +1046,7 @@ $ bun build ./index.tsx --outdir ./out --define 'STRING="value"' --define "neste
|
||||
|
||||
### `loader`
|
||||
|
||||
A map of file extensions to [built-in loader names](https://bun.sh/docs/bundler/loaders#built-in-loaders). This can be used to quickly customize how certain file files are loaded.
|
||||
A map of file extensions to [built-in loader names](https://bun.sh/docs/bundler/loaders#built-in-loaders). This can be used to quickly customize how certain files are loaded.
|
||||
|
||||
{% codetabs %}
|
||||
|
||||
|
||||
@@ -171,6 +171,31 @@ console.log(addon);
|
||||
|
||||
In the bundler, `.node` files are handled using the [`file`](#file) loader.
|
||||
|
||||
### `sqlite`
|
||||
|
||||
**SQLite loader**. `with { "type": "sqlite" }` import attribute
|
||||
|
||||
In the runtime and bundler, SQLite databases can be directly imported. This will load the database using [`bun:sqlite`](/docs/api/sqlite.md).
|
||||
|
||||
```ts
|
||||
import db from "./my.db" with {type: "sqlite"};
|
||||
```
|
||||
|
||||
This is only supported when the `target` is `bun`.
|
||||
|
||||
By default, the database is external to the bundle (so that you can potentially use a database loaded elsewhere), so the database file on-disk won't be bundled into the final output.
|
||||
|
||||
You can change this behavior with the `"embed"` attribute:
|
||||
|
||||
```ts
|
||||
// embed the database into the bundle
|
||||
import db from "./my.db" with {type: "sqlite", embed: "true"};
|
||||
```
|
||||
|
||||
When using a [standalone executable](/docs/bundler/executables), the database is embedded into the single-file executable.
|
||||
|
||||
Otherwise, the database to embed is copied into the `outdir` with a hashed filename.
|
||||
|
||||
### `file`
|
||||
|
||||
**File loader**. Default for all unrecognized file types.
|
||||
|
||||
@@ -119,7 +119,7 @@ export function returnFalse() {
|
||||
}
|
||||
```
|
||||
|
||||
...then bundling the following file will produce an empty bundle.
|
||||
...then bundling the following file will produce an empty bundle, provided that the minify syntax option is enabled.
|
||||
|
||||
```ts
|
||||
import {returnFalse} from './returnFalse.ts' with { type: 'macro' };
|
||||
|
||||
@@ -244,7 +244,7 @@ ELSE IF local template
|
||||
5. Auto-detect the npm client, preferring `pnpm`, `yarn` (v1), and lastly `npm`
|
||||
6. Run any tasks defined in `"bun-create": { "preinstall" }` with the npm client
|
||||
7. Run `${npmClient} install` unless `--no-install` is passed OR no dependencies are in package.json
|
||||
8. Run any tasks defined in `"bun-create": { "preinstall" }` with the npm client
|
||||
8. Run any tasks defined in `"bun-create": { "postinstall" }` with the npm client
|
||||
9. Run `git init; git add -A .; git commit -am "Initial Commit";`
|
||||
|
||||
- Rename `gitignore` to `.gitignore`. NPM automatically removes `.gitignore` files from appearing in packages.
|
||||
|
||||
@@ -62,6 +62,9 @@ dev = true
|
||||
# Install peerDependencies (default: true)
|
||||
peer = true
|
||||
|
||||
# Max number of concurrent lifecycle scripts (default: (cpu count or GOMAXPROCS) x2)
|
||||
concurrentScripts = 16
|
||||
|
||||
# When using `bun install -g`, install packages here
|
||||
globalDir = "~/.bun/install/global"
|
||||
|
||||
|
||||
@@ -1,116 +0,0 @@
|
||||
Bundling is currently an important mechanism for building complex web apps.
|
||||
|
||||
Modern apps typically consist of a large number of files and package dependencies. Despite the fact that modern browsers support [ES Module](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Modules) imports, it's still too slow to fetch each file via individual HTTP requests. _Bundling_ is the process of concatenating several source files into a single large file that can be loaded in a single request.
|
||||
|
||||
{% callout %}
|
||||
**On bundling** — Despite recent advances like [`modulepreload`](https://developer.mozilla.org/en-US/docs/Web/HTML/Attributes/rel/modulepreload) and [HTTP/3](https://en.wikipedia.org/wiki/HTTP/3), bundling is still the most performant approach.
|
||||
{% /callout %}
|
||||
|
||||
## Bundling your app
|
||||
|
||||
Bun's approach to bundling is a little different from other bundlers. Start by passing your app's entrypoint to `bun bun`.
|
||||
|
||||
```bash
|
||||
$ bun bun ./app.js
|
||||
```
|
||||
|
||||
Your entrypoint can be any `js|jsx|ts|tsx|html` file. With this file as a starting point, Bun will construct a graph of imported files and packages, transpile everything, and generate a file called `node_modules.bun`.
|
||||
|
||||
## What is `.bun`?
|
||||
|
||||
{% callout %}
|
||||
**Note** — [This format may change soon](https://github.com/oven-sh/bun/issues/121)
|
||||
{% /callout %}
|
||||
|
||||
A `.bun` file contains the pre-transpiled source code of your application, plus a bunch of binary-encoded metadata about your application's structure. It contains:
|
||||
|
||||
- all the bundled source code
|
||||
- all the bundled source code metadata
|
||||
- project metadata & configuration
|
||||
|
||||
Here are some of the questions `.bun` files answer:
|
||||
|
||||
- when I import `react/index.js`, where in the `.bun` is the code for that? (not resolving, just the code)
|
||||
- what modules of a package are used?
|
||||
- what framework is used? (e.g., Next.js)
|
||||
- where is the routes directory?
|
||||
- how big is each imported dependency?
|
||||
- what is the hash of the bundle’s contents? (for etags)
|
||||
- what is the name & version of every npm package exported in this bundle?
|
||||
- what modules from which packages are used in this project? ("project" is defined as all the entry points used to generate the .bun)
|
||||
|
||||
All in one file.
|
||||
|
||||
It’s a little like a build cache, but designed for reuse across builds.
|
||||
|
||||
{% details summary="Position-independent code" %}
|
||||
|
||||
From a design perspective, the most important part of the `.bun` format is how code is organized. Each module is exported by a hash like this:
|
||||
|
||||
```js
|
||||
// preact/dist/preact.module.js
|
||||
export var $eb6819b = $$m({
|
||||
"preact/dist/preact.module.js": (module, exports) => {
|
||||
let n, l, u, i, t, o, r, f, e = {}, c = [], s = /acit|ex(?:s|g|n|p|$)|rph|grid|ows|mnc|ntw|ine[ch]|zoo|^ord|itera/i;
|
||||
// ... rest of code
|
||||
```
|
||||
|
||||
This makes bundled modules [position-independent](https://en.wikipedia.org/wiki/Position-independent_code). In theory, one could import only the exact modules in-use without reparsing code and without generating a new bundle. One bundle can dynamically become many bundles comprising only the modules in use on the webpage. Thanks to the metadata with the byte offsets, a web server can send each module to browsers [zero-copy](https://en.wikipedia.org/wiki/Zero-copy) using [sendfile](https://man7.org/linux/man-pages/man2/sendfile.2.html). Bun itself is not quite this smart yet, but these optimizations would be useful in production and potentially very useful for React Server Components.
|
||||
|
||||
To see the schema inside, have a look at [`JavascriptBundleContainer`](./src/api/schema.d.ts#:~:text=export%20interface-,JavascriptBundleContainer,-%7B). You can find JavaScript bindings to read the metadata in [src/api/schema.js](./src/api/schema.js). This is not really an API yet. It’s missing the part where it gets the binary data from the bottom of the file. Someday, I want this to be usable by other tools too.
|
||||
{% /details %}
|
||||
|
||||
## Where is the code?
|
||||
|
||||
`.bun` files are marked as executable.
|
||||
|
||||
To print out the code, run `./node_modules.bun` in your terminal or run `bun ./path-to-node_modules.bun`.
|
||||
|
||||
Here is a copy-pastable example:
|
||||
|
||||
```bash
|
||||
$ ./node_modules.bun > node_modules.js
|
||||
```
|
||||
|
||||
This works because every `.bun` file starts with this:
|
||||
|
||||
```
|
||||
#!/usr/bin/env bun
|
||||
```
|
||||
|
||||
To deploy to production with Bun, you’ll want to get the code from the `.bun` file and stick that somewhere your web server can find it (or if you’re using Vercel or a Rails app, in a `public` folder).
|
||||
|
||||
Note that `.bun` is a binary file format, so just opening it in VSCode or vim might render strangely.
|
||||
|
||||
## Advanced
|
||||
|
||||
By default, `bun bun` only bundles external dependencies that are `import`ed or `require`d in either app code or another external dependency. An "external dependency" is defined as, "A JavaScript-like file that has `/node_modules/` in the resolved file path and a corresponding `package.json`".
|
||||
|
||||
To force Bun to bundle packages which are not located in a `node_modules` folder (i.e., the final, resolved path following all symlinks), add a `bun` section to the root project’s `package.json` with `alwaysBundle` set to an array of package names to always bundle. Here’s an example:
|
||||
|
||||
```json
|
||||
{
|
||||
"name": "my-package-name-in-here",
|
||||
"bun": {
|
||||
"alwaysBundle": ["@mybigcompany/my-workspace-package"]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Bundled dependencies are not eligible for Hot Module Reloading. The code is served to browsers & Bun.js verbatim. But, in the future, it may be sectioned off into only parts of the bundle being used. That’s possible in the current version of the `.bun` file (so long as you know which files are necessary), but it’s not implemented yet. Longer-term, it will include all `import` and `export` of each module inside.
|
||||
|
||||
## What is the module ID hash?
|
||||
|
||||
The `$eb6819b` hash used here:
|
||||
|
||||
```js
|
||||
export var $eb6819b = $$m({
|
||||
```
|
||||
|
||||
Is generated like this:
|
||||
|
||||
1. Murmur3 32-bit hash of `package.name@package.version`. This is the hash uniquely identifying the npm package.
|
||||
2. Wyhash 64 of the `package.hash` + `package_path`. `package_path` means "relative to the root of the npm package, where is the module imported?". For example, if you imported `react/jsx-dev-runtime.js`, the `package_path` is `jsx-dev-runtime.js`. `react-dom/cjs/react-dom.development.js` would be `cjs/react-dom.development.js`
|
||||
3. Truncate the hash generated above to a `u32`
|
||||
|
||||
The implementation details of this module ID hash will vary between versions of Bun. The important part is the metadata contains the module IDs, the package paths, and the package hashes, so it shouldn’t really matter in practice if other tooling wants to make use of any of this.
|
||||
@@ -60,6 +60,12 @@ To tell Bun to allow lifecycle scripts for a particular package, add the package
|
||||
|
||||
Then re-install the package. Bun will read this field and run lifecycle scripts for `my-trusted-package`.
|
||||
|
||||
Lifecycle scripts will run in parallel during installation. To adjust the maximum number of concurrent scripts, use the `--concurrent-scripts` flag. The default is two times the reported CPU count or GOMAXPROCS.
|
||||
|
||||
```bash
|
||||
$ bun install --concurrent-scripts 5
|
||||
```
|
||||
|
||||
## Workspaces
|
||||
|
||||
Bun supports `"workspaces"` in package.json. For complete documentation refer to [Package manager > Workspaces](/docs/install/workspaces).
|
||||
@@ -172,6 +178,9 @@ frozenLockfile = false
|
||||
|
||||
# equivalent to `--dry-run` flag
|
||||
dryRun = false
|
||||
|
||||
# equivalent to `--concurrent-scripts` flag
|
||||
concurrentScripts = 16 # (cpu count or GOMAXPROCS) x2
|
||||
```
|
||||
|
||||
## CI/CD
|
||||
|
||||
@@ -36,11 +36,3 @@ In addition, the `--save` flag can be used to add `cool-pkg` to the `dependencie
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
To _unregister_ a local package, navigate to the package's root directory and run `bun unlink`.
|
||||
|
||||
```bash
|
||||
$ cd /path/to/cool-pkg
|
||||
$ bun unlink
|
||||
bun unlink v1.x (7416672e)
|
||||
```
|
||||
|
||||
@@ -47,6 +47,12 @@ To filter by _test name_, use the `-t`/`--test-name-pattern` flag.
|
||||
$ bun test --test-name-pattern addition
|
||||
```
|
||||
|
||||
To run a specific file in the test runner, make sure the path starts with `./` or `/` to distinguish it from a filter name.
|
||||
|
||||
```bash
|
||||
$ bun test ./test/specific-file.test.ts
|
||||
```
|
||||
|
||||
The test runner runs all tests in a single process. It loads all `--preload` scripts (see [Lifecycle](/docs/test/lifecycle) for details), then runs all tests. If a test fails, the test runner will exit with a non-zero exit code.
|
||||
|
||||
## Timeouts
|
||||
|
||||
7
docs/cli/unlink.md
Normal file
7
docs/cli/unlink.md
Normal file
@@ -0,0 +1,7 @@
|
||||
Use `bun unlink` in the root directory to unregister a local package.
|
||||
|
||||
```bash
|
||||
$ cd /path/to/cool-pkg
|
||||
$ bun unlink
|
||||
bun unlink v1.x (7416672e)
|
||||
```
|
||||
@@ -4,4 +4,14 @@ To update all dependencies to the latest version _that's compatible with the ver
|
||||
$ bun update
|
||||
```
|
||||
|
||||
This will not edit your `package.json`. There's currently no command to force-update all dependencies to the latest version regardless of version ranges.
|
||||
## `--force`
|
||||
|
||||
{% callout %}
|
||||
**Alias** — `-f`
|
||||
{% /callout %}
|
||||
|
||||
By default, Bun respects the version range defined in your package.json. To ignore this and update to the latest version, you can pass in the `force` flag.
|
||||
|
||||
```sh
|
||||
$ bun update --force
|
||||
```
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
name: Convert a Uint8Array to a string
|
||||
name: Convert a DataView to a string
|
||||
---
|
||||
|
||||
If a [`DataView`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/DataView) contains ASCII-encoded text, you can convert it to a string using the [`TextDecoder`](https://developer.mozilla.org/en-US/docs/Web/API/TextDecoder) class.
|
||||
|
||||
@@ -7,7 +7,7 @@ Bun implements the Web-standard [`TextDecoder`](https://developer.mozilla.org/en
|
||||
```ts
|
||||
const arr = new Uint8Array([104, 101, 108, 108, 111]);
|
||||
const decoder = new TextDecoder();
|
||||
const str = decoder.decode(buf);
|
||||
const str = decoder.decode(arr);
|
||||
// => "hello"
|
||||
```
|
||||
|
||||
|
||||
@@ -32,7 +32,7 @@ RUN cd /temp/prod && bun install --frozen-lockfile --production
|
||||
|
||||
# copy node_modules from temp directory
|
||||
# then copy all (non-ignored) project files into the image
|
||||
FROM install AS prerelease
|
||||
FROM base AS prerelease
|
||||
COPY --from=install /temp/dev/node_modules node_modules
|
||||
COPY . .
|
||||
|
||||
|
||||
@@ -90,7 +90,7 @@ Open that file and paste the following contents.
|
||||
```txt
|
||||
module default {
|
||||
type Movie {
|
||||
title: str;
|
||||
required title: str;
|
||||
releaseYear: int64;
|
||||
}
|
||||
};
|
||||
@@ -109,7 +109,7 @@ Applied m1uwekrn4ni4qs7ul7hfar4xemm5kkxlpswolcoyqj3xdhweomwjrq (00001.edgeql)
|
||||
|
||||
---
|
||||
|
||||
With our schema applied, let's execute some queries using EdgeDB's JavaScript client library. We'll install the client library and EdgeDB's codegen CLI, and create a `seed.ts` file.
|
||||
With our schema applied, let's execute some queries using EdgeDB's JavaScript client library. We'll install the client library and EdgeDB's codegen CLI, and create a `seed.ts` file.
|
||||
|
||||
```sh
|
||||
$ bun add edgedb
|
||||
@@ -121,7 +121,7 @@ $ touch seed.ts
|
||||
|
||||
Paste the following code into `seed.ts`.
|
||||
|
||||
The client auto-connects to the database. We insert a couple movies using the `.execute()` method.
|
||||
The client auto-connects to the database. We insert a couple movies using the `.execute()` method. We will use EdgeQL's `for` expression to turn this bulk insert into a single optimized query.
|
||||
|
||||
```ts
|
||||
import { createClient } from "edgedb";
|
||||
@@ -129,10 +129,13 @@ import { createClient } from "edgedb";
|
||||
const client = createClient();
|
||||
|
||||
const INSERT_MOVIE = `
|
||||
insert Movie {
|
||||
title := <str>$title,
|
||||
releaseYear := <int64>$year,
|
||||
}
|
||||
with movies := <array<tuple<title: str, year: int64>>>$movies
|
||||
for movie in array_unpack(movies) union (
|
||||
insert Movie {
|
||||
title := movie.title,
|
||||
releaseYear := movie.year,
|
||||
}
|
||||
)
|
||||
`;
|
||||
|
||||
const movies = [
|
||||
@@ -141,9 +144,7 @@ const movies = [
|
||||
{ title: "The Matrix Revolutions", year: 2003 },
|
||||
];
|
||||
|
||||
for (const movie of movies) {
|
||||
await client.execute(INSERT_MOVIE, movie);
|
||||
}
|
||||
await client.execute(INSERT_MOVIE, { movies });
|
||||
|
||||
console.log(`Seeding complete.`);
|
||||
process.exit();
|
||||
|
||||
@@ -21,7 +21,8 @@ $ bun init
|
||||
Then install the Prisma CLI (`prisma`) and Prisma Client (`@prisma/client`) as dependencies.
|
||||
|
||||
```bash
|
||||
$ bun add prisma @prisma/client
|
||||
$ bun add -d prisma
|
||||
$ bun add @prisma/client
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
@@ -14,5 +14,6 @@ const server = Bun.serve({
|
||||
},
|
||||
});
|
||||
|
||||
console.log(`Listening on localhost: ${server.port}`);
|
||||
console.log(`Listening on ${server.url}`);
|
||||
|
||||
```
|
||||
|
||||
@@ -12,7 +12,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
# ...
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
+ - uses: oven-sh/setup-bun@v1
|
||||
|
||||
# run any `bun` or `bunx` command
|
||||
|
||||
@@ -5,7 +5,7 @@ name: Add a trusted dependency
|
||||
Unlike other npm clients, Bun does not execute arbitrary lifecycle scripts for installed dependencies, such as `postinstall` and `node-gyp` builds. These scripts represent a potential security risk, as they can execute arbitrary code on your machine.
|
||||
|
||||
{% callout %}
|
||||
Soon, Bun will include a built-in allow-list that automatically allows lifecycle scripts to be run by popular packages that are known to be safe. This is still under development.
|
||||
Bun includes a default allowlist of popular packages containing `postinstall` scripts that are known to be safe. You can see this list [here](https://github.com/oven-sh/bun/blob/main/src/install/default-trusted-dependencies.txt).
|
||||
{% /callout %}
|
||||
|
||||
---
|
||||
|
||||
@@ -18,7 +18,7 @@ for await (const line of console) {
|
||||
Running this file results in a never-ending interactive prompt that echoes whatever the user types.
|
||||
|
||||
```sh
|
||||
$ bun run index.tsx
|
||||
$ bun run index.ts
|
||||
Type something: hello
|
||||
You typed: hello
|
||||
Type something: hello again
|
||||
|
||||
@@ -35,7 +35,7 @@ jobs:
|
||||
# ...
|
||||
- uses: oven-sh/setup-bun@v1
|
||||
+ with:
|
||||
+ version: 0.7.0 # or "canary"
|
||||
+ bun-version: 1.0.11 # or "latest", "canary", <sha>
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
@@ -2,31 +2,10 @@
|
||||
name: Install TypeScript declarations for Bun
|
||||
---
|
||||
|
||||
To install TypeScript definitions for Bun's built-in APIs in your project, install `bun-types`.
|
||||
To install TypeScript definitions for Bun's built-in APIs in your project, install `@types/bun`.
|
||||
|
||||
```sh
|
||||
$ bun add -d bun-types # dev dependency
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
Then include `"bun-types"` in the `compilerOptions.types` in your `tsconfig.json`:
|
||||
|
||||
```json-diff
|
||||
{
|
||||
"compilerOptions": {
|
||||
+ "types": ["bun-types"]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
Unfortunately, setting a value for `"types"` means that TypeScript will ignore other global type definitions, including `lib: ["dom"]`. If you need to add DOM types into your project, add the following [triple-slash directives](https://www.typescriptlang.org/docs/handbook/triple-slash-directives.html) at the top of any TypeScript file in your project.
|
||||
|
||||
```ts
|
||||
/// <reference lib="dom" />
|
||||
/// <reference lib="dom.iterable" />
|
||||
$ bun add -d @types/bun # dev dependency
|
||||
```
|
||||
|
||||
---
|
||||
@@ -36,30 +15,30 @@ Below is the full set of recommended `compilerOptions` for a Bun project. With t
|
||||
```jsonc
|
||||
{
|
||||
"compilerOptions": {
|
||||
// add Bun type definitions
|
||||
"types": ["bun-types"],
|
||||
|
||||
// enable latest features
|
||||
"lib": ["esnext"],
|
||||
"module": "esnext",
|
||||
"target": "esnext",
|
||||
|
||||
// if TS 5.x+
|
||||
"moduleResolution": "bundler",
|
||||
"noEmit": true,
|
||||
"allowImportingTsExtensions": true,
|
||||
"lib": ["ESNext"],
|
||||
"target": "ESNext",
|
||||
"module": "ESNext",
|
||||
"moduleDetection": "force",
|
||||
// if TS 4.x or earlier
|
||||
// "moduleResolution": "nodenext",
|
||||
|
||||
"jsx": "react-jsx", // support JSX
|
||||
"allowJs": true, // allow importing `.js` from `.ts`
|
||||
"esModuleInterop": true, // allow default imports for CommonJS modules
|
||||
|
||||
// best practices
|
||||
// Bundler mode
|
||||
"moduleResolution": "bundler",
|
||||
"allowImportingTsExtensions": true,
|
||||
"verbatimModuleSyntax": true,
|
||||
"noEmit": true,
|
||||
|
||||
// Best practices
|
||||
"strict": true,
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"skipLibCheck": true
|
||||
"skipLibCheck": true,
|
||||
"noUnusedLocals": true,
|
||||
"noUnusedParameters": true,
|
||||
"noFallthroughCasesInSwitch": true,
|
||||
|
||||
// Some stricter flags
|
||||
"useUnknownInCatchVariables": true,
|
||||
"noPropertyAccessFromIndexSignature": true
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
name: Convert a ReadableStream to a JSON
|
||||
name: Convert a ReadableStream to JSON
|
||||
---
|
||||
|
||||
Bun provides a number of convenience functions for reading the contents of a [`ReadableStream`](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream) into different formats.
|
||||
|
||||
@@ -32,7 +32,6 @@ Some notable missing features:
|
||||
|
||||
- `expect.extend()`
|
||||
- `expect().toMatchInlineSnapshot()`
|
||||
- `expect().toHaveBeenCalledWith()`
|
||||
- `expect().toHaveReturned()`
|
||||
|
||||
---
|
||||
|
||||
@@ -64,7 +64,7 @@ Ran 2 tests across 1 files. [15.00ms]
|
||||
All tests have a name, defined using the first parameter to the `test` function. Tests can also be grouped into suites with `describe`.
|
||||
|
||||
```ts
|
||||
import { test, expect } from "bun:test";
|
||||
import { test, expect, describe } from "bun:test";
|
||||
|
||||
describe("math", () => {
|
||||
test("add", () => {
|
||||
|
||||
@@ -13,7 +13,7 @@ const hash = await Bun.password.hash(password);
|
||||
|
||||
---
|
||||
|
||||
By default this uses the [Argon2id](https://en.wikipedia.org/wiki/Argon2) algorithm. Pass a second argument to `Bun.hash.password()` to use a different algorithm or configure the hashing parameters.
|
||||
By default, this uses the [Argon2id](https://en.wikipedia.org/wiki/Argon2) algorithm. Pass a second argument to `Bun.password.hash()` to use a different algorithm or configure the hashing parameters.
|
||||
|
||||
```ts
|
||||
const password = "super-secure-pa$$word";
|
||||
@@ -39,7 +39,7 @@ const bcryptHash = await Bun.password.hash(password, {
|
||||
|
||||
---
|
||||
|
||||
To verify a password, use `Bun.password.verify()`. The algorithm and its parameters are stored in the hash itself, so there's no need to re-specify any configuration.
|
||||
Use `Bun.password.verify()` to verify a password. The algorithm and its parameters are stored in the hash itself, so re-specifying configuration is unnecessary.
|
||||
|
||||
```ts
|
||||
const password = "super-secure-pa$$word";
|
||||
|
||||
@@ -13,7 +13,7 @@ $ bun run start # run the `start` script
|
||||
$ bun install <pkg> # install a package
|
||||
$ bun build ./index.tsx # bundle a project for browsers
|
||||
$ bun test # run tests
|
||||
$ bunx cowsay "Hello, world!" # execute a package
|
||||
$ bunx cowsay 'Hello, world!' # execute a package
|
||||
```
|
||||
|
||||
{% callout type="note" %}
|
||||
|
||||
@@ -37,7 +37,7 @@ Once a package is downloaded into the cache, Bun still needs to copy those files
|
||||
|
||||
Since Bun uses hardlinks to "copy" a module into a project's `node_modules` directory on Linux, the contents of the package only exist in a single location on disk, greatly reducing the amount of disk space dedicated to `node_modules`.
|
||||
|
||||
This benefit does not extend to macOS, which uses `clonefile` for performance reasons.
|
||||
This benefit also applies to macOS, but there are exceptions. It uses `clonefile`, which is copy-on-write, meaning it will not occupy disk space, but it will count towards the drive's limit. This behavior is useful if something attempts to patch `node_modules/*`, so it's impossible to affect other installations.
|
||||
|
||||
{% details summary="Installation strategies" %}
|
||||
This behavior is configurable with the `--backend` flag, which is respected by all of Bun's package management commands.
|
||||
|
||||
@@ -91,6 +91,9 @@ frozenLockfile = false
|
||||
|
||||
# equivalent to `--dry-run` flag
|
||||
dryRun = false
|
||||
|
||||
# equivalent to `--concurrent-scripts` flag
|
||||
concurrentScripts = 16 # (cpu count or GOMAXPROCS) x2
|
||||
```
|
||||
|
||||
{% /details %}
|
||||
|
||||
@@ -35,6 +35,8 @@ Instead of executing arbitrary scripts, Bun uses a "default-secure" approach. Yo
|
||||
|
||||
Once added to `trustedDependencies`, install/re-install the package. Bun will read this field and run lifecycle scripts for `my-trusted-package`.
|
||||
|
||||
As of Bun v1.0.16, the top 500 npm packages with lifecycle scripts are allowed by default. You can see the full list [here](https://github.com/oven-sh/bun/blob/main/src/install/default-trusted-dependencies.txt).
|
||||
|
||||
## `--ignore-scripts`
|
||||
|
||||
To disable lifecycle scripts for all packages, use the `--ignore-scripts` flag.
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
Bun supports npm's `"overrides"` and Yarn's `"resolutions"` in `package.json`. These are mechanisms for specifying a version range for _metadependencies_—the dependencies of your dependencies. Refer to [Package manager > Overrides and resolutions](/docs/install/overrides) for complete documentation.
|
||||
Bun supports npm's `"overrides"` and Yarn's `"resolutions"` in `package.json`. These are mechanisms for specifying a version range for _metadependencies_—the dependencies of your dependencies.
|
||||
|
||||
```json-diff#package.json
|
||||
{
|
||||
|
||||
@@ -17,7 +17,7 @@ $ curl -fsSL https://bun.sh/install | bash # for macOS, Linux, and WSL
|
||||
$ curl -fsSL https://bun.sh/install | bash -s "bun-v1.0.0"
|
||||
```
|
||||
|
||||
```bash#NPM
|
||||
```bash#npm
|
||||
$ npm install -g bun # the last `npm` command you'll ever need
|
||||
```
|
||||
|
||||
@@ -39,17 +39,30 @@ $ proto install bun
|
||||
|
||||
### Windows
|
||||
|
||||
Bun provides a _limited, experimental_ native build for Windows. At the moment, only the Bun runtime is supported.
|
||||
{% callout %}
|
||||
Bun requires a minimum of Windows 10 version 1809
|
||||
{% /callout %}
|
||||
|
||||
- `bun <file>`
|
||||
- `bun run <file>`
|
||||
Bun provides a _limited, experimental_ native build for Windows. It is recommended to use Bun within [Windows Subsystem for Linux](https://learn.microsoft.com/en-us/windows/wsl/install) and follow the above instructions. To help catch bugs, the experimental build enables many debugging assertions, which will make the binary slower than what the stable version will be.
|
||||
|
||||
The test runner, package manager, and bundler are still under development. The following commands have been disabled.
|
||||
To install, paste this into a terminal:
|
||||
|
||||
- `bun test`
|
||||
- `bun install/add/remove`
|
||||
- `bun link/unlink`
|
||||
- `bun build`
|
||||
{% codetabs %}
|
||||
|
||||
```powershell#PowerShell/cmd.exe
|
||||
# WARNING: No stability is guaranteed on the experimental Windows builds
|
||||
powershell -c "irm bun.sh/install.ps1|iex"
|
||||
```
|
||||
|
||||
```powershell#Scoop
|
||||
# WARNING: No stability is guaranteed on the experimental Windows builds
|
||||
scoop bucket add versions
|
||||
scoop install bun-canary
|
||||
```
|
||||
|
||||
{% /codetabs %}
|
||||
|
||||
For support and discussion, please join the [#windows channel on our Discord](http://bun.sh/discord).
|
||||
|
||||
## Docker
|
||||
|
||||
|
||||
10
docs/nav.ts
10
docs/nav.ts
@@ -116,6 +116,9 @@ export default {
|
||||
page("runtime/nodejs-apis", "Node.js compatibility", {
|
||||
description: `Bun aims for full Node.js compatibility. This page tracks the current compatibility status.`,
|
||||
}),
|
||||
page("bundler/executables", "Single-file executable", {
|
||||
description: "Compile a TypeScript or JavaScript file to a standalone executable",
|
||||
}),
|
||||
page("runtime/plugins", "Plugins", {
|
||||
description: `Implement custom loaders and module resolution logic with Bun's plugin system.`,
|
||||
}),
|
||||
@@ -204,9 +207,6 @@ export default {
|
||||
page("bundler/plugins", "Plugins", {
|
||||
description: `Implement custom loaders and module resolution logic with Bun's plugin system.`,
|
||||
}),
|
||||
page("bundler/executables", "Executables", {
|
||||
description: "Compile a TypeScript or JavaScript file to a standalone executable",
|
||||
}),
|
||||
page("bundler/macros", "Macros", {
|
||||
description: `Run JavaScript functions at bundle-time and inline the results into your bundle`,
|
||||
}),
|
||||
@@ -340,6 +340,10 @@ export default {
|
||||
description: `Bun implements the Node-API spec for building native addons.`,
|
||||
}), // "`Node-API`"),
|
||||
|
||||
page("api/glob", "Glob", {
|
||||
description: `Bun includes a fast native Glob implementation for matching file paths.`,
|
||||
}), // "`Glob`"),
|
||||
|
||||
page("api/semver", "Semver", {
|
||||
description: `Bun's native Semver implementation is 20x faster than the popular \`node-semver\` package.`,
|
||||
}), // "`Semver`"),
|
||||
|
||||
@@ -1,21 +1,42 @@
|
||||
The following document is not yet complete, please join the [#windows channel on our Discord](http://bun.sh/discord) for help.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
### System Dependencies
|
||||
{% details summary="Extra notes for Bun Core Team Members" %}
|
||||
|
||||
- [Visual Studio](https://visualstudio.microsoft.com) with the "Desktop Development with C++" workload. You should install Git and CMake from here, if not already installed.
|
||||
- Ninja
|
||||
- Go
|
||||
- Rust
|
||||
- NASM
|
||||
- Perl
|
||||
- Ruby
|
||||
- Node.js (until bun runs stably on windows)
|
||||
Here are the extra steps I ran on my fresh Windows machine (some of these are a little opinionated).
|
||||
|
||||
<!--
|
||||
TODO: missing the rest of the things
|
||||
- Change user to a local account (set username to `window` and empty password)
|
||||
- (Empty password will disable the password and auto-login on boot)
|
||||
- Set Windows Terminal as default terminal
|
||||
- Install latest version of Powershell
|
||||
- Display scale to 100%
|
||||
- Remove McAfee and enable Windows Defender (default antivirus, does not nag you)
|
||||
- Install Software
|
||||
- OpenSSH server (run these in an elevated terminal)
|
||||
- `Add-WindowsCapability -Online -Name OpenSSH.Client~~~~0.0.1.0`
|
||||
- `Add-WindowsCapability -Online -Name OpenSSH.Server~~~~0.0.1.0`
|
||||
- `Start-Service sshd`
|
||||
- `Set-Service -Name sshd -StartupType 'Automatic'`
|
||||
- `New-ItemProperty -Path "HKLM:\SOFTWARE\OpenSSH" -Name DefaultShell -Value "C:\Program Files\PowerShell\7\pwsh.exe" -PropertyType String -Force`
|
||||
- Configure in `C:\ProgramData\ssh`
|
||||
- Add ssh keys (in ProgramData because it is an admin account)
|
||||
- Tailscale (login with GitHub so it joins the team tailnet)
|
||||
- Visual Studio Code
|
||||
- Configure `git`
|
||||
- `git config user.name "your name"`
|
||||
- `git config user.email "your@email"`
|
||||
- Disable sleep mode and the lid switch by going to "Power Options" and configuring everything there.
|
||||
|
||||
I recommend using VS Code through SSH instead of Tunnels or the Tailscale extension; it seems to be more reliable.
|
||||
|
||||
{% /details %}
|
||||
|
||||
Make sure to use powershell with the proper shell environment loaded. To do so, you can run:
|
||||
|
||||
```ps1
|
||||
.\scripts\env.ps1
|
||||
```
|
||||
winget install OpenJS.NodeJS.LTS
|
||||
``` -->
|
||||
|
||||
### Enable Scripts
|
||||
|
||||
@@ -25,12 +46,52 @@ By default, scripts are blocked.
|
||||
Set-ExecutionPolicy -Scope CurrentUser -ExecutionPolicy Unrestricted
|
||||
```
|
||||
|
||||
### System Dependencies
|
||||
|
||||
- [Visual Studio](https://visualstudio.microsoft.com) with the "Desktop Development with C++" workload.
|
||||
- Install Git and CMake from here, if not already installed.
|
||||
|
||||
After Visual Studio, you need the following:
|
||||
|
||||
- LLVM 16
|
||||
- Go
|
||||
- Rust
|
||||
- NASM
|
||||
- Perl
|
||||
- Ruby
|
||||
- Node.js (until bun is stable enough on windows)
|
||||
|
||||
[Scoop](https://scoop.sh) can be used to install these easily.
|
||||
|
||||
```bash
|
||||
scoop install nodejs-lts go rust nasm ruby perl
|
||||
scoop install llvm@16.0.4 # scoop bug if you install llvm and the rest at the same time
|
||||
```
|
||||
|
||||
If you intend on building WebKit locally (optional), you should install some more packages:
|
||||
|
||||
```bash
|
||||
scoop install make cygwin python
|
||||
```
|
||||
|
||||
From here on out, it is **expected you use a Developer PowerShell Terminal with `.\scripts\env.ps1` sourced**. This script is available in the Bun repository and can be loaded by executing it.
|
||||
|
||||
```ps1
|
||||
$ .\scripts\env.ps1
|
||||
```
|
||||
|
||||
To verify, you can check for an MSVC-only command line such as `mt.exe`
|
||||
|
||||
```ps1
|
||||
Get-Command mt
|
||||
```
|
||||
|
||||
### Zig
|
||||
|
||||
Bun pins a version of Zig. As the compiler is still in development, breaking changes happen often that will break the build. It is recommended to use [Zigup](https://github.com/marler8997/zigup/releases) as it can quickly switch to any version by name, but you can also [manually download Zig](https://ziglang.org/download/).
|
||||
|
||||
```bash
|
||||
$ zigup 0.12.0-dev.1297+a9e66ed73
|
||||
$ zigup 0.12.0-dev.1604+caae40c21
|
||||
```
|
||||
|
||||
{% callout %}
|
||||
@@ -39,18 +100,20 @@ We last updated Zig on **October 26th, 2023**
|
||||
|
||||
### Codegen
|
||||
|
||||
On Unix platforms, we depend on an existing build of Bun to generate code for itself. Since the Windows branch is not stable enough for this to pass, you currently need to generate the code.
|
||||
|
||||
On a system with Bun installed, run:
|
||||
On Unix platforms, we depend on an existing build of Bun to generate code for itself. Since the Windows build is not stable enough for this to run the code generators, you currently need to use another computer or WSL to generate this:
|
||||
|
||||
```bash
|
||||
$ bash ./scripts/cross-compile-codegen.sh win32 x64
|
||||
# -> build-codegen-win32-x64
|
||||
$ wsl --install # run twice if it doesn't install
|
||||
# in the linux environment
|
||||
$ sudo apt install unzip
|
||||
$ curl -fsSL https://bun.sh/install | bash
|
||||
```
|
||||
|
||||
Copy the contents of this to the Windows machine into a folder named `build`
|
||||
Whenever codegen-related things are updated, please re-run
|
||||
|
||||
TODO: Use WSL to automatically run codegen without a separate machine.
|
||||
```ps1
|
||||
$ .\scripts\codegen.ps1
|
||||
```
|
||||
|
||||
## Building
|
||||
|
||||
@@ -58,11 +121,11 @@ TODO: Use WSL to automatically run codegen without a separate machine.
|
||||
npm install
|
||||
|
||||
.\scripts\env.ps1
|
||||
|
||||
.\scripts\update-submodules.ps1
|
||||
.\scripts\all-dependencies.ps1
|
||||
.\scripts\codegen.ps1
|
||||
|
||||
cd build # this was created by the codegen script in the prerequisites
|
||||
cd build # this was created by the codegen.ps1 script in the prerequisites
|
||||
|
||||
cmake .. -G Ninja -DCMAKE_BUILD_TYPE=Debug
|
||||
ninja
|
||||
@@ -73,3 +136,13 @@ If this was successful, you should have a `bun-debug.exe` in the `build` folder.
|
||||
```ps1
|
||||
.\bun-debug.exe --version
|
||||
```
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### .rc file fails to build
|
||||
|
||||
`llvm-rc.exe` is odd; don't use it. Use `rc.exe` instead. To do this, make sure you are in a Visual Studio dev terminal, and check `rc /?` to ensure it is the `Microsoft Resource Compiler`.
|
||||
|
||||
### failed to write output 'bun-debug.exe': permission denied
|
||||
|
||||
You cannot overwrite `bun-debug.exe` if it is already open. You likely have a running instance, maybe in the VS Code debugger?
|
||||
|
||||
@@ -2,6 +2,30 @@ Configuring a development environment for Bun can take 10-30 minutes depending o
|
||||
|
||||
If you are using Windows, you must use a WSL environment as Bun does not yet compile on Windows natively.
|
||||
|
||||
## Install Dependencies
|
||||
|
||||
Using your system's package manager, install Bun's dependencies:
|
||||
|
||||
{% codetabs %}
|
||||
|
||||
```bash#macOS (Homebrew)
|
||||
$ brew install automake ccache cmake coreutils gnu-sed go icu4c libiconv libtool ninja pkg-config rust ruby
|
||||
```
|
||||
|
||||
```bash#Ubuntu/Debian
|
||||
$ sudo apt install cargo ccache cmake git golang libtool ninja-build pkg-config rustc ruby-full xz-utils
|
||||
```
|
||||
|
||||
```bash#Arch
|
||||
$ sudo pacman -S base-devel ccache cmake git go libiconv libtool make ninja pkg-config python rust sed unzip ruby
|
||||
```
|
||||
|
||||
```bash#Fedora
|
||||
$ sudo dnf install cargo ccache cmake git golang libtool ninja-build pkg-config rustc ruby libatomic-static libstdc++-static sed unzip which libicu-devel 'perl(Math::BigInt)'
|
||||
```
|
||||
|
||||
{% /codetabs %}
|
||||
|
||||
Before starting, you will need to already have a release build of Bun installed, as we use our bundler to transpile and minify our code, as well as for code generation scripts.
|
||||
|
||||
{% codetabs %}
|
||||
@@ -71,7 +95,7 @@ If not, run this to manually link it:
|
||||
|
||||
```bash#macOS (Homebrew)
|
||||
# use fish_add_path if you're using fish
|
||||
$ export PATH="$PATH:$(brew --prefix llvm@16)/bin"
|
||||
$ export PATH="$(brew --prefix llvm@16)/bin:$PATH"
|
||||
```
|
||||
|
||||
```bash#Arch
|
||||
@@ -81,43 +105,6 @@ $ export PATH="$PATH:/usr/lib/llvm16/bin"
|
||||
|
||||
{% /codetabs %}
|
||||
|
||||
## Install Dependencies
|
||||
|
||||
Using your system's package manager, install the rest of Bun's dependencies:
|
||||
|
||||
{% codetabs %}
|
||||
|
||||
```bash#macOS (Homebrew)
|
||||
$ brew install automake ccache cmake coreutils gnu-sed go libiconv libtool ninja pkg-config rust
|
||||
```
|
||||
|
||||
```bash#Ubuntu/Debian
|
||||
$ sudo apt install cargo ccache cmake git golang libtool ninja-build pkg-config rustc ruby-full
|
||||
```
|
||||
|
||||
```bash#Arch
|
||||
$ sudo pacman -S base-devel ccache cmake git go libiconv libtool make ninja pkg-config python rust sed unzip ruby
|
||||
```
|
||||
|
||||
```bash#Fedora
|
||||
$ sudo dnf install cargo ccache cmake git golang libtool ninja-build pkg-config rustc libatomic-static libstdc++-static sed unzip
|
||||
```
|
||||
|
||||
{% /codetabs %}
|
||||
|
||||
## Install Zig
|
||||
|
||||
Zig can be installed either with our npm package [`@oven/zig`](https://www.npmjs.com/package/@oven/zig), or by using [zigup](https://github.com/marler8997/zigup).
|
||||
|
||||
```bash
|
||||
$ bun install -g @oven/zig
|
||||
$ zigup 0.12.0-dev.1297+a9e66ed73
|
||||
```
|
||||
|
||||
{% callout %}
|
||||
We last updated Zig on **October 26th, 2023**
|
||||
{% /callout %}
|
||||
|
||||
## Building Bun
|
||||
|
||||
After cloning the repository, run the following command to run the first build. This may take a while as it will clone submodules and build dependencies.
|
||||
@@ -143,7 +130,7 @@ These two scripts, `setup` and `build`, are aliases to do roughly the following:
|
||||
|
||||
```bash
|
||||
$ ./scripts/setup.sh
|
||||
$ cmake -S . -G Ninja -B build -DCMAKE_BUILD_TYPE=Debug
|
||||
$ cmake -S . -B build -G Ninja -DCMAKE_BUILD_TYPE=Debug
|
||||
$ ninja -C build # 'bun run build' runs just this
|
||||
```
|
||||
|
||||
@@ -300,12 +287,13 @@ If you see this error when compiling, run:
|
||||
$ xcode-select --install
|
||||
```
|
||||
|
||||
## Arch Linux / Cannot find `libatomic.a`
|
||||
## Cannot find `libatomic.a`
|
||||
|
||||
Bun requires `libatomic` to be statically linked. On Arch Linux, it is only given as a shared library, but as a workaround you can symlink it to get the build working locally.
|
||||
Bun defaults to linking `libatomic` statically, as not all systems have it. If you are building on a distro that does not have a static libatomic available, you can run the following command to enable dynamic linking:
|
||||
|
||||
```bash
|
||||
$ sudo ln -s /lib/libatomic.so /lib/libatomic.a
|
||||
$ cmake -Bbuild -GNinja -DUSE_STATIC_LIBATOMIC=ON
|
||||
$ ninja -Cbuild
|
||||
```
|
||||
|
||||
The built version of Bun may not work on other systems if compiled this way.
|
||||
|
||||
@@ -30,6 +30,13 @@ Bun statically links these libraries:
|
||||
|
||||
---
|
||||
|
||||
---
|
||||
|
||||
- [`brotli`](https://github.com/google/brotli)
|
||||
- MIT
|
||||
|
||||
---
|
||||
|
||||
- [`libarchive`](https://github.com/libarchive/libarchive)
|
||||
- [several licenses](https://github.com/libarchive/libarchive/blob/master/COPYING)
|
||||
|
||||
|
||||
@@ -85,7 +85,7 @@ Bun can also execute `"scripts"` from your `package.json`. Add the following scr
|
||||
+ "start": "bun run index.ts"
|
||||
+ },
|
||||
"devDependencies": {
|
||||
"bun-types": "^0.7.0"
|
||||
"@types/bun": "^1.0.0"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
@@ -54,7 +54,7 @@ Click the link in the right column to jump to the associated documentation.
|
||||
|
||||
---
|
||||
|
||||
- HTML Rewriting
|
||||
- Streaming HTML Transformations
|
||||
- [`HTMLRewriter`](/docs/api/html-rewriter)
|
||||
|
||||
---
|
||||
@@ -94,6 +94,11 @@ Click the link in the right column to jump to the associated documentation.
|
||||
|
||||
---
|
||||
|
||||
- Glob
|
||||
- [`Bun.Glob`](/docs/api/glob)
|
||||
|
||||
---
|
||||
|
||||
- Utilities
|
||||
- [`Bun.version`](/docs/api/utils#bun-version) [`Bun.revision`](/docs/api/utils#bun-revision) [`Bun.env`](/docs/api/utils#bun-env) [`Bun.main`](/docs/api/utils#bun-main) [`Bun.sleep()`](/docs/api/utils#bun-sleep) [`Bun.sleepSync()`](/docs/api/utils#bun-sleepsync) [`Bun.which()`](/docs/api/utils#bun-which) [`Bun.peek()`](/docs/api/utils#bun-peek) [`Bun.openInEditor()`](/docs/api/utils#bun-openineditor) [`Bun.deepEquals()`](/docs/api/utils#bun-deepequals) [`Bun.escapeHTML()`](/docs/api/utils#bun-escapehtml) [`Bun.fileURLToPath()`](/docs/api/utils#bun-fileurltopath) [`Bun.pathToFileURL()`](/docs/api/utils#bun-pathtofileurl) [`Bun.gzipSync()`](/docs/api/utils#bun-gzipsync) [`Bun.gunzipSync()`](/docs/api/utils#bun-gunzipsync) [`Bun.deflateSync()`](/docs/api/utils#bun-deflatesync) [`Bun.inflateSync()`](/docs/api/utils#bun-inflatesync) [`Bun.inspect()`](/docs/api/utils#bun-inspect) [`Bun.nanoseconds()`](/docs/api/utils#bun-nanoseconds) [`Bun.readableStreamTo*()`](/docs/api/utils#bun-readablestreamto) [`Bun.resolveSync()`](/docs/api/utils#bun-resolvesync)
|
||||
|
||||
|
||||
@@ -313,7 +313,7 @@ frozenLockfile = false
|
||||
|
||||
### `install.dryRun`
|
||||
|
||||
Whether to install optional dependencies. Default `false`. When true, it's equivalent to setting `--dry-run` on all `bun install` commands.
|
||||
Whether `bun install` will actually install dependencies. Default `false`. When true, it's equivalent to setting `--dry-run` on all `bun install` commands.
|
||||
|
||||
```toml
|
||||
[install]
|
||||
|
||||
@@ -25,9 +25,25 @@ Or programmatically by assigning a property to `process.env`.
|
||||
process.env.FOO = "hello";
|
||||
```
|
||||
|
||||
### Manually specifying `.env` files
|
||||
|
||||
Bun supports `--env-file` to override which specific `.env` file to load. You can use `--env-file` when running scripts in bun's runtime, or when running package.json scripts.
|
||||
|
||||
```sh
|
||||
bun --env-file=.env.1 src/index.ts
|
||||
|
||||
bun --env-file=.env.abc --env-file=.env.def run build
|
||||
```
|
||||
|
||||
### Quotation marks
|
||||
|
||||
Bun supports double quotes, single quotes, and
|
||||
Bun supports double quotes, single quotes, and template literal backticks:
|
||||
|
||||
```txt#.env
|
||||
FOO='hello'
|
||||
FOO="hello"
|
||||
FOO=`hello`
|
||||
```
|
||||
|
||||
### Expansion
|
||||
|
||||
@@ -75,10 +91,11 @@ The current environment variables can be accessed via `process.env`.
|
||||
process.env.API_TOKEN; // => "secret"
|
||||
```
|
||||
|
||||
Bun also exposes these variables via `Bun.env`, which is a simple alias of `process.env`.
|
||||
Bun also exposes these variables via `Bun.env` and `import.meta.env`, which are simple aliases of `process.env`.
|
||||
|
||||
```ts
|
||||
Bun.env.API_TOKEN; // => "secret"
|
||||
import.meta.env.API_TOKEN; // => "secret"
|
||||
```
|
||||
|
||||
To print all currently-set environment variables to the command line, run `bun run env`. This is useful for debugging.
|
||||
@@ -126,6 +143,11 @@ These environment variables are read by Bun and configure aspects of its behavio
|
||||
|
||||
---
|
||||
|
||||
- `BUN_RUNTIME_TRANSPILER_CACHE_PATH`
|
||||
- The runtime transpiler caches the transpiled output of source files larger than 50 KB. This makes CLIs using Bun load faster. If `BUN_RUNTIME_TRANSPILER_CACHE_PATH` is set, then the runtime transpiler will cache transpiled output to the specified directory. If `BUN_RUNTIME_TRANSPILER_CACHE_PATH` is set to an empty string or the string `"0"`, then the runtime transpiler will not cache transpiled output. If `BUN_RUNTIME_TRANSPILER_CACHE_PATH` is unset, then the runtime transpiler will cache transpiled output to the platform-specific cache directory.
|
||||
|
||||
---
|
||||
|
||||
- `TMPDIR`
|
||||
- Bun occasionally requires a directory to store intermediate assets during bundling or other operations. If unset, defaults to the platform-specific temporary directory: `/tmp` on Linux, `/private/tmp` on macOS.
|
||||
|
||||
@@ -142,6 +164,31 @@ These environment variables are read by Bun and configure aspects of its behavio
|
||||
---
|
||||
|
||||
- `DO_NOT_TRACK`
|
||||
- If `DO_NOT_TRACK=1`, then analytics are [disabled](https://do-not-track.dev/). Bun records bundle timings (so we can answer with data, "is Bun getting faster?") and feature usage (e.g., "are people actually using macros?"). The request body size is about 60 bytes, so it's not a lot of data. Equivalent of `telemetry=false` in bunfig.
|
||||
- Telemetry is not sent yet as of November 28th, 2023, but we are planning to add telemetry in the coming months. If `DO_NOT_TRACK=1`, then analytics are [disabled](https://do-not-track.dev/). Bun records bundle timings (so we can answer with data, "is Bun getting faster?") and feature usage (e.g., "are people actually using macros?"). The request body size is about 60 bytes, so it's not a lot of data. Equivalent of `telemetry=false` in bunfig.
|
||||
|
||||
{% /table %}
|
||||
|
||||
## Runtime transpiler caching
|
||||
|
||||
For files larger than 50 KB, Bun caches transpiled output into `$BUN_RUNTIME_TRANSPILER_CACHE_PATH` or the platform-specific cache directory. This makes CLIs using Bun load faster.
|
||||
|
||||
This transpiler cache is global and shared across all projects. It is safe to delete the cache at any time. It is a content-addressable cache, so it will never contain duplicate entries. It is also safe to delete the cache while a Bun process is running.
|
||||
|
||||
It is recommended to disable this cache when using ephemeral filesystems, such as those in Docker containers. Bun's Docker images automatically disable this cache.
|
||||
|
||||
### Disable the runtime transpiler cache
|
||||
|
||||
To disable the runtime transpiler cache, set `BUN_RUNTIME_TRANSPILER_CACHE_PATH` to an empty string or the string `"0"`.
|
||||
|
||||
```sh
|
||||
BUN_RUNTIME_TRANSPILER_CACHE_PATH=0 bun run dev
|
||||
```
|
||||
|
||||
### What does it cache?
|
||||
|
||||
It caches:
|
||||
|
||||
- The transpiled output of source files larger than 50 KB.
|
||||
- The sourcemap for the transpiled output of the file
|
||||
|
||||
The file extension `.pile` is used for these cached files.
|
||||
|
||||
@@ -80,6 +80,17 @@ $ bun run ./my-wasm-app.whatever
|
||||
**Note** — WASI support is based on [wasi-js](https://github.com/sagemathinc/cowasm/tree/main/core/wasi-js). Currently, it only supports WASI binaries that use the `wasi_snapshot_preview1` or `wasi_unstable` APIs. Bun's implementation is not fully optimized for performance; this will become more of a priority as WASM grows in popularity.
|
||||
{% /callout %}
|
||||
|
||||
## SQLite
|
||||
|
||||
You can import sqlite databases directly into your code. Bun will automatically load the database and return a `Database` object.
|
||||
|
||||
```ts
|
||||
import db from "./my.db" with {type: "sqlite"};
|
||||
console.log(db.query("select * from users LIMIT 1").get());
|
||||
```
|
||||
|
||||
This uses [`bun:sqlite`](/docs/api/sqlite).
|
||||
|
||||
## Custom loaders
|
||||
|
||||
Support for additional file types can be implemented with plugins. Refer to [Runtime > Plugins](/docs/bundler/plugins) for full documentation.
|
||||
|
||||
@@ -6,7 +6,7 @@ This page is updated regularly to reflect compatibility status of the latest ver
|
||||
|
||||
### [`node:assert`](https://nodejs.org/api/assert.html)
|
||||
|
||||
🟡 Missing `doesNotMatch`
|
||||
🟢 Fully implemented.
|
||||
|
||||
### [`node:async_hooks`](https://nodejs.org/api/async_hooks.html)
|
||||
|
||||
@@ -52,11 +52,11 @@ Some methods are not optimized yet.
|
||||
|
||||
### [`node:events`](https://nodejs.org/api/events.html)
|
||||
|
||||
🟡 Missing `on` `addAbortListener` `getMaxListeners`
|
||||
🟡 Missing `addAbortListener` `events.getMaxListeners`
|
||||
|
||||
### [`node:fs`](https://nodejs.org/api/fs.html)
|
||||
|
||||
🟡 Missing `Dir` `fdatasync` `fdatasyncSync` `openAsBlob` `opendir` `opendirSync` `statfs` `statfsSync`. `fs.promises.open` incorrectly returns a file descriptor instead of a `FileHandle`.
|
||||
🟡 Missing `Dir` `openAsBlob` `opendir` `opendirSync` `statfs` `statfsSync`
|
||||
|
||||
### [`node:http`](https://nodejs.org/api/http.html)
|
||||
|
||||
@@ -64,7 +64,7 @@ Some methods are not optimized yet.
|
||||
|
||||
### [`node:http2`](https://nodejs.org/api/http2.html)
|
||||
|
||||
🔴 Not implemented.
|
||||
🟡 Client is supported, but server isn't yet.
|
||||
|
||||
### [`node:https`](https://nodejs.org/api/https.html)
|
||||
|
||||
@@ -80,7 +80,7 @@ Some methods are not optimized yet.
|
||||
|
||||
### [`node:net`](https://nodejs.org/api/net.html)
|
||||
|
||||
🟡 Missing `BlockList` `SocketAddress` `Stream` `getDefaultAutoSelectFamily` `getDefaultAutoSelectFamilyAttemptTimeout` `setDefaultAutoSelectFamily` `setDefaultAutoSelectFamilyAttemptTimeout` `Server#ref()` `Server#unref()` `Socket#ref()` `Socket#unref()`.
|
||||
🟡 Missing `BlockList` `SocketAddress` `Stream` `getDefaultAutoSelectFamily` `getDefaultAutoSelectFamilyAttemptTimeout` `setDefaultAutoSelectFamily` `setDefaultAutoSelectFamilyAttemptTimeout`
|
||||
|
||||
### [`node:os`](https://nodejs.org/api/os.html)
|
||||
|
||||
@@ -92,7 +92,7 @@ Some methods are not optimized yet.
|
||||
|
||||
### [`node:perf_hooks`](https://nodejs.org/api/perf_hooks.html)
|
||||
|
||||
🟡 Only `perf_hooks.performance.now()` and `perf_hooks.performance.timeOrigin` are implemented. Missing `Performance` `PerformanceMark` `PerformanceMeasure` `PerformanceObserverEntryList` `PerformanceResourceTiming` `createHistogram` `monitorEventLoopDelay`. It's recommended to use `performance` global instead of `perf_hooks.performance`.
|
||||
🟡 Missing `createHistogram` `monitorEventLoopDelay`. It's recommended to use `performance` global instead of `perf_hooks.performance`.
|
||||
|
||||
### [`node:process`](https://nodejs.org/api/process.html)
|
||||
|
||||
@@ -126,6 +126,10 @@ Some methods are not optimized yet.
|
||||
|
||||
🟡 See [`node:util`](#node-util).
|
||||
|
||||
### [`node:test`](https://nodejs.org/api/test.html)
|
||||
|
||||
🔴 Not implemented. Use [`bun:test`](https://bun.sh/docs/cli/test) instead.
|
||||
|
||||
### [`node:timers`](https://nodejs.org/api/timers.html)
|
||||
|
||||
🟢 Recommended to use global `setTimeout`, et al. instead.
|
||||
@@ -148,7 +152,7 @@ Some methods are not optimized yet.
|
||||
|
||||
### [`node:util`](https://nodejs.org/api/util.html)
|
||||
|
||||
🟡 Missing `MIMEParams` `MIMEType` `aborted` `debug` `getSystemErrorMap` `getSystemErrorName` `parseArgs` `transferableAbortController` `transferableAbortSignal` `stripVTControlCharacters`
|
||||
🟡 Missing `MIMEParams` `MIMEType` `aborted` `debug` `getSystemErrorMap` `getSystemErrorName` `transferableAbortController` `transferableAbortSignal` `stripVTControlCharacters`
|
||||
|
||||
### [`node:v8`](https://nodejs.org/api/v8.html)
|
||||
|
||||
@@ -164,7 +168,7 @@ Some methods are not optimized yet.
|
||||
|
||||
### [`node:worker_threads`](https://nodejs.org/api/worker_threads.html)
|
||||
|
||||
🟡 `Worker` doesn't support the following options: `eval` `argv` `execArgv` `stdin` `stdout` `stderr` `trackedUnmanagedFds` `resourceLimits`. Missing `markAsUntransferable` `moveMessagePortToContext` `getHeapSnapshot`.
|
||||
🟡 `Worker` doesn't support the following options: `eval` `stdin` `stdout` `stderr` `trackedUnmanagedFds` `resourceLimits`. Missing `markAsUntransferable` `moveMessagePortToContext` `getHeapSnapshot`.
|
||||
|
||||
### [`node:zlib`](https://nodejs.org/api/zlib.html)
|
||||
|
||||
@@ -336,7 +340,7 @@ The table below lists all globals implemented by Node.js and Bun's current compa
|
||||
|
||||
### [`process`](https://nodejs.org/api/process.html)
|
||||
|
||||
🟡 Missing `domain` `hasUncaughtExceptionCaptureCallback` `initgroups` `report` `resourceUsage` `setUncaughtExceptionCaptureCallback` `setegid` `seteuid` `setgid` `setgroups` `setuid` `allowedNodeEnvironmentFlags` `getActiveResourcesInfo` `setActiveResourcesInfo` `moduleLoadList` `setSourceMapsEnabled` `channel`. `process.binding` is partially implemented.
|
||||
🟡 Missing `domain` `hasUncaughtExceptionCaptureCallback` `initgroups` `resourceUsage` `setUncaughtExceptionCaptureCallback` `setegid` `seteuid` `setgid` `setgroups` `setuid` `allowedNodeEnvironmentFlags` `getActiveResourcesInfo` `setActiveResourcesInfo` `moduleLoadList` `setSourceMapsEnabled` `channel`. `process.binding` is partially implemented.
|
||||
|
||||
### [`queueMicrotask()`](https://developer.mozilla.org/en-US/docs/Web/API/queueMicrotask)
|
||||
|
||||
|
||||
@@ -15,9 +15,11 @@ const myPlugin: BunPlugin = {
|
||||
// implementation
|
||||
},
|
||||
};
|
||||
|
||||
plugin(myPlugin);
|
||||
```
|
||||
|
||||
Plugins have to be registered before any other code runs! To achieve this, use the `preload` option in your [`bunfig.toml`](/docs/runtime/bunfig). Bun automatically loads the files/modules specified in `preload` before running a file.
|
||||
Plugins have to be loaded before any other code runs! To achieve this, use the `preload` option in your [`bunfig.toml`](/docs/runtime/bunfig). Bun automatically loads the files/modules specified in `preload` before running a file.
|
||||
|
||||
```toml
|
||||
preload = ["./myPlugin.ts"]
|
||||
@@ -65,13 +67,12 @@ plugin({
|
||||
name: "YAML",
|
||||
async setup(build) {
|
||||
const { load } = await import("js-yaml");
|
||||
const { readFileSync } = await import("fs");
|
||||
|
||||
// when a .yaml file is imported...
|
||||
build.onLoad({ filter: /\.(yaml|yml)$/ }, (args) => {
|
||||
build.onLoad({ filter: /\.(yaml|yml)$/ }, async (args) => {
|
||||
|
||||
// read and parse the file
|
||||
const text = readFileSync(args.path, "utf8");
|
||||
const text = await Bun.file(args.path).text();
|
||||
const exports = load(text) as Record<string, any>;
|
||||
|
||||
// and returns it as a module
|
||||
@@ -182,13 +183,12 @@ plugin({
|
||||
name: "svelte loader",
|
||||
async setup(build) {
|
||||
const { compile } = await import("svelte/compiler");
|
||||
const { readFileSync } = await import("fs");
|
||||
|
||||
// when a .svelte file is imported...
|
||||
build.onLoad({ filter: /\.svelte$/ }, ({ path }) => {
|
||||
build.onLoad({ filter: /\.svelte$/ }, async ({ path }) => {
|
||||
|
||||
// read and compile it with the Svelte compiler
|
||||
const file = readFileSync(path, "utf8");
|
||||
const file = await Bun.file(path).text();
|
||||
const contents = compile(file, {
|
||||
filename: path,
|
||||
generate: "ssr",
|
||||
@@ -237,7 +237,7 @@ plugin({
|
||||
|
||||
setup(build) {
|
||||
build.module(
|
||||
// The specifier, which can be any string
|
||||
// The specifier, which can be any string - except a built-in, such as "buffer"
|
||||
"my-transpiled-virtual-module",
|
||||
// The callback to run when the module is imported or required for the first time
|
||||
() => {
|
||||
|
||||
@@ -32,7 +32,7 @@ The following Web APIs are partially or completely supported.
|
||||
---
|
||||
|
||||
- WebSockets
|
||||
- [`WebSocket`](https://developer.mozilla.org/en-US/docs/Web/API/WebSocket) (_not production ready_)
|
||||
- [`WebSocket`](https://developer.mozilla.org/en-US/docs/Web/API/WebSocket)
|
||||
|
||||
---
|
||||
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user